From d989633e2e99b4859b071e05fdc233bb1513fa36 Mon Sep 17 00:00:00 2001 From: Paul Van Eck Date: Mon, 21 Jul 2025 19:33:08 +0000 Subject: [PATCH 1/3] [Monitor] Update query package to use TypeSpec This removes Metrics-related code/content and converts this package to be for logs querying only. Signed-off-by: Paul Van Eck --- sdk/monitor/azure-monitor-query/CHANGELOG.md | 11 +- sdk/monitor/azure-monitor-query/MANIFEST.in | 6 +- sdk/monitor/azure-monitor-query/README.md | 194 +- .../azure-monitor-query/TROUBLESHOOTING.md | 44 +- .../azure-monitor-query/_metadata.json | 3 + .../apiview-properties.json | 6 + sdk/monitor/azure-monitor-query/assets.json | 2 +- .../azure-monitor-query/azure/__init__.py | 2 +- .../azure/monitor/__init__.py | 2 +- .../azure/monitor/query/__init__.py | 66 +- .../{_generated/metrics/batch => }/_client.py | 45 +- .../metrics/batch => }/_configuration.py | 42 +- .../azure/monitor/query/_enums.py | 47 - .../monitor/query/_generated/__init__.py | 29 - .../azure/monitor/query/_generated/_client.py | 92 - .../query/_generated/_configuration.py | 38 - .../azure/monitor/query/_generated/_patch.py | 20 - .../monitor/query/_generated/aio/__init__.py | 29 - .../monitor/query/_generated/aio/_client.py | 94 - .../query/_generated/aio/_configuration.py | 38 - .../monitor/query/_generated/aio/_patch.py | 20 - .../_generated/aio/operations/__init__.py | 27 - .../_generated/aio/operations/_operations.py | 2187 --------------- .../query/_generated/metrics/__init__.py | 29 - .../query/_generated/metrics/_client.py | 102 - .../_generated/metrics/_configuration.py | 44 - .../_generated/metrics/_serialization.py | 2118 --------------- .../query/_generated/metrics/aio/__init__.py | 29 - .../query/_generated/metrics/aio/_client.py | 104 - .../_generated/metrics/aio/_configuration.py | 44 - .../query/_generated/metrics/aio/_patch.py | 20 - .../metrics/aio/operations/__init__.py | 29 - .../metrics/aio/operations/_operations.py | 486 ---- .../metrics/aio/operations/_patch.py | 20 - .../_generated/metrics/batch/__init__.py | 29 - .../query/_generated/metrics/batch/_patch.py | 20 - .../metrics/batch/_serialization.py | 2118 --------------- .../_generated/metrics/batch/aio/__init__.py | 29 - .../_generated/metrics/batch/aio/_patch.py | 20 - .../batch/aio/operations/_operations.py | 537 ---- .../metrics/batch/aio/operations/_patch.py | 20 - .../metrics/batch/operations/_operations.py | 599 ----- .../metrics/batch/operations/_patch.py | 20 - .../query/_generated/metrics/batch/py.typed | 1 - .../_generated/metrics/operations/__init__.py | 29 - .../metrics/operations/_operations.py | 604 ----- .../_generated/metrics/operations/_patch.py | 20 - .../monitor/query/_generated/metrics/py.typed | 1 - .../_generated/operations/_operations.py | 2392 ----------------- .../query/_generated/operations/_patch.py | 20 - .../azure/monitor/query/_generated/py.typed | 1 - .../azure/monitor/query/_helpers.py | 94 +- .../azure/monitor/query/_metrics_client.py | 200 -- .../monitor/query/_metrics_query_client.py | 247 -- .../azure/monitor/query/_models.py | 325 +-- .../operations => _operations}/__init__.py | 7 +- .../monitor/query/_operations/_operations.py | 457 ++++ .../aio/operations => _operations}/_patch.py | 9 +- .../{_logs_query_client.py => _patch.py} | 101 +- .../azure/monitor/query/_sdk_moniker.py | 7 + .../azure/monitor/query/_utils/__init__.py | 6 + .../azure/monitor/query/_utils/model_base.py | 1232 +++++++++ .../serialization.py} | 184 +- .../azure/monitor/query/_utils/utils.py | 25 + 
.../azure/monitor/query/_version.py | 8 +- .../azure/monitor/query/aio/__init__.py | 26 +- .../metrics/batch => }/aio/_client.py | 45 +- .../metrics/batch => }/aio/_configuration.py | 42 +- .../azure/monitor/query/aio/_helpers_async.py | 26 - .../query/aio/_metrics_client_async.py | 201 -- .../query/aio/_metrics_query_client_async.py | 248 -- .../_operations}/__init__.py | 9 +- .../query/aio/_operations/_operations.py | 390 +++ .../metrics => aio/_operations}/_patch.py | 9 +- ...{_logs_query_client_async.py => _patch.py} | 88 +- .../batch/operations => models}/__init__.py | 8 +- .../azure/monitor/query/models/_enums.py | 42 + .../azure/monitor/query/models/_models.py | 532 ++++ .../azure/monitor/query/models/_patch.py | 23 + .../azure/monitor/query/py.typed | 1 + .../azure-monitor-query/dev_requirements.txt | 4 +- .../azure-monitor-query/samples/README.md | 11 +- .../sample_authentication_async.py | 54 +- .../async_samples/sample_batch_query_async.py | 1 + .../async_samples/sample_log_query_async.py | 1 + .../sample_logs_query_visualization_async.py | 1 + .../sample_metric_definitions_async.py | 49 - .../sample_metric_namespaces_async.py | 47 - .../sample_metrics_query_async.py | 60 - .../sample_metrics_query_multiple_async.py | 66 - .../sample_resource_logs_query_async.py | 1 + .../samples/sample_authentication.py | 52 +- .../samples/sample_batch_query.py | 1 + .../sample_log_query_multiple_workspaces.py | 1 + .../sample_logs_query_key_value_form.py | 1 + .../sample_logs_query_visualization.py | 1 + .../samples/sample_logs_single_query.py | 1 + ...sample_logs_single_query_partial_result.py | 1 + .../samples/sample_metric_definitions.py | 36 - .../samples/sample_metric_namespaces.py | 34 - .../samples/sample_metrics_query.py | 45 - .../samples/sample_metrics_query_multiple.py | 57 - .../samples/sample_resource_logs_query.py | 1 + .../samples/sample_server_timeout.py | 1 + .../sample_single_log_query_without_pandas.py | 1 + sdk/monitor/azure-monitor-query/setup.py | 71 +- .../azure-monitor-query/swagger/README.md | 111 - .../tests/base_testcase.py | 38 - .../azure-monitor-query/tests/conftest.py | 8 +- .../tests/perfstress_tests/README.md | 1 - .../tests/perfstress_tests/metric_query.py | 55 - .../azure-monitor-query/tests/test_helpers.py | 20 +- .../tests/test_logs_client.py | 8 +- .../tests/test_logs_client_async.py | 10 +- .../tests/test_logs_timespans.py | 14 +- .../tests/test_metrics_client.py | 60 - .../tests/test_metrics_client_async.py | 70 - .../tests/test_metrics_query_client.py | 125 - .../tests/test_metrics_query_client_async.py | 149 - .../azure-monitor-query/tsp-location.yaml | 4 + 120 files changed, 3169 insertions(+), 15224 deletions(-) create mode 100644 sdk/monitor/azure-monitor-query/_metadata.json create mode 100644 sdk/monitor/azure-monitor-query/apiview-properties.json rename sdk/monitor/azure-monitor-query/azure/monitor/query/{_generated/metrics/batch => }/_client.py (69%) rename sdk/monitor/azure-monitor-query/azure/monitor/query/{_generated/metrics/batch => }/_configuration.py (53%) delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/__init__.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_client.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_configuration.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_patch.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/__init__.py delete mode 
100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_client.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_configuration.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_patch.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/__init__.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/_operations.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/__init__.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_client.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_configuration.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_serialization.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/__init__.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_client.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_configuration.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_patch.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/__init__.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/_operations.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/_patch.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/__init__.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_patch.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_serialization.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/__init__.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/_patch.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/_operations.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/_patch.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/_operations.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/_patch.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/py.typed delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/__init__.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/_operations.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/_patch.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/py.typed delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/_operations.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/_patch.py delete mode 100644 
sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/py.typed delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_metrics_client.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_metrics_query_client.py rename sdk/monitor/azure-monitor-query/azure/monitor/query/{_generated/metrics/batch/aio/operations => _operations}/__init__.py (82%) create mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_operations/_operations.py rename sdk/monitor/azure-monitor-query/azure/monitor/query/{_generated/aio/operations => _operations}/_patch.py (61%) rename sdk/monitor/azure-monitor-query/azure/monitor/query/{_logs_query_client.py => _patch.py} (82%) create mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_sdk_moniker.py create mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/__init__.py create mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/model_base.py rename sdk/monitor/azure-monitor-query/azure/monitor/query/{_generated/_serialization.py => _utils/serialization.py} (93%) create mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/utils.py rename sdk/monitor/azure-monitor-query/azure/monitor/query/{_generated/metrics/batch => }/aio/_client.py (69%) rename sdk/monitor/azure-monitor-query/azure/monitor/query/{_generated/metrics/batch => }/aio/_configuration.py (53%) delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_helpers_async.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_metrics_client_async.py delete mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_metrics_query_client_async.py rename sdk/monitor/azure-monitor-query/azure/monitor/query/{_generated/operations => aio/_operations}/__init__.py (76%) create mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_operations/_operations.py rename sdk/monitor/azure-monitor-query/azure/monitor/query/{_generated/metrics => aio/_operations}/_patch.py (61%) rename sdk/monitor/azure-monitor-query/azure/monitor/query/aio/{_logs_query_client_async.py => _patch.py} (84%) rename sdk/monitor/azure-monitor-query/azure/monitor/query/{_generated/metrics/batch/operations => models}/__init__.py (83%) create mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/models/_enums.py create mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/models/_models.py create mode 100644 sdk/monitor/azure-monitor-query/azure/monitor/query/models/_patch.py delete mode 100644 sdk/monitor/azure-monitor-query/samples/async_samples/sample_metric_definitions_async.py delete mode 100644 sdk/monitor/azure-monitor-query/samples/async_samples/sample_metric_namespaces_async.py delete mode 100644 sdk/monitor/azure-monitor-query/samples/async_samples/sample_metrics_query_async.py delete mode 100644 sdk/monitor/azure-monitor-query/samples/async_samples/sample_metrics_query_multiple_async.py delete mode 100644 sdk/monitor/azure-monitor-query/samples/sample_metric_definitions.py delete mode 100644 sdk/monitor/azure-monitor-query/samples/sample_metric_namespaces.py delete mode 100644 sdk/monitor/azure-monitor-query/samples/sample_metrics_query.py delete mode 100644 sdk/monitor/azure-monitor-query/samples/sample_metrics_query_multiple.py delete mode 100644 sdk/monitor/azure-monitor-query/swagger/README.md delete mode 100644 sdk/monitor/azure-monitor-query/tests/perfstress_tests/metric_query.py delete mode 100644 
sdk/monitor/azure-monitor-query/tests/test_metrics_client.py
delete mode 100644 sdk/monitor/azure-monitor-query/tests/test_metrics_client_async.py
delete mode 100644 sdk/monitor/azure-monitor-query/tests/test_metrics_query_client.py
delete mode 100644 sdk/monitor/azure-monitor-query/tests/test_metrics_query_client_async.py
create mode 100644 sdk/monitor/azure-monitor-query/tsp-location.yaml
diff --git a/sdk/monitor/azure-monitor-query/CHANGELOG.md b/sdk/monitor/azure-monitor-query/CHANGELOG.md
index 0d6ee950c5ce..050498cec586 100644
--- a/sdk/monitor/azure-monitor-query/CHANGELOG.md
+++ b/sdk/monitor/azure-monitor-query/CHANGELOG.md
@@ -1,17 +1,26 @@
 # Release History

-## 1.4.2 (Unreleased)
+## 2.0.0 (Unreleased)

 ### Features Added

 ### Breaking Changes

+- `MetricsClient` and `MetricsQueryClient` have been removed from the `azure-monitor-query` package. This is part of the migration to split logs and metrics functionality into separate packages. ([#42205](https://github.com/Azure/azure-sdk-for-python/pull/42205))
+  - The `MetricsClient` has been moved to the new `azure-monitor-querymetrics` package.
+  - Functionality provided by `MetricsQueryClient` can be accessed through the `azure-mgmt-monitor` package.
+
 ### Bugs Fixed

 - Fixed an issue where the package version in operation user agent strings was always set to "unknown" instead of the actual package version. ([#39866](https://github.com/Azure/azure-sdk-for-python/pull/39866))

 ### Other Changes

+- Bump minimum dependencies:
+  - `azure-core` to `>=1.30.0`
+  - `isodate` to `>=0.6.1`
+  - `typing-extensions` to `>=4.6.0`
+
 ## 1.4.1 (2025-01-14)

 ### Other Changes
diff --git a/sdk/monitor/azure-monitor-query/MANIFEST.in b/sdk/monitor/azure-monitor-query/MANIFEST.in
index 61d81c682e83..b6e34c880b60 100644
--- a/sdk/monitor/azure-monitor-query/MANIFEST.in
+++ b/sdk/monitor/azure-monitor-query/MANIFEST.in
@@ -1,7 +1,7 @@
-recursive-include tests *.py *.yaml
-recursive-include samples *.py
 include *.md
 include LICENSE
+include azure/monitor/query/py.typed
+recursive-include tests *.py
+recursive-include samples *.py *.md
 include azure/__init__.py
 include azure/monitor/__init__.py
-include azure/monitor/query/py.typed
diff --git a/sdk/monitor/azure-monitor-query/README.md b/sdk/monitor/azure-monitor-query/README.md
index 01def650de72..4fbe70635b2d 100644
--- a/sdk/monitor/azure-monitor-query/README.md
+++ b/sdk/monitor/azure-monitor-query/README.md
@@ -1,9 +1,8 @@
 # Azure Monitor Query client library for Python

-The Azure Monitor Query client library is used to execute read-only queries against [Azure Monitor][azure_monitor_overview]'s two data platforms:
+The Azure Monitor Query client library is used to execute read-only queries against [Azure Monitor][azure_monitor_overview]'s Logs data platform.

 - [Logs](https://learn.microsoft.com/azure/azure-monitor/logs/data-platform-logs) - Collects and organizes log and performance data from monitored resources. Data from different sources such as platform logs from Azure services, log and performance data from virtual machines agents, and usage and performance data from apps can be consolidated into a single [Azure Log Analytics workspace](https://learn.microsoft.com/azure/azure-monitor/logs/data-platform-logs#log-analytics-and-workspaces). The various data types can be analyzed together using the [Kusto Query Language][kusto_query_language].
-- [Metrics](https://learn.microsoft.com/azure/azure-monitor/essentials/data-platform-metrics) - Collects numeric data from monitored resources into a time series database.
Metrics are numerical values that are collected at regular intervals and describe some aspect of a system at a particular time. Metrics are lightweight and capable of supporting near real-time scenarios, making them useful for alerting and fast detection of issues. **Resources:** @@ -19,13 +18,11 @@ The Azure Monitor Query client library is used to execute read-only queries agai ### Prerequisites -- Python 3.8 or later +- Python 3.9 or later - An [Azure subscription][azure_subscription] -- A [TokenCredential](https://learn.microsoft.com/python/api/azure-core/azure.core.credentials.tokencredential?view=azure-python) implementation, such as an [Azure Identity library credential type](https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#credential-classes). - To query Logs, you need one of the following things: - An [Azure Log Analytics workspace][azure_monitor_create_using_portal] - An Azure resource of any kind (Storage Account, Key Vault, Cosmos DB, etc.) -- To query Metrics, you need an Azure resource of any kind (Storage Account, Key Vault, Cosmos DB, etc.). ### Install the package @@ -37,20 +34,20 @@ pip install azure-monitor-query ### Create the client -An authenticated client is required to query Logs or Metrics. The library includes both synchronous and asynchronous forms of the clients. To authenticate, create an instance of a token credential. Use that instance when creating a `LogsQueryClient`, `MetricsQueryClient`, or `MetricsClient`. The following examples use `DefaultAzureCredential` from the [azure-identity](https://pypi.org/project/azure-identity/) package. +An authenticated client is required to query Logs. The library includes both synchronous and asynchronous forms of the client. To authenticate, create an instance of a token credential. Use that instance when creating a `LogsQueryClient`. The following examples use `DefaultAzureCredential` from the [azure-identity](https://pypi.org/project/azure-identity/) package. + +> **Note**: For Metrics querying capabilities, please use the separate `azure-monitor-querymetrics` package which provides `MetricsClient`, or the `azure-mgmt-monitor` package. #### Synchronous clients -Consider the following example, which creates synchronous clients for both Logs and Metrics querying: +Consider the following example, which creates a synchronous client for Logs querying: ```python from azure.identity import DefaultAzureCredential -from azure.monitor.query import LogsQueryClient, MetricsQueryClient, MetricsClient +from azure.monitor.query import LogsQueryClient credential = DefaultAzureCredential() logs_query_client = LogsQueryClient(credential) -metrics_query_client = MetricsQueryClient(credential) -metrics_client = MetricsClient("https://", credential) ``` #### Asynchronous clients @@ -59,12 +56,10 @@ The asynchronous forms of the query client APIs are found in the `.aio`-suffixed ```python from azure.identity.aio import DefaultAzureCredential -from azure.monitor.query.aio import LogsQueryClient, MetricsQueryClient, MetricsClient +from azure.monitor.query.aio import LogsQueryClient credential = DefaultAzureCredential() async_logs_query_client = LogsQueryClient(credential) -async_metrics_query_client = MetricsQueryClient(credential) -async_metrics_client = MetricsClient("https://", credential) ``` To use the asynchronous clients, you must also install an async transport, such as [aiohttp](https://pypi.org/project/aiohttp/). 
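As a minimal end-to-end sketch of the async client (not part of this patch; the `LOGS_WORKSPACE_ID` environment variable and the `Heartbeat` query are illustrative assumptions), the client is typically used as a context manager so the underlying transport is closed cleanly:

```python
import asyncio
import os
from datetime import timedelta

from azure.identity.aio import DefaultAzureCredential
from azure.monitor.query import LogsQueryStatus
from azure.monitor.query.aio import LogsQueryClient


async def main():
    credential = DefaultAzureCredential()
    client = LogsQueryClient(credential)
    async with client:
        # Query the last hour of records; "Heartbeat | take 5" is an
        # illustrative Kusto query.
        response = await client.query_workspace(
            os.environ["LOGS_WORKSPACE_ID"],  # assumed environment variable
            "Heartbeat | take 5",
            timespan=timedelta(hours=1),
        )
        if response.status == LogsQueryStatus.SUCCESS:
            for table in response.tables:
                for row in table.rows:
                    print(row)
    await credential.close()


asyncio.run(main())
```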
@@ -75,27 +70,21 @@ pip install aiohttp #### Configure client for Azure sovereign cloud -By default, all clients are configured to use the Azure public cloud. To use a sovereign cloud, provide the correct `endpoint` argument when using `LogsQueryClient` or `MetricsQueryClient`. For `MetricsClient`, provide the correct `audience` argument instead. For example: +By default, the client is configured to use the Azure public cloud. To use a sovereign cloud, provide the correct `endpoint` argument when using `LogsQueryClient`. For example: ```python from azure.identity import AzureAuthorityHosts, DefaultAzureCredential -from azure.monitor.query import LogsQueryClient, MetricsQueryClient, MetricsClient +from azure.monitor.query import LogsQueryClient # Authority can also be set via the AZURE_AUTHORITY_HOST environment variable. credential = DefaultAzureCredential(authority=AzureAuthorityHosts.AZURE_GOVERNMENT) -logs_query_client = LogsQueryClient(credential, endpoint="https://api.loganalytics.us/v1") -metrics_query_client = MetricsQueryClient(credential, endpoint="https://management.usgovcloudapi.net") -metrics_client = MetricsClient( - "https://usgovvirginia.metrics.monitor.azure.us", credential, audience="https://metrics.monitor.azure.us" -) +logs_query_client = LogsQueryClient(credential, endpoint="https://api.loganalytics.us") ``` -**Note**: Currently, `MetricsQueryClient` uses the Azure Resource Manager (ARM) endpoint for querying metrics. You need the corresponding management endpoint for your cloud when using this client. This detail is subject to change in the future. - ### Execute the query -For examples of Logs and Metrics queries, see the [Examples](#examples) section. +For examples of Logs queries, see the [Examples](#examples) section. ## Key concepts @@ -105,17 +94,6 @@ The Log Analytics service applies throttling when the request rate is too high. If you're executing a batch logs query, a throttled request returns a `LogsQueryError` object. That object's `code` value is `ThrottledError`. -### Metrics data structure - -Each set of metric values is a time series with the following characteristics: - -- The time the value was collected -- The resource associated with the value -- A namespace that acts like a category for the metric -- A metric name -- The value itself -- Some metrics have multiple dimensions as described in multi-dimensional metrics. Custom metrics can have up to 10 dimensions. - ## Examples - [Logs query](#logs-query) @@ -128,10 +106,6 @@ Each set of metric values is a time series with the following characteristics: - [Query multiple workspaces](#query-multiple-workspaces) - [Include statistics](#include-statistics) - [Include visualization](#include-visualization) -- [Metrics query](#metrics-query) - - [Handle metrics query response](#handle-metrics-query-response) - - [Example of handling response](#example-of-handling-response) - - [Query metrics for multiple resources](#query-metrics-for-multiple-resources) ### Logs query @@ -458,142 +432,6 @@ The `visualization` field is a `dict` that corresponds to the raw JSON response, Interpretation of the visualization data is left to the library consumer. 
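For instance, a minimal sketch of retrieving the raw visualization payload, where the `render` query and the `LOGS_WORKSPACE_ID` environment variable are illustrative assumptions:

```python
import os
from datetime import timedelta

from azure.identity import DefaultAzureCredential
from azure.monitor.query import LogsQueryClient, LogsQueryStatus

client = LogsQueryClient(DefaultAzureCredential())

response = client.query_workspace(
    os.environ["LOGS_WORKSPACE_ID"],  # assumed environment variable
    "AppRequests | summarize count() by bin(TimeGenerated, 1h) | render columnchart",
    timespan=timedelta(days=1),
    include_visualization=True,
)

if response.status == LogsQueryStatus.SUCCESS:
    # Raw JSON visualization payload; its keys mirror the service response
    # and are not further modeled by the library.
    print(response.visualization)
```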
To use this data with the [Plotly graphing library](https://plotly.com/python/), see the [synchronous](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/sample_logs_query_visualization.py) or [asynchronous](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/async_samples/sample_logs_query_visualization_async.py) code samples. -### Metrics query - -The following example gets metrics for an Event Grid subscription. The resource ID (also known as resource URI) is that of an Event Grid topic. - -The resource ID must be that of the resource for which metrics are being queried. It's normally of the format `/subscriptions//resourceGroups//providers//topics/`. - -To find the resource ID/URI: - -1. Navigate to your resource's page in the Azure portal. -1. Select the **JSON View** link in the **Overview** section. -1. Copy the value in the **Resource ID** text box at the top of the JSON view. - -**NOTE**: The metrics are returned in the order of the `metric_names` sent. - -```python -import os -from datetime import timedelta, datetime -from azure.monitor.query import MetricsQueryClient -from azure.identity import DefaultAzureCredential - -credential = DefaultAzureCredential() -client = MetricsQueryClient(credential) -start_time = datetime(2021, 5, 25) -duration = timedelta(days=1) -metrics_uri = os.environ['METRICS_RESOURCE_URI'] -response = client.query_resource( - metrics_uri, - metric_names=["PublishSuccessCount"], - timespan=(start_time, duration) - ) - -for metric in response.metrics: - print(metric.name) - for time_series_element in metric.timeseries: - for metric_value in time_series_element.data: - print(metric_value.time_stamp) -``` - -#### Handle metrics query response - -The metrics query API returns a `MetricsQueryResult` object. The `MetricsQueryResult` object contains properties such as a list of `Metric`-typed objects, `granularity`, `namespace`, and `timespan`. The `Metric` objects list can be accessed using the `metrics` param. Each `Metric` object in this list contains a list of `TimeSeriesElement` objects. Each `TimeSeriesElement` object contains `data` and `metadata_values` properties. 
In visual form, the object hierarchy of the response resembles the following structure: - -``` -MetricsQueryResult -|---granularity -|---timespan -|---cost -|---namespace -|---resource_region -|---metrics (list of `Metric` objects) - |---id - |---type - |---name - |---unit - |---timeseries (list of `TimeSeriesElement` objects) - |---metadata_values - |---data (list of data points represented by `MetricValue` objects) -``` - -#### Example of handling response - -```python -import os -from azure.monitor.query import MetricsQueryClient, MetricAggregationType -from azure.identity import DefaultAzureCredential - -credential = DefaultAzureCredential() -client = MetricsQueryClient(credential) - -metrics_uri = os.environ['METRICS_RESOURCE_URI'] -response = client.query_resource( - metrics_uri, - metric_names=["MatchedEventCount"], - aggregations=[MetricAggregationType.COUNT] - ) - -for metric in response.metrics: - print(metric.name) - for time_series_element in metric.timeseries: - for metric_value in time_series_element.data: - if metric_value.count != 0: - print( - "There are {} matched events at {}".format( - metric_value.count, - metric_value.time_stamp - ) - ) -``` - -#### Query metrics for multiple resources - -To query metrics for multiple Azure resources in a single request, use the `query_resources` method of `MetricsClient`. This method: - -- Calls a different API than the `MetricsQueryClient` methods. -- Requires a regional endpoint when creating the client. For example, "https://westus3.metrics.monitor.azure.com". - -Each Azure resource must reside in: - -- The same region as the endpoint specified when creating the client. -- The same Azure subscription. - -Furthermore: - -- The user must be authorized to read monitoring data at the Azure subscription level. For example, the [Monitoring Reader role](https://learn.microsoft.com/azure/role-based-access-control/built-in-roles/monitor#monitoring-reader) on the subscription to be queried. -- The metric namespace containing the metrics to be queried must be provided. For a list of metric namespaces, see [Supported metrics and log categories by resource type][metric_namespaces]. - -```python -from datetime import timedelta -import os - -from azure.core.exceptions import HttpResponseError -from azure.identity import DefaultAzureCredential -from azure.monitor.query import MetricsClient, MetricAggregationType - -endpoint = "https://westus3.metrics.monitor.azure.com" -credential = DefaultAzureCredential() -client = MetricsClient(endpoint, credential) - -resource_ids = [ - "/subscriptions//resourceGroups//providers//storageAccounts/", - "/subscriptions//resourceGroups//providers//storageAccounts/" -] - -response = client.query_resources( - resource_ids=resource_ids, - metric_namespace="Microsoft.Storage/storageAccounts", - metric_names=["Ingress"], - timespan=timedelta(hours=2), - granularity=timedelta(minutes=5), - aggregations=[MetricAggregationType.AVERAGE], -) - -for metrics_query_result in response: - print(metrics_query_result.timespan) -``` - ## Troubleshooting See our [troubleshooting guide][troubleshooting_guide] for details on how to diagnose various failure scenarios. 
@@ -615,13 +453,6 @@ The following code samples show common scenarios with the Azure Monitor Query cl - [Send multiple queries with LogsQueryClient](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/sample_batch_query.py) - [Send a single query with LogsQueryClient using server timeout](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/sample_server_timeout.py) -#### Metrics query samples - -- [Send a query using MetricsQueryClient](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/sample_metrics_query.py) ([async sample](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metrics_query_async.py)) -- [Send a query to multiple resources in a region and subscription using MetricsClient](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/sample_metrics_query_multiple.py) ([async sample](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metrics_query_multiple_async.py)) -- [Get a list of metric namespaces](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/sample_metric_namespaces.py) ([async sample](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metric_namespaces_async.py)) -- [Get a list of metric definitions](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/sample_metric_definitions.py) ([async sample](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metric_definitions_async.py)) - ## Contributing This project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit [cla.microsoft.com][cla]. 
@@ -639,7 +470,6 @@ This project has adopted the [Microsoft Open Source Code of Conduct][code_of_con [azure_subscription]: https://azure.microsoft.com/free/python/ [changelog]: https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/monitor/azure-monitor-query/CHANGELOG.md [kusto_query_language]: https://learn.microsoft.com/azure/data-explorer/kusto/query/ -[metric_namespaces]: https://learn.microsoft.com/azure/azure-monitor/reference/supported-metrics/metrics-index#supported-metrics-and-log-categories-by-resource-type [package]: https://aka.ms/azsdk-python-monitor-query-pypi [pip]: https://pypi.org/project/pip/ [python_logging]: https://docs.python.org/3/library/logging.html diff --git a/sdk/monitor/azure-monitor-query/TROUBLESHOOTING.md b/sdk/monitor/azure-monitor-query/TROUBLESHOOTING.md index ff036e90966b..88f5fdbc86b3 100644 --- a/sdk/monitor/azure-monitor-query/TROUBLESHOOTING.md +++ b/sdk/monitor/azure-monitor-query/TROUBLESHOOTING.md @@ -6,7 +6,7 @@ This troubleshooting guide contains instructions to diagnose frequently encounte * [General Troubleshooting](#general-troubleshooting) * [Enable client logging](#enable-client-logging) - * [Troubleshooting authentication issues with logs and metrics query requests](#authentication-errors) + * [Troubleshooting authentication issues with query requests](#authentication-errors) * [Troubleshooting running async APIs](#errors-with-running-async-apis) * [Troubleshooting Logs Query](#troubleshooting-logs-query) * [Troubleshooting insufficient access error](#troubleshooting-insufficient-access-error-for-logs-query) @@ -14,9 +14,6 @@ This troubleshooting guide contains instructions to diagnose frequently encounte * [Troubleshooting empty log query results](#troubleshooting-empty-log-query-results) * [Troubleshooting server timeouts when executing logs query request](#troubleshooting-server-timeouts-when-executing-logs-query-request) * [Troubleshooting partially successful logs query requests](#troubleshooting-partially-successful-logs-query-requests) -* [Troubleshooting Metrics Query](#troubleshooting-metrics-query) - * [Troubleshooting insufficient access error](#troubleshooting-insufficient-access-error-for-metrics-query) - * [Troubleshooting unsupported granularity for metrics query](#troubleshooting-unsupported-granularity-for-metrics-query) * [Additional azure-core configurations](#additional-azure-core-configurations) ## General Troubleshooting @@ -53,11 +50,7 @@ client.query_workspace(logging_enable=True) ### Authentication errors -Azure Monitor Query supports Azure Active Directory authentication. Both LogsQueryClient and -MetricsQueryClient have methods to set the `credential`. To provide a valid credential, you can use -`azure-identity` dependency. For more details on getting started, refer to -the [README](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/monitor/azure-monitor-query#create-the-client) -of Azure Monitor Query library. You can also refer to +Azure Monitor Query supports Microsoft Entra ID authentication. LogsQueryClient has methods to set the `credential`. To provide a valid credential, you can use the `azure-identity` package. For more details on getting started, refer to the [README](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/monitor/azure-monitor-query#create-the-client) of the Azure Monitor Query library. 
You can also refer to the [Azure Identity documentation](https://learn.microsoft.com/python/api/overview/azure/identity-readme) for more details on the various types of credential supported in `azure-identity`. @@ -174,39 +167,6 @@ data = response.partial_data error = response.partial_error ``` -## Troubleshooting Metrics Query - -### Troubleshooting insufficient access error for metrics query - -If you get an HTTP error with status code 403 (Forbidden), it means that the provided credentials does not have -sufficient permissions to query the workspace. -```text -"{"error":{"message":"The provided credentials have insufficient access to perform the requested operation","code":"InsufficientAccessError","correlationId":""}}" -``` - -1. Check that the application or user that is making the request has sufficient permissions: - * You can refer to this document to [manage access to workspaces](https://learn.microsoft.com/azure/azure-monitor/logs/manage-access#manage-access-using-workspace-permissions) -2. If the user or application is granted sufficient privileges to query the workspace, make sure you are - authenticating as that user/application. If you are authenticating using the - [DefaultAzureCredential](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/identity/azure-identity#defaultazurecredential) - then check the logs to verify that the credential used is the one you expected. To enable logging, see [enable - client logging](#enable-client-logging) section above. - -For more help on troubleshooting authentication errors please see the Azure Identity client library [troubleshooting guide](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/identity/azure-identity/TROUBLESHOOTING.md). - -### Troubleshooting unsupported granularity for metrics query - -If you notice the following exception, this is due to an invalid time granularity in the metrics query request. Your -query might have set the `granularity` keyword argument to an unsupported duration. - -```text -"{"code":"BadRequest","message":"Invalid time grain duration: PT10M, supported ones are: 00:01:00,00:05:00,00:15:00,00:30:00,01:00:00,06:00:00,12:00:00,1.00:00:00"}" -``` - -As documented in the error message, the supported granularity for metrics queries are 1 minute, 5 minutes, 15 minutes, -30 minutes, 1 hour, 6 hours, 12 hours and 1 day. - - ## Additional azure-core configurations When calling the methods, some properties including `retry_mode`, `timeout`, `connection_verify` can be configured by passing in as keyword arguments. 
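For example, a minimal sketch, where the retry and timeout values are illustrative and `LOGS_WORKSPACE_ID` is an assumed environment variable:

```python
import os
from datetime import timedelta

from azure.identity import DefaultAzureCredential
from azure.monitor.query import LogsQueryClient

client = LogsQueryClient(DefaultAzureCredential())

# azure-core pipeline settings are passed per call as keyword arguments.
response = client.query_workspace(
    os.environ["LOGS_WORKSPACE_ID"],  # assumed environment variable
    "AzureActivity | take 5",
    timespan=timedelta(days=1),
    retry_total=3,  # cap the number of retries for this call
    timeout=60,     # HTTP timeout in seconds, per the azure-core docs
)
```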
See diff --git a/sdk/monitor/azure-monitor-query/_metadata.json b/sdk/monitor/azure-monitor-query/_metadata.json new file mode 100644 index 000000000000..539264d9b9bb --- /dev/null +++ b/sdk/monitor/azure-monitor-query/_metadata.json @@ -0,0 +1,3 @@ +{ + "apiVersion": "v1" +} \ No newline at end of file diff --git a/sdk/monitor/azure-monitor-query/apiview-properties.json b/sdk/monitor/azure-monitor-query/apiview-properties.json new file mode 100644 index 000000000000..ad6da1dd065a --- /dev/null +++ b/sdk/monitor/azure-monitor-query/apiview-properties.json @@ -0,0 +1,6 @@ +{ + "CrossLanguagePackageId": "MonitorQueryLogs", + "CrossLanguageDefinitionId": { + "azure.monitor.query.models._ColumnType": "MonitorQueryLogs.ColumnDataType" + } +} \ No newline at end of file diff --git a/sdk/monitor/azure-monitor-query/assets.json b/sdk/monitor/azure-monitor-query/assets.json index 2488895523bd..fc8cb7be0855 100644 --- a/sdk/monitor/azure-monitor-query/assets.json +++ b/sdk/monitor/azure-monitor-query/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "python", "TagPrefix": "python/monitor/azure-monitor-query", - "Tag": "python/monitor/azure-monitor-query_79499b2a8c" + "Tag": "python/monitor/azure-monitor-query_5e44a95839" } diff --git a/sdk/monitor/azure-monitor-query/azure/__init__.py b/sdk/monitor/azure-monitor-query/azure/__init__.py index 8db66d3d0f0f..d55ccad1f573 100644 --- a/sdk/monitor/azure-monitor-query/azure/__init__.py +++ b/sdk/monitor/azure-monitor-query/azure/__init__.py @@ -1 +1 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/__init__.py index 8db66d3d0f0f..d55ccad1f573 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/__init__.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/__init__.py @@ -1 +1 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/__init__.py index d2601599ac80..6b026266ea45 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/__init__.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/__init__.py @@ -2,61 +2,31 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._logs_query_client import LogsQueryClient -from ._metrics_query_client import MetricsQueryClient -from ._metrics_client import MetricsClient +from typing import TYPE_CHECKING -from ._enums import ( - LogsQueryStatus, - MetricAggregationType, - MetricClass, - MetricNamespaceClassification, - MetricUnit, -) +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import -from ._exceptions import LogsQueryError +from ._client import MonitorQueryLogsClient # type: ignore +from ._version import VERSION -from ._models import ( - LogsQueryResult, - LogsTable, - LogsQueryPartialResult, - LogsTableRow, - MetricsQueryResult, - LogsBatchQuery, - MetricNamespace, - MetricDefinition, - TimeSeriesElement, - Metric, - MetricValue, - MetricAvailability, -) +__version__ = VERSION -from ._version import VERSION +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk __all__ = [ - "MetricAggregationType", - "LogsQueryClient", - "LogsQueryResult", - "LogsQueryPartialResult", - "LogsQueryStatus", - "LogsQueryError", - "LogsTable", - "LogsTableRow", - "LogsBatchQuery", - "MetricsQueryClient", - "MetricsClient", - "MetricNamespace", - "MetricNamespaceClassification", - "MetricDefinition", - "MetricUnit", - "MetricsQueryResult", - "TimeSeriesElement", - "Metric", - "MetricValue", - "MetricClass", - "MetricAvailability", + "MonitorQueryLogsClient", ] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore -__version__ = VERSION +_patch_sdk() diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_client.py similarity index 69% rename from sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_client.py rename to sdk/monitor/azure-monitor-query/azure/monitor/query/_client.py index 6d5b119dc7b9..660e4bc4067c 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_client.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_client.py @@ -2,42 +2,45 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from copy import deepcopy -from typing import Any +from typing import Any, TYPE_CHECKING from typing_extensions import Self from azure.core import PipelineClient from azure.core.pipeline import policies from azure.core.rest import HttpRequest, HttpResponse -from ._configuration import MonitorBatchMetricsClientConfiguration -from ._serialization import Deserializer, Serializer -from .operations import MetricsBatchOperations +from ._configuration import MonitorQueryLogsClientConfiguration +from ._operations._operations import _MonitorQueryLogsClientOperationsMixin +from ._utils.serialization import Deserializer, Serializer +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential -class MonitorBatchMetricsClient: - """Azure Monitor Batch Metrics Python Client. - :ivar metrics_batch: MetricsBatchOperations operations - :vartype metrics_batch: monitor_batch_metrics_client.operations.MetricsBatchOperations - :param endpoint: The regional endpoint to use, for example - https://eastus.metrics.monitor.azure.com. The region should match the region of the requested - resources. For global resources, the region should be 'global'. Required. - :type endpoint: str - :keyword api_version: Api Version. Default value is "2024-02-01". Note that overriding this - default value may result in unsupported behavior. - :paramtype api_version: str +class MonitorQueryLogsClient(_MonitorQueryLogsClientOperationsMixin): + """MonitorQueryLogsClient. + + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword endpoint: The Log Analytics service endpoint. Default value is + "https://api.loganalytics.io". + :paramtype endpoint: str + :keyword api_version: The service API version. Known values are "v1" and None. Default value is + "v1". Note that overriding this default value may result in unsupported behavior. + :paramtype api_version: str or ~azure.monitor.query.models.Versions """ - def __init__( # pylint: disable=missing-client-constructor-parameter-credential - self, endpoint: str, **kwargs: Any + def __init__( + self, credential: "TokenCredential", *, endpoint: str = "https://api.loganalytics.io", **kwargs: Any ) -> None: - _endpoint = "{endpoint}" - self._config = MonitorBatchMetricsClientConfiguration(endpoint=endpoint, **kwargs) + _endpoint = "{endpoint}/{apiVersion}" + self._config = MonitorQueryLogsClientConfiguration(credential=credential, endpoint=endpoint, **kwargs) + _policies = kwargs.pop("policies", None) if _policies is None: _policies = [ @@ -60,7 +63,6 @@ def __init__( # pylint: disable=missing-client-constructor-parameter-credential self._serialize = Serializer() self._deserialize = Deserializer() self._serialize.client_side_validation = False - self.metrics_batch = MetricsBatchOperations(self._client, self._config, self._serialize, self._deserialize) def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: """Runs the network request through the client's chained policies. 
@@ -83,6 +85,7 @@ def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: request_copy = deepcopy(request) path_format_arguments = { "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "apiVersion": self._serialize.url("self._config.api_version", self._config.api_version, "str"), } request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_configuration.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_configuration.py similarity index 53% rename from sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_configuration.py rename to sdk/monitor/azure-monitor-query/azure/monitor/query/_configuration.py index 87e673a3b12b..6ee9ddf5b776 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_configuration.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_configuration.py @@ -2,40 +2,48 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any +from typing import Any, TYPE_CHECKING from azure.core.pipeline import policies -VERSION = "unknown" +from ._version import VERSION +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential -class MonitorBatchMetricsClientConfiguration: # pylint: disable=too-many-instance-attributes - """Configuration for MonitorBatchMetricsClient. + +class MonitorQueryLogsClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for MonitorQueryLogsClient. Note that all parameters used to create this instance are saved as instance attributes. - :param endpoint: The regional endpoint to use, for example - https://eastus.metrics.monitor.azure.com. The region should match the region of the requested - resources. For global resources, the region should be 'global'. Required. + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :param endpoint: The Log Analytics service endpoint. Default value is + "https://api.loganalytics.io". :type endpoint: str - :keyword api_version: Api Version. Default value is "2024-02-01". Note that overriding this - default value may result in unsupported behavior. - :paramtype api_version: str + :keyword api_version: The service API version. Known values are "v1" and None. Default value is + "v1". Note that overriding this default value may result in unsupported behavior. 
+ :paramtype api_version: str or ~azure.monitor.query.models.Versions """ - def __init__(self, endpoint: str, **kwargs: Any) -> None: - api_version: str = kwargs.pop("api_version", "2024-02-01") + def __init__( + self, credential: "TokenCredential", endpoint: str = "https://api.loganalytics.io", **kwargs: Any + ) -> None: + api_version: str = kwargs.pop("api_version", "v1") - if endpoint is None: - raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + self.credential = credential self.endpoint = endpoint self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://api.loganalytics.io/.default"]) kwargs.setdefault("sdk_moniker", "monitor-query/{}".format(VERSION)) self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) @@ -50,3 +58,7 @@ def _configure(self, **kwargs: Any) -> None: self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_enums.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_enums.py index ec78c287fead..de70f1bc5240 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_enums.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_enums.py @@ -16,50 +16,3 @@ class LogsQueryStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): PARTIAL = "PartialError" SUCCESS = "Success" FAILURE = "Failure" - - -class MetricAggregationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The aggregation type of the metric.""" - - NONE = "None" - AVERAGE = "Average" - COUNT = "Count" - MINIMUM = "Minimum" - MAXIMUM = "Maximum" - TOTAL = "Total" - - -class MetricClass(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The class of the metric.""" - - AVAILABILITY = "Availability" - TRANSACTIONS = "Transactions" - ERRORS = "Errors" - LATENCY = "Latency" - SATURATION = "Saturation" - - -class MetricNamespaceClassification(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Kind of namespace""" - - PLATFORM = "Platform" - CUSTOM = "Custom" - QOS = "Qos" - - -class MetricUnit(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The unit of the metric.""" - - COUNT = "Count" - BYTES = "Bytes" - SECONDS = "Seconds" - COUNT_PER_SECOND = "CountPerSecond" - BYTES_PER_SECOND = "BytesPerSecond" - PERCENT = "Percent" - MILLI_SECONDS = "MilliSeconds" - BYTE_SECONDS = "ByteSeconds" - UNSPECIFIED = "Unspecified" - CORES = "Cores" - MILLI_CORES = "MilliCores" - NANO_CORES = "NanoCores" - BITS_PER_SECOND = "BitsPerSecond" diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/__init__.py deleted file mode 100644 index 70b9d32ba891..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. 
-# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -# pylint: disable=wrong-import-position - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import - -from ._client import MonitorQueryClient # type: ignore - -try: - from ._patch import __all__ as _patch_all - from ._patch import * -except ImportError: - _patch_all = [] -from ._patch import patch_sdk as _patch_sdk - -__all__ = [ - "MonitorQueryClient", -] -__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore - -_patch_sdk() diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_client.py deleted file mode 100644 index dec20111e5d0..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_client.py +++ /dev/null @@ -1,92 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from copy import deepcopy -from typing import Any -from typing_extensions import Self - -from azure.core import PipelineClient -from azure.core.pipeline import policies -from azure.core.rest import HttpRequest, HttpResponse - -from ._configuration import MonitorQueryClientConfiguration -from ._serialization import Deserializer, Serializer -from .operations import MetadataOperations, QueryOperations - - -class MonitorQueryClient: # pylint: disable=client-accepts-api-version-keyword - """Azure Monitor Query Python Client. - - :ivar query: QueryOperations operations - :vartype query: monitor_query_client.operations.QueryOperations - :ivar metadata: MetadataOperations operations - :vartype metadata: monitor_query_client.operations.MetadataOperations - :keyword endpoint: Service URL. Default value is "https://api.loganalytics.io/v1". 
- :paramtype endpoint: str - """ - - def __init__( # pylint: disable=missing-client-constructor-parameter-credential - self, *, endpoint: str = "https://api.loganalytics.io/v1", **kwargs: Any - ) -> None: - self._config = MonitorQueryClientConfiguration(**kwargs) - _policies = kwargs.pop("policies", None) - if _policies is None: - _policies = [ - policies.RequestIdPolicy(**kwargs), - self._config.headers_policy, - self._config.user_agent_policy, - self._config.proxy_policy, - policies.ContentDecodePolicy(**kwargs), - self._config.redirect_policy, - self._config.retry_policy, - self._config.authentication_policy, - self._config.custom_hook_policy, - self._config.logging_policy, - policies.DistributedTracingPolicy(**kwargs), - policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, - self._config.http_logging_policy, - ] - self._client: PipelineClient = PipelineClient(base_url=endpoint, policies=_policies, **kwargs) - - self._serialize = Serializer() - self._deserialize = Deserializer() - self._serialize.client_side_validation = False - self.query = QueryOperations(self._client, self._config, self._serialize, self._deserialize) - self.metadata = MetadataOperations(self._client, self._config, self._serialize, self._deserialize) - - def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: - """Runs the network request through the client's chained policies. - - >>> from azure.core.rest import HttpRequest - >>> request = HttpRequest("GET", "https://www.example.org/") - - >>> response = client.send_request(request) - - - For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request - - :param request: The network request you want to make. Required. - :type request: ~azure.core.rest.HttpRequest - :keyword bool stream: Whether the response payload will be streamed. Defaults to False. - :return: The response of your network call. Does not do error handling on your response. - :rtype: ~azure.core.rest.HttpResponse - """ - - request_copy = deepcopy(request) - request_copy.url = self._client.format_url(request_copy.url) - return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore - - def close(self) -> None: - self._client.close() - - def __enter__(self) -> Self: - self._client.__enter__() - return self - - def __exit__(self, *exc_details: Any) -> None: - self._client.__exit__(*exc_details) diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_configuration.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_configuration.py deleted file mode 100644 index 8962d9b6e7fe..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_configuration.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from typing import Any - -from azure.core.pipeline import policies - -VERSION = "unknown" - - -class MonitorQueryClientConfiguration: # pylint: disable=too-many-instance-attributes - """Configuration for MonitorQueryClient. 
- - Note that all parameters used to create this instance are saved as instance - attributes. - """ - - def __init__(self, **kwargs: Any) -> None: - - kwargs.setdefault("sdk_moniker", "monitor-query/{}".format(VERSION)) - self.polling_interval = kwargs.get("polling_interval", 30) - self._configure(**kwargs) - - def _configure(self, **kwargs: Any) -> None: - self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) - self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) - self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) - self.authentication_policy = kwargs.get("authentication_policy") diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_patch.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_patch.py deleted file mode 100644 index f7dd32510333..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_patch.py +++ /dev/null @@ -1,20 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -"""Customize generated code here. - -Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize -""" -from typing import List - -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level - - -def patch_sdk(): - """Do not remove from this file. - - `patch_sdk` is a last resort escape hatch that allows you to do customizations - you can't accomplish using the techniques described in - https://aka.ms/azsdk/python/dpcodegen/python/customize - """ diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/__init__.py deleted file mode 100644 index 70b9d32ba891..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -# pylint: disable=wrong-import-position - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import - -from ._client import MonitorQueryClient # type: ignore - -try: - from ._patch import __all__ as _patch_all - from ._patch import * -except ImportError: - _patch_all = [] -from ._patch import patch_sdk as _patch_sdk - -__all__ = [ - "MonitorQueryClient", -] -__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore - -_patch_sdk() diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_client.py deleted file mode 100644 index 64d5c312b008..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_client.py +++ /dev/null @@ -1,94 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from copy import deepcopy -from typing import Any, Awaitable -from typing_extensions import Self - -from azure.core import AsyncPipelineClient -from azure.core.pipeline import policies -from azure.core.rest import AsyncHttpResponse, HttpRequest - -from .._serialization import Deserializer, Serializer -from ._configuration import MonitorQueryClientConfiguration -from .operations import MetadataOperations, QueryOperations - - -class MonitorQueryClient: # pylint: disable=client-accepts-api-version-keyword - """Azure Monitor Query Python Client. - - :ivar query: QueryOperations operations - :vartype query: monitor_query_client.aio.operations.QueryOperations - :ivar metadata: MetadataOperations operations - :vartype metadata: monitor_query_client.aio.operations.MetadataOperations - :keyword endpoint: Service URL. Default value is "https://api.loganalytics.io/v1". 
- :paramtype endpoint: str - """ - - def __init__( # pylint: disable=missing-client-constructor-parameter-credential - self, *, endpoint: str = "https://api.loganalytics.io/v1", **kwargs: Any - ) -> None: - self._config = MonitorQueryClientConfiguration(**kwargs) - _policies = kwargs.pop("policies", None) - if _policies is None: - _policies = [ - policies.RequestIdPolicy(**kwargs), - self._config.headers_policy, - self._config.user_agent_policy, - self._config.proxy_policy, - policies.ContentDecodePolicy(**kwargs), - self._config.redirect_policy, - self._config.retry_policy, - self._config.authentication_policy, - self._config.custom_hook_policy, - self._config.logging_policy, - policies.DistributedTracingPolicy(**kwargs), - policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, - self._config.http_logging_policy, - ] - self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=endpoint, policies=_policies, **kwargs) - - self._serialize = Serializer() - self._deserialize = Deserializer() - self._serialize.client_side_validation = False - self.query = QueryOperations(self._client, self._config, self._serialize, self._deserialize) - self.metadata = MetadataOperations(self._client, self._config, self._serialize, self._deserialize) - - def send_request( - self, request: HttpRequest, *, stream: bool = False, **kwargs: Any - ) -> Awaitable[AsyncHttpResponse]: - """Runs the network request through the client's chained policies. - - >>> from azure.core.rest import HttpRequest - >>> request = HttpRequest("GET", "https://www.example.org/") - - >>> response = await client.send_request(request) - - - For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request - - :param request: The network request you want to make. Required. - :type request: ~azure.core.rest.HttpRequest - :keyword bool stream: Whether the response payload will be streamed. Defaults to False. - :return: The response of your network call. Does not do error handling on your response. - :rtype: ~azure.core.rest.AsyncHttpResponse - """ - - request_copy = deepcopy(request) - request_copy.url = self._client.format_url(request_copy.url) - return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore - - async def close(self) -> None: - await self._client.close() - - async def __aenter__(self) -> Self: - await self._client.__aenter__() - return self - - async def __aexit__(self, *exc_details: Any) -> None: - await self._client.__aexit__(*exc_details) diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_configuration.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_configuration.py deleted file mode 100644 index 4e0060b03211..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_configuration.py +++ /dev/null @@ -1,38 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from typing import Any - -from azure.core.pipeline import policies - -VERSION = "unknown" - - -class MonitorQueryClientConfiguration: # pylint: disable=too-many-instance-attributes - """Configuration for MonitorQueryClient. - - Note that all parameters used to create this instance are saved as instance - attributes. - """ - - def __init__(self, **kwargs: Any) -> None: - - kwargs.setdefault("sdk_moniker", "monitor-query/{}".format(VERSION)) - self.polling_interval = kwargs.get("polling_interval", 30) - self._configure(**kwargs) - - def _configure(self, **kwargs: Any) -> None: - self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) - self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) - self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) - self.authentication_policy = kwargs.get("authentication_policy") diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_patch.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_patch.py deleted file mode 100644 index f7dd32510333..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/_patch.py +++ /dev/null @@ -1,20 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -"""Customize generated code here. - -Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize -""" -from typing import List - -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level - - -def patch_sdk(): - """Do not remove from this file. - - `patch_sdk` is a last resort escape hatch that allows you to do customizations - you can't accomplish using the techniques described in - https://aka.ms/azsdk/python/dpcodegen/python/customize - """ diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/__init__.py deleted file mode 100644 index e4231c981f36..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -# pylint: disable=wrong-import-position - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import - -from ._operations import QueryOperations # type: ignore -from ._operations import MetadataOperations # type: ignore - -from ._patch import __all__ as _patch_all -from ._patch import * -from ._patch import patch_sdk as _patch_sdk - -__all__ = [ - "QueryOperations", - "MetadataOperations", -] -__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore -_patch_sdk() diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/_operations.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/_operations.py deleted file mode 100644 index e2fba7296b10..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/_operations.py +++ /dev/null @@ -1,2187 +0,0 @@ -# pylint: disable=too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict - -from ...operations._operations import ( - build_metadata_get_request, - build_metadata_post_request, - build_query_batch_request, - build_query_execute_request, - build_query_get_request, - build_query_resource_execute_request, - build_query_resource_execute_xms_request, - build_query_resource_get_request, - build_query_resource_get_xms_request, -) - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - - -class QueryOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~monitor_query_client.aio.MonitorQueryClient`'s - :attr:`query` attribute. 
- """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async - async def get( - self, workspace_id: str, *, query: str, timespan: Optional[datetime.timedelta] = None, **kwargs: Any - ) -> JSON: - """Execute an Analytics query. - - Executes an Analytics query for data. - - :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the - Properties blade in the Azure portal. Required. - :type workspace_id: str - :keyword query: The Analytics query. Learn more about the `Analytics query syntax - `_. - Required. - :paramtype query: str - :keyword timespan: Optional. The timespan over which to query data. This is an ISO8601 time - period value. This timespan is applied in addition to any that are specified in the query - expression. Default value is None. - :paramtype timespan: ~datetime.timedelta - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... - }, - "render": {}, - "statistics": {} - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[JSON] = kwargs.pop("cls", None) - - _request = build_query_get_request( - workspace_id=workspace_id, - query=query, - timespan=timespan, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore - - @overload - async def execute( - self, - workspace_id: str, - body: JSON, - *, - prefer: Optional[str] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> JSON: - """Execute an Analytics query. - - Executes an Analytics query for data. `Here - `_ is an example for - using POST with an Analytics query. - - :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the - Properties blade in the Azure portal. Required. - :type workspace_id: str - :param body: The Analytics query. Learn more about the `Analytics query syntax - `_. - Required. 
- :type body: JSON - :keyword prefer: Optional. The prefer header to set server timeout, query statistics and - visualization information. Default value is None. - :paramtype prefer: str - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - body = { - "query": "str", - "timespan": "str", - "workspaces": [ - "str" - ] - } - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... - }, - "render": {}, - "statistics": {} - } - """ - - @overload - async def execute( - self, - workspace_id: str, - body: IO[bytes], - *, - prefer: Optional[str] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> JSON: - """Execute an Analytics query. - - Executes an Analytics query for data. `Here - `_ is an example for - using POST with an Analytics query. - - :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the - Properties blade in the Azure portal. Required. - :type workspace_id: str - :param body: The Analytics query. Learn more about the `Analytics query syntax - `_. - Required. - :type body: IO[bytes] - :keyword prefer: Optional. The prefer header to set server timeout, query statistics and - visualization information. Default value is None. - :paramtype prefer: str - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... - }, - "render": {}, - "statistics": {} - } - """ - - @distributed_trace_async - async def execute( - self, workspace_id: str, body: Union[JSON, IO[bytes]], *, prefer: Optional[str] = None, **kwargs: Any - ) -> JSON: - """Execute an Analytics query. - - Executes an Analytics query for data. `Here - `_ is an example for - using POST with an Analytics query. - - :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the - Properties blade in the Azure portal. Required. - :type workspace_id: str - :param body: The Analytics query. Learn more about the `Analytics query syntax - `_. Is - either a JSON type or a IO[bytes] type. Required. - :type body: JSON or IO[bytes] - :keyword prefer: Optional. The prefer header to set server timeout, query statistics and - visualization information. Default value is None. - :paramtype prefer: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. 
code-block:: python - - # JSON input template you can fill out and use as your body input. - body = { - "query": "str", - "timespan": "str", - "workspaces": [ - "str" - ] - } - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... - }, - "render": {}, - "statistics": {} - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[JSON] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(body, (IOBase, bytes)): - _content = body - else: - _json = body - - _request = build_query_execute_request( - workspace_id=workspace_id, - prefer=prefer, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore - - @distributed_trace_async - async def resource_get( - self, resource_id: str, *, query: str, timespan: Optional[datetime.timedelta] = None, **kwargs: Any - ) -> JSON: - """Execute an Analytics query using resource URI. - - Executes an Analytics query for data in the context of a resource. `Here - `_ is an - example for using POST with an Analytics query. - - :param resource_id: The identifier of the resource. Required. - :type resource_id: str - :keyword query: The Analytics query. Learn more about the `Analytics query syntax - `_. - Required. - :paramtype query: str - :keyword timespan: Optional. The timespan over which to query data. This is an ISO8601 time - period value. This timespan is applied in addition to any that are specified in the query - expression. Default value is None. - :paramtype timespan: ~datetime.timedelta - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... 
- }, - "render": {}, - "statistics": {} - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[JSON] = kwargs.pop("cls", None) - - _request = build_query_resource_get_request( - resource_id=resource_id, - query=query, - timespan=timespan, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore - - @overload - async def resource_execute( - self, - resource_id: str, - body: JSON, - *, - prefer: Optional[str] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> JSON: - """Execute an Analytics query using resource ID. - - Executes an Analytics query for data in the context of a resource. `Here - `_ is an - example for using POST with an Analytics query. - - :param resource_id: The identifier of the resource. Required. - :type resource_id: str - :param body: The Analytics query. Learn more about the `Analytics query syntax - `_. - Required. - :type body: JSON - :keyword prefer: Optional. The prefer header to set server timeout, query statistics and - visualization information. Default value is None. - :paramtype prefer: str - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - body = { - "query": "str", - "timespan": "str", - "workspaces": [ - "str" - ] - } - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... - }, - "render": {}, - "statistics": {} - } - """ - - @overload - async def resource_execute( - self, - resource_id: str, - body: IO[bytes], - *, - prefer: Optional[str] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> JSON: - """Execute an Analytics query using resource ID. - - Executes an Analytics query for data in the context of a resource. `Here - `_ is an - example for using POST with an Analytics query. - - :param resource_id: The identifier of the resource. Required. - :type resource_id: str - :param body: The Analytics query. Learn more about the `Analytics query syntax - `_. - Required. - :type body: IO[bytes] - :keyword prefer: Optional. 
The prefer header to set server timeout, query statistics and - visualization information. Default value is None. - :paramtype prefer: str - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... - }, - "render": {}, - "statistics": {} - } - """ - - @distributed_trace_async - async def resource_execute( - self, resource_id: str, body: Union[JSON, IO[bytes]], *, prefer: Optional[str] = None, **kwargs: Any - ) -> JSON: - """Execute an Analytics query using resource ID. - - Executes an Analytics query for data in the context of a resource. `Here - `_ is an - example for using POST with an Analytics query. - - :param resource_id: The identifier of the resource. Required. - :type resource_id: str - :param body: The Analytics query. Learn more about the `Analytics query syntax - `_. Is - either a JSON type or a IO[bytes] type. Required. - :type body: JSON or IO[bytes] - :keyword prefer: Optional. The prefer header to set server timeout, query statistics and - visualization information. Default value is None. - :paramtype prefer: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - body = { - "query": "str", - "timespan": "str", - "workspaces": [ - "str" - ] - } - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... 
- }, - "render": {}, - "statistics": {} - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[JSON] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(body, (IOBase, bytes)): - _content = body - else: - _json = body - - _request = build_query_resource_execute_request( - resource_id=resource_id, - prefer=prefer, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore - - @overload - async def batch(self, body: JSON, *, content_type: str = "application/json", **kwargs: Any) -> JSON: - """Execute a batch of Analytics queries. - - Executes a batch of Analytics queries for data. `Here - `_ is an example for - using POST with an Analytics query. - - :param body: The batch request body. Required. - :type body: JSON - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - body = { - "requests": [ - { - "body": { - "query": "str", - "timespan": "str", - "workspaces": [ - "str" - ] - }, - "id": "str", - "workspace": "str", - "headers": { - "str": "str" - }, - "method": "POST", - "path": "/query" - } - ] - } - - # response body for status code(s): 200 - response == { - "responses": [ - { - "body": { - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... - }, - "render": {}, - "statistics": {}, - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ] - }, - "headers": { - "str": "str" - }, - "id": "str", - "status": 0 - } - ] - } - """ - - @overload - async def batch(self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any) -> JSON: - """Execute a batch of Analytics queries. - - Executes a batch of Analytics queries for data. `Here - `_ is an example for - using POST with an Analytics query. - - :param body: The batch request body. Required. - :type body: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
- Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "responses": [ - { - "body": { - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... - }, - "render": {}, - "statistics": {}, - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ] - }, - "headers": { - "str": "str" - }, - "id": "str", - "status": 0 - } - ] - } - """ - - @distributed_trace_async - async def batch(self, body: Union[JSON, IO[bytes]], **kwargs: Any) -> JSON: - """Execute a batch of Analytics queries. - - Executes a batch of Analytics queries for data. `Here - `_ is an example for - using POST with an Analytics query. - - :param body: The batch request body. Is either a JSON type or a IO[bytes] type. Required. - :type body: JSON or IO[bytes] - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - body = { - "requests": [ - { - "body": { - "query": "str", - "timespan": "str", - "workspaces": [ - "str" - ] - }, - "id": "str", - "workspace": "str", - "headers": { - "str": "str" - }, - "method": "POST", - "path": "/query" - } - ] - } - - # response body for status code(s): 200 - response == { - "responses": [ - { - "body": { - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... 
- }, - "render": {}, - "statistics": {}, - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ] - }, - "headers": { - "str": "str" - }, - "id": "str", - "status": 0 - } - ] - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[JSON] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(body, (IOBase, bytes)): - _content = body - else: - _json = body - - _request = build_query_batch_request( - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore - - @distributed_trace_async - async def resource_get_xms( - self, resource_id: str, *, query: str, timespan: Optional[datetime.timedelta] = None, **kwargs: Any - ) -> JSON: - """Execute an Analytics query using resource URI. - - Executes an Analytics query for data in the context of a resource. `Here - `_ is an - example for using POST with an Analytics query. - - :param resource_id: The identifier of the resource. Required. - :type resource_id: str - :keyword query: The Analytics query. Learn more about the `Analytics query syntax - `_. - Required. - :paramtype query: str - :keyword timespan: Optional. The timespan over which to query data. This is an ISO8601 time - period value. This timespan is applied in addition to any that are specified in the query - expression. Default value is None. - :paramtype timespan: ~datetime.timedelta - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... 
- }, - "render": {}, - "statistics": {} - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[JSON] = kwargs.pop("cls", None) - - _request = build_query_resource_get_xms_request( - resource_id=resource_id, - query=query, - timespan=timespan, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore - - @overload - async def resource_execute_xms( - self, - resource_id: str, - body: JSON, - *, - prefer: Optional[str] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> JSON: - """Execute an Analytics query using resource ID. - - Executes an Analytics query for data in the context of a resource. `Here - `_ is an - example for using POST with an Analytics query. - - :param resource_id: The identifier of the resource. Required. - :type resource_id: str - :param body: The Analytics query. Learn more about the `Analytics query syntax - `_. - Required. - :type body: JSON - :keyword prefer: Optional. The prefer header to set server timeout, query statistics and - visualization information. Default value is None. - :paramtype prefer: str - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - body = { - "query": "str", - "timespan": "str", - "workspaces": [ - "str" - ] - } - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... - }, - "render": {}, - "statistics": {} - } - """ - - @overload - async def resource_execute_xms( - self, - resource_id: str, - body: IO[bytes], - *, - prefer: Optional[str] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> JSON: - """Execute an Analytics query using resource ID. - - Executes an Analytics query for data in the context of a resource. `Here - `_ is an - example for using POST with an Analytics query. - - :param resource_id: The identifier of the resource. Required. - :type resource_id: str - :param body: The Analytics query. Learn more about the `Analytics query syntax - `_. - Required. - :type body: IO[bytes] - :keyword prefer: Optional. 
The prefer header to set server timeout, query statistics and - visualization information. Default value is None. - :paramtype prefer: str - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... - }, - "render": {}, - "statistics": {} - } - """ - - @distributed_trace_async - async def resource_execute_xms( - self, resource_id: str, body: Union[JSON, IO[bytes]], *, prefer: Optional[str] = None, **kwargs: Any - ) -> JSON: - """Execute an Analytics query using resource ID. - - Executes an Analytics query for data in the context of a resource. `Here - `_ is an - example for using POST with an Analytics query. - - :param resource_id: The identifier of the resource. Required. - :type resource_id: str - :param body: The Analytics query. Learn more about the `Analytics query syntax - `_. Is - either a JSON type or a IO[bytes] type. Required. - :type body: JSON or IO[bytes] - :keyword prefer: Optional. The prefer header to set server timeout, query statistics and - visualization information. Default value is None. - :paramtype prefer: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - body = { - "query": "str", - "timespan": "str", - "workspaces": [ - "str" - ] - } - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... 
- }, - "render": {}, - "statistics": {} - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[JSON] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(body, (IOBase, bytes)): - _content = body - else: - _json = body - - _request = build_query_resource_execute_xms_request( - resource_id=resource_id, - prefer=prefer, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore - - -class MetadataOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~monitor_query_client.aio.MonitorQueryClient`'s - :attr:`metadata` attribute. - """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async - async def get(self, workspace_id: str, **kwargs: Any) -> JSON: - """Gets metadata information. - - Retrieve the metadata information for the workspace, including its schema, functions, workspace - info, categories etc. - - :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the - Properties blade in the Azure portal. Required. - :type workspace_id: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. 
code-block:: python - - # response body for status code(s): 200 - response == { - "applications": [ - { - "id": "str", - "name": "str", - "region": "str", - "resourceId": "str", - "related": { - "functions": [ - "str" - ], - "tables": [ - "str" - ] - } - } - ], - "categories": [ - { - "displayName": "str", - "id": "str", - "description": "str", - "related": { - "functions": [ - "str" - ], - "queries": [ - "str" - ], - "resourceTypes": [ - "str" - ], - "solutions": [ - "str" - ], - "tables": [ - "str" - ] - } - } - ], - "functions": [ - { - "body": "str", - "id": "str", - "name": "str", - "description": "str", - "displayName": "str", - "parameters": "str", - "properties": {}, - "related": { - "categories": [ - "str" - ], - "resourceTypes": [ - "str" - ], - "solutions": [ - "str" - ], - "tables": [ - "str" - ], - "workspaces": [ - "str" - ] - }, - "tags": {} - } - ], - "permissions": [ - { - "workspaces": [ - { - "resourceId": "str", - "denyTables": [ - "str" - ] - } - ], - "applications": [ - { - "resourceId": "str" - } - ], - "resources": [ - { - "resourceId": "str", - "denyTables": [ - "str" - ] - } - ] - } - ], - "queries": [ - { - "body": "str", - "id": "str", - "description": "str", - "displayName": "str", - "labels": [ - "str" - ], - "properties": {}, - "related": { - "categories": [ - "str" - ], - "resourceTypes": [ - "str" - ], - "solutions": [ - "str" - ], - "tables": [ - "str" - ] - }, - "tags": {} - } - ], - "resourceTypes": [ - { - "id": "str", - "type": "str", - "description": "str", - "displayName": "str", - "labels": [ - "str" - ], - "properties": {}, - "related": { - "categories": [ - "str" - ], - "functions": [ - "str" - ], - "queries": [ - "str" - ], - "resources": [ - "str" - ], - "tables": [ - "str" - ], - "workspaces": [ - "str" - ] - }, - "tags": {} - } - ], - "resources": [ - {} - ], - "solutions": [ - { - "id": "str", - "name": "str", - "related": { - "tables": [ - "str" - ], - "categories": [ - "str" - ], - "functions": [ - "str" - ], - "queries": [ - "str" - ], - "workspaces": [ - "str" - ] - }, - "description": "str", - "displayName": "str", - "properties": {}, - "tags": {} - } - ], - "tables": [ - { - "id": "str", - "name": "str", - "columns": [ - { - "name": "str", - "type": "str", - "description": "str", - "isPreferredFacet": bool, - "source": {} - } - ], - "description": "str", - "labels": [ - "str" - ], - "properties": {}, - "related": { - "categories": [ - "str" - ], - "functions": [ - "str" - ], - "queries": [ - "str" - ], - "resourceTypes": [ - "str" - ], - "solutions": [ - "str" - ], - "workspaces": [ - "str" - ] - }, - "tags": {}, - "timespanColumn": "str" - } - ], - "workspaces": [ - { - "id": "str", - "name": "str", - "region": "str", - "resourceId": "str", - "related": { - "functions": [ - "str" - ], - "resourceTypes": [ - "str" - ], - "resources": [ - "str" - ], - "solutions": [ - "str" - ], - "tables": [ - "str" - ] - } - } - ] - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[JSON] = kwargs.pop("cls", None) - - _request = build_metadata_get_request( - workspace_id=workspace_id, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # 
pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore - - @distributed_trace_async - async def post(self, workspace_id: str, **kwargs: Any) -> JSON: - """Gets metadata information. - - Retrieve the metadata information for the workspace, including its schema, functions, workspace - info, categories etc. - - :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the - Properties blade in the Azure portal. Required. - :type workspace_id: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "applications": [ - { - "id": "str", - "name": "str", - "region": "str", - "resourceId": "str", - "related": { - "functions": [ - "str" - ], - "tables": [ - "str" - ] - } - } - ], - "categories": [ - { - "displayName": "str", - "id": "str", - "description": "str", - "related": { - "functions": [ - "str" - ], - "queries": [ - "str" - ], - "resourceTypes": [ - "str" - ], - "solutions": [ - "str" - ], - "tables": [ - "str" - ] - } - } - ], - "functions": [ - { - "body": "str", - "id": "str", - "name": "str", - "description": "str", - "displayName": "str", - "parameters": "str", - "properties": {}, - "related": { - "categories": [ - "str" - ], - "resourceTypes": [ - "str" - ], - "solutions": [ - "str" - ], - "tables": [ - "str" - ], - "workspaces": [ - "str" - ] - }, - "tags": {} - } - ], - "permissions": [ - { - "workspaces": [ - { - "resourceId": "str", - "denyTables": [ - "str" - ] - } - ], - "applications": [ - { - "resourceId": "str" - } - ], - "resources": [ - { - "resourceId": "str", - "denyTables": [ - "str" - ] - } - ] - } - ], - "queries": [ - { - "body": "str", - "id": "str", - "description": "str", - "displayName": "str", - "labels": [ - "str" - ], - "properties": {}, - "related": { - "categories": [ - "str" - ], - "resourceTypes": [ - "str" - ], - "solutions": [ - "str" - ], - "tables": [ - "str" - ] - }, - "tags": {} - } - ], - "resourceTypes": [ - { - "id": "str", - "type": "str", - "description": "str", - "displayName": "str", - "labels": [ - "str" - ], - "properties": {}, - "related": { - "categories": [ - "str" - ], - "functions": [ - "str" - ], - "queries": [ - "str" - ], - "resources": [ - "str" - ], - "tables": [ - "str" - ], - "workspaces": [ - "str" - ] - }, - "tags": {} - } - ], - "resources": [ - {} - ], - "solutions": [ - { - "id": "str", - "name": "str", - "related": { - "tables": [ - "str" - ], - "categories": [ - "str" - ], - "functions": [ - "str" - ], - "queries": [ - "str" - ], - "workspaces": [ - "str" - ] - }, - "description": "str", - "displayName": "str", - "properties": {}, - "tags": {} - } - ], - "tables": [ - { - "id": "str", - "name": "str", - "columns": [ - { - "name": "str", - "type": "str", - "description": "str", - "isPreferredFacet": bool, - "source": {} - } - ], - "description": "str", - "labels": [ - "str" - ], - "properties": {}, - "related": { - "categories": [ - "str" - ], - "functions": [ - "str" - ], - "queries": [ - "str" - ], - 
"resourceTypes": [ - "str" - ], - "solutions": [ - "str" - ], - "workspaces": [ - "str" - ] - }, - "tags": {}, - "timespanColumn": "str" - } - ], - "workspaces": [ - { - "id": "str", - "name": "str", - "region": "str", - "resourceId": "str", - "related": { - "functions": [ - "str" - ], - "resourceTypes": [ - "str" - ], - "resources": [ - "str" - ], - "solutions": [ - "str" - ], - "tables": [ - "str" - ] - } - } - ] - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[JSON] = kwargs.pop("cls", None) - - _request = build_metadata_post_request( - workspace_id=workspace_id, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/__init__.py deleted file mode 100644 index 7e503e3d6846..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -# pylint: disable=wrong-import-position - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import - -from ._client import MonitorMetricsClient # type: ignore - -try: - from ._patch import __all__ as _patch_all - from ._patch import * -except ImportError: - _patch_all = [] -from ._patch import patch_sdk as _patch_sdk - -__all__ = [ - "MonitorMetricsClient", -] -__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore - -_patch_sdk() diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_client.py deleted file mode 100644 index 8c0ad4537a9e..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_client.py +++ /dev/null @@ -1,102 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. 
-# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from copy import deepcopy -from typing import Any -from typing_extensions import Self - -from azure.core import PipelineClient -from azure.core.pipeline import policies -from azure.core.rest import HttpRequest, HttpResponse - -from ._configuration import MonitorMetricsClientConfiguration -from ._serialization import Deserializer, Serializer -from .operations import MetricDefinitionsOperations, MetricNamespacesOperations, MetricsOperations - - -class MonitorMetricsClient: - """Azure Monitor Metrics Python Client. - - :ivar metric_definitions: MetricDefinitionsOperations operations - :vartype metric_definitions: monitor_metrics_client.operations.MetricDefinitionsOperations - :ivar metrics: MetricsOperations operations - :vartype metrics: monitor_metrics_client.operations.MetricsOperations - :ivar metric_namespaces: MetricNamespacesOperations operations - :vartype metric_namespaces: monitor_metrics_client.operations.MetricNamespacesOperations - :keyword endpoint: Service URL. Default value is "https://management.azure.com". - :paramtype endpoint: str - :keyword api_version: Api Version. Default value is "2024-02-01". Note that overriding this - default value may result in unsupported behavior. - :paramtype api_version: str - """ - - def __init__( # pylint: disable=missing-client-constructor-parameter-credential - self, *, endpoint: str = "https://management.azure.com", **kwargs: Any - ) -> None: - self._config = MonitorMetricsClientConfiguration(**kwargs) - _policies = kwargs.pop("policies", None) - if _policies is None: - _policies = [ - policies.RequestIdPolicy(**kwargs), - self._config.headers_policy, - self._config.user_agent_policy, - self._config.proxy_policy, - policies.ContentDecodePolicy(**kwargs), - self._config.redirect_policy, - self._config.retry_policy, - self._config.authentication_policy, - self._config.custom_hook_policy, - self._config.logging_policy, - policies.DistributedTracingPolicy(**kwargs), - policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, - self._config.http_logging_policy, - ] - self._client: PipelineClient = PipelineClient(base_url=endpoint, policies=_policies, **kwargs) - - self._serialize = Serializer() - self._deserialize = Deserializer() - self._serialize.client_side_validation = False - self.metric_definitions = MetricDefinitionsOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.metrics = MetricsOperations(self._client, self._config, self._serialize, self._deserialize) - self.metric_namespaces = MetricNamespacesOperations( - self._client, self._config, self._serialize, self._deserialize - ) - - def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: - """Runs the network request through the client's chained policies. - - >>> from azure.core.rest import HttpRequest - >>> request = HttpRequest("GET", "https://www.example.org/") - - >>> response = client.send_request(request) - - - For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request - - :param request: The network request you want to make. Required. - :type request: ~azure.core.rest.HttpRequest - :keyword bool stream: Whether the response payload will be streamed. Defaults to False. 
- :return: The response of your network call. Does not do error handling on your response. - :rtype: ~azure.core.rest.HttpResponse - """ - - request_copy = deepcopy(request) - request_copy.url = self._client.format_url(request_copy.url) - return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore - - def close(self) -> None: - self._client.close() - - def __enter__(self) -> Self: - self._client.__enter__() - return self - - def __exit__(self, *exc_details: Any) -> None: - self._client.__exit__(*exc_details) diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_configuration.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_configuration.py deleted file mode 100644 index e84c97f5a561..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_configuration.py +++ /dev/null @@ -1,44 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from typing import Any - -from azure.core.pipeline import policies - -VERSION = "unknown" - - -class MonitorMetricsClientConfiguration: # pylint: disable=too-many-instance-attributes - """Configuration for MonitorMetricsClient. - - Note that all parameters used to create this instance are saved as instance - attributes. - - :keyword api_version: Api Version. Default value is "2024-02-01". Note that overriding this - default value may result in unsupported behavior. 
- :paramtype api_version: str - """ - - def __init__(self, **kwargs: Any) -> None: - api_version: str = kwargs.pop("api_version", "2024-02-01") - - self.api_version = api_version - kwargs.setdefault("sdk_moniker", "monitor-query/{}".format(VERSION)) - self.polling_interval = kwargs.get("polling_interval", 30) - self._configure(**kwargs) - - def _configure(self, **kwargs: Any) -> None: - self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) - self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) - self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) - self.authentication_policy = kwargs.get("authentication_policy") diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_serialization.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_serialization.py deleted file mode 100644 index e2ad51869908..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_serialization.py +++ /dev/null @@ -1,2118 +0,0 @@ -# pylint: disable=too-many-lines -# -------------------------------------------------------------------------- -# -# Copyright (c) Microsoft Corporation. All rights reserved. -# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the ""Software""), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. 
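The `_configure` method above resolves every pipeline policy the same way: take the caller's keyword argument if present, otherwise build a default from the remaining kwargs. A minimal sketch of that resolution for the retry slot (the helper name is mine; `azure.core.pipeline.policies.RetryPolicy` and its `retry_total` keyword are real):

```python
from azure.core.pipeline import policies


def resolve_retry_policy(**kwargs):
    # Same shape as _configure above: honor a caller-supplied policy,
    # otherwise construct the default from the remaining kwargs.
    return kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)


default_policy = resolve_retry_policy()  # stock defaults
custom_policy = resolve_retry_policy(retry_policy=policies.RetryPolicy(retry_total=10))
```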
-# -# -------------------------------------------------------------------------- - -# pyright: reportUnnecessaryTypeIgnoreComment=false - -from base64 import b64decode, b64encode -import calendar -import datetime -import decimal -import email -from enum import Enum -import json -import logging -import re -import sys -import codecs -from typing import ( - Dict, - Any, - cast, - Optional, - Union, - AnyStr, - IO, - Mapping, - Callable, - TypeVar, - MutableMapping, - Type, - List, -) - -try: - from urllib import quote # type: ignore -except ImportError: - from urllib.parse import quote -import xml.etree.ElementTree as ET - -import isodate # type: ignore - -from azure.core.exceptions import DeserializationError, SerializationError -from azure.core.serialization import NULL as CoreNull - -_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") - -ModelType = TypeVar("ModelType", bound="Model") -JSON = MutableMapping[str, Any] - - -class RawDeserializer: - - # Accept "text" because we're open minded people... - JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") - - # Name used in context - CONTEXT_NAME = "deserialized_data" - - @classmethod - def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: - """Decode data according to content-type. - - Accept a stream of data as well, but will be load at once in memory for now. - - If no content-type, will return the string version (not bytes, not stream) - - :param data: Input, could be bytes or stream (will be decoded with UTF8) or text - :type data: str or bytes or IO - :param str content_type: The content type. - :return: The deserialized data. - :rtype: object - """ - if hasattr(data, "read"): - # Assume a stream - data = cast(IO, data).read() - - if isinstance(data, bytes): - data_as_str = data.decode(encoding="utf-8-sig") - else: - # Explain to mypy the correct type. - data_as_str = cast(str, data) - - # Remove Byte Order Mark if present in string - data_as_str = data_as_str.lstrip(_BOM) - - if content_type is None: - return data - - if cls.JSON_REGEXP.match(content_type): - try: - return json.loads(data_as_str) - except ValueError as err: - raise DeserializationError("JSON is invalid: {}".format(err), err) from err - elif "xml" in (content_type or []): - try: - - try: - if isinstance(data, unicode): # type: ignore - # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string - data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore - except NameError: - pass - - return ET.fromstring(data_as_str) # nosec - except ET.ParseError as err: - # It might be because the server has an issue, and returned JSON with - # content-type XML.... - # So let's try a JSON load, and if it's still broken - # let's flow the initial exception - def _json_attemp(data): - try: - return True, json.loads(data) - except ValueError: - return False, None # Don't care about this one - - success, json_result = _json_attemp(data) - if success: - return json_result - # If i'm here, it's not JSON, it's not XML, let's scream - # and raise the last context in this block (the XML exception) - # The function hack is because Py2.7 messes up with exception - # context otherwise. 
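The `JSON_REGEXP` gate above accepts plain and vendor-suffixed JSON content types while rejecting everything else; a quick check of what it matches:

```python
# Mirror of the content-type gate above: "application/json", "text/json",
# and suffix types such as "application/ld+json" all count as JSON.
import re

JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$")

for ct in ("application/json", "text/json", "application/ld+json", "application/xml"):
    print(ct, bool(JSON_REGEXP.match(ct)))
# application/json True
# text/json True
# application/ld+json True
# application/xml False
```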
- _LOGGER.critical("Wasn't XML not JSON, failing") - raise DeserializationError("XML is invalid") from err - elif content_type.startswith("text/"): - return data_as_str - raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) - - @classmethod - def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: - """Deserialize from HTTP response. - - Use bytes and headers to NOT use any requests/aiohttp or whatever - specific implementation. - Headers will tested for "content-type" - - :param bytes body_bytes: The body of the response. - :param dict headers: The headers of the response. - :returns: The deserialized data. - :rtype: object - """ - # Try to use content-type from headers if available - content_type = None - if "content-type" in headers: - content_type = headers["content-type"].split(";")[0].strip().lower() - # Ouch, this server did not declare what it sent... - # Let's guess it's JSON... - # Also, since Autorest was considering that an empty body was a valid JSON, - # need that test as well.... - else: - content_type = "application/json" - - if body_bytes: - return cls.deserialize_from_text(body_bytes, content_type) - return None - - -_LOGGER = logging.getLogger(__name__) - -try: - _long_type = long # type: ignore -except NameError: - _long_type = int - - -class UTC(datetime.tzinfo): - """Time Zone info for handling UTC""" - - def utcoffset(self, dt): - """UTF offset for UTC is 0. - - :param datetime.datetime dt: The datetime - :returns: The offset - :rtype: datetime.timedelta - """ - return datetime.timedelta(0) - - def tzname(self, dt): - """Timestamp representation. - - :param datetime.datetime dt: The datetime - :returns: The timestamp representation - :rtype: str - """ - return "Z" - - def dst(self, dt): - """No daylight saving for UTC. - - :param datetime.datetime dt: The datetime - :returns: The daylight saving time - :rtype: datetime.timedelta - """ - return datetime.timedelta(hours=1) - - -try: - from datetime import timezone as _FixedOffset # type: ignore -except ImportError: # Python 2.7 - - class _FixedOffset(datetime.tzinfo): # type: ignore - """Fixed offset in minutes east from UTC. - Copy/pasted from Python doc - :param datetime.timedelta offset: offset in timedelta format - """ - - def __init__(self, offset) -> None: - self.__offset = offset - - def utcoffset(self, dt): - return self.__offset - - def tzname(self, dt): - return str(self.__offset.total_seconds() / 3600) - - def __repr__(self): - return "".format(self.tzname(None)) - - def dst(self, dt): - return datetime.timedelta(0) - - def __getinitargs__(self): - return (self.__offset,) - - -try: - from datetime import timezone - - TZ_UTC = timezone.utc -except ImportError: - TZ_UTC = UTC() # type: ignore - -_FLATTEN = re.compile(r"(? None: - self.additional_properties: Optional[Dict[str, Any]] = {} - for k in kwargs: # pylint: disable=consider-using-dict-items - if k not in self._attribute_map: - _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) - elif k in self._validation and self._validation[k].get("readonly", False): - _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) - else: - setattr(self, k, kwargs[k]) - - def __eq__(self, other: Any) -> bool: - """Compare objects by comparing all attributes. 
- - :param object other: The object to compare - :returns: True if objects are equal - :rtype: bool - """ - if isinstance(other, self.__class__): - return self.__dict__ == other.__dict__ - return False - - def __ne__(self, other: Any) -> bool: - """Compare objects by comparing all attributes. - - :param object other: The object to compare - :returns: True if objects are not equal - :rtype: bool - """ - return not self.__eq__(other) - - def __str__(self) -> str: - return str(self.__dict__) - - @classmethod - def enable_additional_properties_sending(cls) -> None: - cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} - - @classmethod - def is_xml_model(cls) -> bool: - try: - cls._xml_map # type: ignore - except AttributeError: - return False - return True - - @classmethod - def _create_xml_node(cls): - """Create XML node. - - :returns: The XML node - :rtype: xml.etree.ElementTree.Element - """ - try: - xml_map = cls._xml_map # type: ignore - except AttributeError: - xml_map = {} - - return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) - - def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: - """Return the JSON that would be sent to server from this model. - - This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. - - If you want XML serialization, you can pass the kwargs is_xml=True. - - :param bool keep_readonly: If you want to serialize the readonly attributes - :returns: A dict JSON compatible object - :rtype: dict - """ - serializer = Serializer(self._infer_class_models()) - return serializer._serialize( # type: ignore # pylint: disable=protected-access - self, keep_readonly=keep_readonly, **kwargs - ) - - def as_dict( - self, - keep_readonly: bool = True, - key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, - **kwargs: Any - ) -> JSON: - """Return a dict that can be serialized using json.dump. - - Advanced usage might optionally use a callback as parameter: - - .. code::python - - def my_key_transformer(key, attr_desc, value): - return key - - Key is the attribute name used in Python. Attr_desc - is a dict of metadata. Currently contains 'type' with the - msrest type and 'key' with the RestAPI encoded key. - Value is the current value in this object. - - The string returned will be used to serialize the key. - If the return type is a list, this is considered hierarchical - result dict. - - See the three examples in this file: - - - attribute_transformer - - full_restapi_key_transformer - - last_restapi_key_transformer - - If you want XML serialization, you can pass the kwargs is_xml=True. - - :param bool keep_readonly: If you want to serialize the readonly attributes - :param function key_transformer: A key transformer function. - :returns: A dict JSON compatible object - :rtype: dict - """ - serializer = Serializer(self._infer_class_models()) - return serializer._serialize( # type: ignore # pylint: disable=protected-access - self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs - ) - - @classmethod - def _infer_class_models(cls): - try: - str_models = cls.__module__.rsplit(".", 1)[0] - models = sys.modules[str_models] - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} - if cls.__name__ not in client_models: - raise ValueError("Not Autorest generated code") - except Exception: # pylint: disable=broad-exception-caught - # Assume it's not Autorest generated (tests?). 
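The `as_dict` docstring above describes the key-transformer callback; a concrete sketch, assuming the `(key, value)` tuple contract that `attribute_transformer` and `_serialize` use (the transformer name is mine):

```python
# Custom key transformer for as_dict above: receives the Python attribute
# name, its _attribute_map entry, and the current value, and returns the
# (key, value) pair to serialize under.
def camel_case_transformer(key, attr_desc, value):
    parts = key.split("_")
    return parts[0] + "".join(p.title() for p in parts[1:]), value


# Usage, where `model` is any generated Model instance:
#     model.as_dict(key_transformer=camel_case_transformer)
# e.g. attribute "additional_properties" serializes as "additionalProperties".
```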
Add ourselves as dependencies. - client_models = {cls.__name__: cls} - return client_models - - @classmethod - def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType: - """Parse a str using the RestAPI syntax and return a model. - - :param str data: A str using RestAPI structure. JSON by default. - :param str content_type: JSON by default, set application/xml if XML. - :returns: An instance of this model - :raises: DeserializationError if something went wrong - :rtype: ModelType - """ - deserializer = Deserializer(cls._infer_class_models()) - return deserializer(cls.__name__, data, content_type=content_type) # type: ignore - - @classmethod - def from_dict( - cls: Type[ModelType], - data: Any, - key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, - content_type: Optional[str] = None, - ) -> ModelType: - """Parse a dict using given key extractor return a model. - - By default consider key - extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor - and last_rest_key_case_insensitive_extractor) - - :param dict data: A dict using RestAPI structure - :param function key_extractors: A key extractor function. - :param str content_type: JSON by default, set application/xml if XML. - :returns: An instance of this model - :raises: DeserializationError if something went wrong - :rtype: ModelType - """ - deserializer = Deserializer(cls._infer_class_models()) - deserializer.key_extractors = ( # type: ignore - [ # type: ignore - attribute_key_case_insensitive_extractor, - rest_key_case_insensitive_extractor, - last_rest_key_case_insensitive_extractor, - ] - if key_extractors is None - else key_extractors - ) - return deserializer(cls.__name__, data, content_type=content_type) # type: ignore - - @classmethod - def _flatten_subtype(cls, key, objects): - if "_subtype_map" not in cls.__dict__: - return {} - result = dict(cls._subtype_map[key]) - for valuetype in cls._subtype_map[key].values(): - result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access - return result - - @classmethod - def _classify(cls, response, objects): - """Check the class _subtype_map for any child classes. - We want to ignore any inherited _subtype_maps. - - :param dict response: The initial data - :param dict objects: The class objects - :returns: The class to be used - :rtype: class - """ - for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): - subtype_value = None - - if not isinstance(response, ET.Element): - rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] - subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) - else: - subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) - if subtype_value: - # Try to match base class. 
Can be class name only - # (bug to fix in Autorest to support x-ms-discriminator-name) - if cls.__name__ == subtype_value: - return cls - flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) - try: - return objects[flatten_mapping_type[subtype_value]] # type: ignore - except KeyError: - _LOGGER.warning( - "Subtype value %s has no mapping, use base class %s.", - subtype_value, - cls.__name__, - ) - break - else: - _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) - break - return cls - - @classmethod - def _get_rest_key_parts(cls, attr_key): - """Get the RestAPI key of this attr, split it and decode part - :param str attr_key: Attribute key must be in attribute_map. - :returns: A list of RestAPI part - :rtype: list - """ - rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) - return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] - - -def _decode_attribute_map_key(key): - """This decode a key in an _attribute_map to the actual key we want to look at - inside the received data. - - :param str key: A key string from the generated code - :returns: The decoded key - :rtype: str - """ - return key.replace("\\.", ".") - - -class Serializer(object): # pylint: disable=too-many-public-methods - """Request object model serializer.""" - - basic_types = {str: "str", int: "int", bool: "bool", float: "float"} - - _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} - days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} - months = { - 1: "Jan", - 2: "Feb", - 3: "Mar", - 4: "Apr", - 5: "May", - 6: "Jun", - 7: "Jul", - 8: "Aug", - 9: "Sep", - 10: "Oct", - 11: "Nov", - 12: "Dec", - } - validation = { - "min_length": lambda x, y: len(x) < y, - "max_length": lambda x, y: len(x) > y, - "minimum": lambda x, y: x < y, - "maximum": lambda x, y: x > y, - "minimum_ex": lambda x, y: x <= y, - "maximum_ex": lambda x, y: x >= y, - "min_items": lambda x, y: len(x) < y, - "max_items": lambda x, y: len(x) > y, - "pattern": lambda x, y: not re.match(y, x, re.UNICODE), - "unique": lambda x, y: len(x) != len(set(x)), - "multiple": lambda x, y: x % y != 0, - } - - def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: - self.serialize_type = { - "iso-8601": Serializer.serialize_iso, - "rfc-1123": Serializer.serialize_rfc, - "unix-time": Serializer.serialize_unix, - "duration": Serializer.serialize_duration, - "date": Serializer.serialize_date, - "time": Serializer.serialize_time, - "decimal": Serializer.serialize_decimal, - "long": Serializer.serialize_long, - "bytearray": Serializer.serialize_bytearray, - "base64": Serializer.serialize_base64, - "object": self.serialize_object, - "[]": self.serialize_iter, - "{}": self.serialize_dict, - } - self.dependencies: Dict[str, type] = dict(classes) if classes else {} - self.key_transformer = full_restapi_key_transformer - self.client_side_validation = True - - def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals - self, target_obj, data_type=None, **kwargs - ): - """Serialize data into a string according to type. - - :param object target_obj: The data to be serialized. - :param str data_type: The type to be serialized from. - :rtype: str, dict - :raises: SerializationError if serialization fails. - :returns: The serialized data. 
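`_get_rest_key_parts` and `_decode_attribute_map_key` above together turn a flattened RestAPI key into path segments, splitting on unescaped dots and then unescaping the rest; a runnable sketch matching the `_FLATTEN` definition earlier in this file:

```python
# Flattened-key handling in miniature: split on dots not preceded by a
# backslash, then decode the escaped dots inside each segment.
import re

_FLATTEN = re.compile(r"(?<!\\)\.")


def split_rest_key(key: str) -> list:
    return [part.replace("\\.", ".") for part in _FLATTEN.split(key)]


print(split_rest_key("properties.displayName"))  # ['properties', 'displayName']
print(split_rest_key(r"odata\.type"))            # ['odata.type'] -- escaped dot kept
```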
- """ - key_transformer = kwargs.get("key_transformer", self.key_transformer) - keep_readonly = kwargs.get("keep_readonly", False) - if target_obj is None: - return None - - attr_name = None - class_name = target_obj.__class__.__name__ - - if data_type: - return self.serialize_data(target_obj, data_type, **kwargs) - - if not hasattr(target_obj, "_attribute_map"): - data_type = type(target_obj).__name__ - if data_type in self.basic_types.values(): - return self.serialize_data(target_obj, data_type, **kwargs) - - # Force "is_xml" kwargs if we detect a XML model - try: - is_xml_model_serialization = kwargs["is_xml"] - except KeyError: - is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) - - serialized = {} - if is_xml_model_serialization: - serialized = target_obj._create_xml_node() # pylint: disable=protected-access - try: - attributes = target_obj._attribute_map # pylint: disable=protected-access - for attr, attr_desc in attributes.items(): - attr_name = attr - if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access - attr_name, {} - ).get("readonly", False): - continue - - if attr_name == "additional_properties" and attr_desc["key"] == "": - if target_obj.additional_properties is not None: - serialized.update(target_obj.additional_properties) - continue - try: - - orig_attr = getattr(target_obj, attr) - if is_xml_model_serialization: - pass # Don't provide "transformer" for XML for now. Keep "orig_attr" - else: # JSON - keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) - keys = keys if isinstance(keys, list) else [keys] - - kwargs["serialization_ctxt"] = attr_desc - new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) - - if is_xml_model_serialization: - xml_desc = attr_desc.get("xml", {}) - xml_name = xml_desc.get("name", attr_desc["key"]) - xml_prefix = xml_desc.get("prefix", None) - xml_ns = xml_desc.get("ns", None) - if xml_desc.get("attr", False): - if xml_ns: - ET.register_namespace(xml_prefix, xml_ns) - xml_name = "{{{}}}{}".format(xml_ns, xml_name) - serialized.set(xml_name, new_attr) # type: ignore - continue - if xml_desc.get("text", False): - serialized.text = new_attr # type: ignore - continue - if isinstance(new_attr, list): - serialized.extend(new_attr) # type: ignore - elif isinstance(new_attr, ET.Element): - # If the down XML has no XML/Name, - # we MUST replace the tag with the local tag. But keeping the namespaces. 
- if "name" not in getattr(orig_attr, "_xml_map", {}): - splitted_tag = new_attr.tag.split("}") - if len(splitted_tag) == 2: # Namespace - new_attr.tag = "}".join([splitted_tag[0], xml_name]) - else: - new_attr.tag = xml_name - serialized.append(new_attr) # type: ignore - else: # That's a basic type - # Integrate namespace if necessary - local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) - local_node.text = str(new_attr) - serialized.append(local_node) # type: ignore - else: # JSON - for k in reversed(keys): # type: ignore - new_attr = {k: new_attr} - - _new_attr = new_attr - _serialized = serialized - for k in keys: # type: ignore - if k not in _serialized: - _serialized.update(_new_attr) # type: ignore - _new_attr = _new_attr[k] # type: ignore - _serialized = _serialized[k] - except ValueError as err: - if isinstance(err, SerializationError): - raise - - except (AttributeError, KeyError, TypeError) as err: - msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) - raise SerializationError(msg) from err - return serialized - - def body(self, data, data_type, **kwargs): - """Serialize data intended for a request body. - - :param object data: The data to be serialized. - :param str data_type: The type to be serialized from. - :rtype: dict - :raises: SerializationError if serialization fails. - :raises: ValueError if data is None - :returns: The serialized request body - """ - - # Just in case this is a dict - internal_data_type_str = data_type.strip("[]{}") - internal_data_type = self.dependencies.get(internal_data_type_str, None) - try: - is_xml_model_serialization = kwargs["is_xml"] - except KeyError: - if internal_data_type and issubclass(internal_data_type, Model): - is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) - else: - is_xml_model_serialization = False - if internal_data_type and not isinstance(internal_data_type, Enum): - try: - deserializer = Deserializer(self.dependencies) - # Since it's on serialization, it's almost sure that format is not JSON REST - # We're not able to deal with additional properties for now. - deserializer.additional_properties_detection = False - if is_xml_model_serialization: - deserializer.key_extractors = [ # type: ignore - attribute_key_case_insensitive_extractor, - ] - else: - deserializer.key_extractors = [ - rest_key_case_insensitive_extractor, - attribute_key_case_insensitive_extractor, - last_rest_key_case_insensitive_extractor, - ] - data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access - except DeserializationError as err: - raise SerializationError("Unable to build a model: " + str(err)) from err - - return self._serialize(data, data_type, **kwargs) - - def url(self, name, data, data_type, **kwargs): - """Serialize data intended for a URL path. - - :param str name: The name of the URL path parameter. - :param object data: The data to be serialized. - :param str data_type: The type to be serialized from. - :rtype: str - :returns: The serialized URL path - :raises: TypeError if serialization fails. 
- :raises: ValueError if data is None - """ - try: - output = self.serialize_data(data, data_type, **kwargs) - if data_type == "bool": - output = json.dumps(output) - - if kwargs.get("skip_quote") is True: - output = str(output) - output = output.replace("{", quote("{")).replace("}", quote("}")) - else: - output = quote(str(output), safe="") - except SerializationError as exc: - raise TypeError("{} must be type {}.".format(name, data_type)) from exc - return output - - def query(self, name, data, data_type, **kwargs): - """Serialize data intended for a URL query. - - :param str name: The name of the query parameter. - :param object data: The data to be serialized. - :param str data_type: The type to be serialized from. - :rtype: str, list - :raises: TypeError if serialization fails. - :raises: ValueError if data is None - :returns: The serialized query parameter - """ - try: - # Treat the list aside, since we don't want to encode the div separator - if data_type.startswith("["): - internal_data_type = data_type[1:-1] - do_quote = not kwargs.get("skip_quote", False) - return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) - - # Not a list, regular serialization - output = self.serialize_data(data, data_type, **kwargs) - if data_type == "bool": - output = json.dumps(output) - if kwargs.get("skip_quote") is True: - output = str(output) - else: - output = quote(str(output), safe="") - except SerializationError as exc: - raise TypeError("{} must be type {}.".format(name, data_type)) from exc - return str(output) - - def header(self, name, data, data_type, **kwargs): - """Serialize data intended for a request header. - - :param str name: The name of the header. - :param object data: The data to be serialized. - :param str data_type: The type to be serialized from. - :rtype: str - :raises: TypeError if serialization fails. - :raises: ValueError if data is None - :returns: The serialized header - """ - try: - if data_type in ["[str]"]: - data = ["" if d is None else d for d in data] - - output = self.serialize_data(data, data_type, **kwargs) - if data_type == "bool": - output = json.dumps(output) - except SerializationError as exc: - raise TypeError("{} must be type {}.".format(name, data_type)) from exc - return str(output) - - def serialize_data(self, data, data_type, **kwargs): - """Serialize generic data according to supplied data type. - - :param object data: The data to be serialized. - :param str data_type: The type to be serialized from. - :raises: AttributeError if required data is None. - :raises: ValueError if data is None - :raises: SerializationError if serialization fails. - :returns: The serialized data. 
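The `skip_quote` branch in `url()` and `query()` above leaves pre-encoded values intact apart from literal braces, while the default path percent-encodes everything; a stdlib-only sketch of the two paths (the helper name is mine):

```python
from urllib.parse import quote


def serialize_url_value(value, skip_quote=False):
    if skip_quote:
        # Trust the caller's encoding, but braces would collide with URL
        # templating, so they are always escaped.
        return str(value).replace("{", quote("{")).replace("}", quote("}"))
    return quote(str(value), safe="")


print(serialize_url_value("my resource/group"))                  # my%20resource%2Fgroup
print(serialize_url_value("already/encoded", skip_quote=True))   # already/encoded
```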
- :rtype: str, int, float, bool, dict, list - """ - if data is None: - raise ValueError("No value for given attribute") - - try: - if data is CoreNull: - return None - if data_type in self.basic_types.values(): - return self.serialize_basic(data, data_type, **kwargs) - - if data_type in self.serialize_type: - return self.serialize_type[data_type](data, **kwargs) - - # If dependencies is empty, try with current data class - # It has to be a subclass of Enum anyway - enum_type = self.dependencies.get(data_type, data.__class__) - if issubclass(enum_type, Enum): - return Serializer.serialize_enum(data, enum_obj=enum_type) - - iter_type = data_type[0] + data_type[-1] - if iter_type in self.serialize_type: - return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) - - except (ValueError, TypeError) as err: - msg = "Unable to serialize value: {!r} as type: {!r}." - raise SerializationError(msg.format(data, data_type)) from err - return self._serialize(data, **kwargs) - - @classmethod - def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements - custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) - if custom_serializer: - return custom_serializer - if kwargs.get("is_xml", False): - return cls._xml_basic_types_serializers.get(data_type) - - @classmethod - def serialize_basic(cls, data, data_type, **kwargs): - """Serialize basic builting data type. - Serializes objects to str, int, float or bool. - - Possible kwargs: - - basic_types_serializers dict[str, callable] : If set, use the callable as serializer - - is_xml bool : If set, use xml_basic_types_serializers - - :param obj data: Object to be serialized. - :param str data_type: Type of object in the iterable. - :rtype: str, int, float, bool - :return: serialized object - """ - custom_serializer = cls._get_custom_serializers(data_type, **kwargs) - if custom_serializer: - return custom_serializer(data) - if data_type == "str": - return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec # pylint: disable=eval-used - - @classmethod - def serialize_unicode(cls, data): - """Special handling for serializing unicode strings in Py2. - Encode to UTF-8 if unicode, otherwise handle as a str. - - :param str data: Object to be serialized. - :rtype: str - :return: serialized object - """ - try: # If I received an enum, return its value - return data.value - except AttributeError: - pass - - try: - if isinstance(data, unicode): # type: ignore - # Don't change it, JSON and XML ElementTree are totally able - # to serialize correctly u'' strings - return data - except NameError: - return str(data) - return str(data) - - def serialize_iter(self, data, iter_type, div=None, **kwargs): - """Serialize iterable. - - Supported kwargs: - - serialization_ctxt dict : The current entry of _attribute_map, or same format. - serialization_ctxt['type'] should be same as data_type. - - is_xml bool : If set, serialize as XML - - :param list data: Object to be serialized. - :param str iter_type: Type of object in the iterable. - :param str div: If set, this str will be used to combine the elements - in the iterable into a combined string. Default is 'None'. - Defaults to False. 
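`serialize_data` above recognizes collection types purely from the type string's first and last characters, which index the `"[]"`/`"{}"` entries of `serialize_type`; a tiny demonstration of that dispatch key:

```python
# The iter_type trick above in isolation: "[str]" dispatches to the list
# serializer, "{int}" to the dict serializer, with the element type inside.
for data_type in ("[str]", "{int}", "[iso-8601]"):
    iter_type, element_type = data_type[0] + data_type[-1], data_type[1:-1]
    print(data_type, "->", iter_type, "of", element_type)
# [str] -> [] of str
# {int} -> {} of int
# [iso-8601] -> [] of iso-8601
```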
- :rtype: list, str - :return: serialized iterable - """ - if isinstance(data, str): - raise SerializationError("Refuse str type as a valid iter type.") - - serialization_ctxt = kwargs.get("serialization_ctxt", {}) - is_xml = kwargs.get("is_xml", False) - - serialized = [] - for d in data: - try: - serialized.append(self.serialize_data(d, iter_type, **kwargs)) - except ValueError as err: - if isinstance(err, SerializationError): - raise - serialized.append(None) - - if kwargs.get("do_quote", False): - serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] - - if div: - serialized = ["" if s is None else str(s) for s in serialized] - serialized = div.join(serialized) - - if "xml" in serialization_ctxt or is_xml: - # XML serialization is more complicated - xml_desc = serialization_ctxt.get("xml", {}) - xml_name = xml_desc.get("name") - if not xml_name: - xml_name = serialization_ctxt["key"] - - # Create a wrap node if necessary (use the fact that Element and list have "append") - is_wrapped = xml_desc.get("wrapped", False) - node_name = xml_desc.get("itemsName", xml_name) - if is_wrapped: - final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) - else: - final_result = [] - # All list elements to "local_node" - for el in serialized: - if isinstance(el, ET.Element): - el_node = el - else: - el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) - if el is not None: # Otherwise it writes "None" :-p - el_node.text = str(el) - final_result.append(el_node) - return final_result - return serialized - - def serialize_dict(self, attr, dict_type, **kwargs): - """Serialize a dictionary of objects. - - :param dict attr: Object to be serialized. - :param str dict_type: Type of object in the dictionary. - :rtype: dict - :return: serialized dictionary - """ - serialization_ctxt = kwargs.get("serialization_ctxt", {}) - serialized = {} - for key, value in attr.items(): - try: - serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) - except ValueError as err: - if isinstance(err, SerializationError): - raise - serialized[self.serialize_unicode(key)] = None - - if "xml" in serialization_ctxt: - # XML serialization is more complicated - xml_desc = serialization_ctxt["xml"] - xml_name = xml_desc["name"] - - final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) - for key, value in serialized.items(): - ET.SubElement(final_result, key).text = value - return final_result - - return serialized - - def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements - """Serialize a generic object. - This will be handled as a dictionary. If object passed in is not - a basic type (str, int, float, dict, list) it will simply be - cast to str. - - :param dict attr: Object to be serialized. 
- :rtype: dict or str - :return: serialized object - """ - if attr is None: - return None - if isinstance(attr, ET.Element): - return attr - obj_type = type(attr) - if obj_type in self.basic_types: - return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) - if obj_type is _long_type: - return self.serialize_long(attr) - if obj_type is str: - return self.serialize_unicode(attr) - if obj_type is datetime.datetime: - return self.serialize_iso(attr) - if obj_type is datetime.date: - return self.serialize_date(attr) - if obj_type is datetime.time: - return self.serialize_time(attr) - if obj_type is datetime.timedelta: - return self.serialize_duration(attr) - if obj_type is decimal.Decimal: - return self.serialize_decimal(attr) - - # If it's a model or I know this dependency, serialize as a Model - if obj_type in self.dependencies.values() or isinstance(attr, Model): - return self._serialize(attr) - - if obj_type == dict: - serialized = {} - for key, value in attr.items(): - try: - serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) - except ValueError: - serialized[self.serialize_unicode(key)] = None - return serialized - - if obj_type == list: - serialized = [] - for obj in attr: - try: - serialized.append(self.serialize_object(obj, **kwargs)) - except ValueError: - pass - return serialized - return str(attr) - - @staticmethod - def serialize_enum(attr, enum_obj=None): - try: - result = attr.value - except AttributeError: - result = attr - try: - enum_obj(result) # type: ignore - return result - except ValueError as exc: - for enum_value in enum_obj: # type: ignore - if enum_value.value.lower() == str(attr).lower(): - return enum_value.value - error = "{!r} is not valid value for enum {!r}" - raise SerializationError(error.format(attr, enum_obj)) from exc - - @staticmethod - def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument - """Serialize bytearray into base-64 string. - - :param str attr: Object to be serialized. - :rtype: str - :return: serialized base64 - """ - return b64encode(attr).decode() - - @staticmethod - def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument - """Serialize str into base-64 string. - - :param str attr: Object to be serialized. - :rtype: str - :return: serialized base64 - """ - encoded = b64encode(attr).decode("ascii") - return encoded.strip("=").replace("+", "-").replace("/", "_") - - @staticmethod - def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument - """Serialize Decimal object to float. - - :param decimal attr: Object to be serialized. - :rtype: float - :return: serialized decimal - """ - return float(attr) - - @staticmethod - def serialize_long(attr, **kwargs): # pylint: disable=unused-argument - """Serialize long (Py2) or int (Py3). - - :param int attr: Object to be serialized. - :rtype: int/long - :return: serialized long - """ - return _long_type(attr) - - @staticmethod - def serialize_date(attr, **kwargs): # pylint: disable=unused-argument - """Serialize Date object into ISO-8601 formatted string. - - :param Date attr: Object to be serialized. - :rtype: str - :return: serialized date - """ - if isinstance(attr, str): - attr = isodate.parse_date(attr) - t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) - return t - - @staticmethod - def serialize_time(attr, **kwargs): # pylint: disable=unused-argument - """Serialize Time object into ISO-8601 formatted string. - - :param datetime.time attr: Object to be serialized. 
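`serialize_base64` above produces the base64url alphabet by post-processing standard base64: padding is stripped and `+`/`/` are swapped for `-`/`_`. An equivalent stdlib sketch (the function name is mine):

```python
from base64 import b64encode


def to_base64url(data: bytes) -> str:
    # Standard base64, then strip '=' padding and switch to the URL-safe alphabet.
    return b64encode(data).decode("ascii").strip("=").replace("+", "-").replace("/", "_")


print(to_base64url(b"\xfb\xff\xfe"))  # '-__-' : '-' and '_' replace '+' and '/'
```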
- :rtype: str - :return: serialized time - """ - if isinstance(attr, str): - attr = isodate.parse_time(attr) - t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) - if attr.microsecond: - t += ".{:02}".format(attr.microsecond) - return t - - @staticmethod - def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument - """Serialize TimeDelta object into ISO-8601 formatted string. - - :param TimeDelta attr: Object to be serialized. - :rtype: str - :return: serialized duration - """ - if isinstance(attr, str): - attr = isodate.parse_duration(attr) - return isodate.duration_isoformat(attr) - - @staticmethod - def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument - """Serialize Datetime object into RFC-1123 formatted string. - - :param Datetime attr: Object to be serialized. - :rtype: str - :raises: TypeError if format invalid. - :return: serialized rfc - """ - try: - if not attr.tzinfo: - _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") - utc = attr.utctimetuple() - except AttributeError as exc: - raise TypeError("RFC1123 object must be valid Datetime object.") from exc - - return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( - Serializer.days[utc.tm_wday], - utc.tm_mday, - Serializer.months[utc.tm_mon], - utc.tm_year, - utc.tm_hour, - utc.tm_min, - utc.tm_sec, - ) - - @staticmethod - def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument - """Serialize Datetime object into ISO-8601 formatted string. - - :param Datetime attr: Object to be serialized. - :rtype: str - :raises: SerializationError if format invalid. - :return: serialized iso - """ - if isinstance(attr, str): - attr = isodate.parse_datetime(attr) - try: - if not attr.tzinfo: - _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") - utc = attr.utctimetuple() - if utc.tm_year > 9999 or utc.tm_year < 1: - raise OverflowError("Hit max or min date") - - microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") - if microseconds: - microseconds = "." + microseconds - date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( - utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec - ) - return date + microseconds + "Z" - except (ValueError, OverflowError) as err: - msg = "Unable to serialize datetime object." - raise SerializationError(msg) from err - except AttributeError as err: - msg = "ISO-8601 object must be valid Datetime object." - raise TypeError(msg) from err - - @staticmethod - def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument - """Serialize Datetime object into IntTime format. - This is represented as seconds. - - :param Datetime attr: Object to be serialized. - :rtype: int - :raises: SerializationError if format invalid - :return: serialied unix - """ - if isinstance(attr, int): - return attr - try: - if not attr.tzinfo: - _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") - return int(calendar.timegm(attr.utctimetuple())) - except AttributeError as exc: - raise TypeError("Unix time object must be valid Datetime object.") from exc - - -def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument - key = attr_desc["key"] - working_data = data - - while "." 
in key: - # Need the cast, as for some reasons "split" is typed as list[str | Any] - dict_keys = cast(List[str], _FLATTEN.split(key)) - if len(dict_keys) == 1: - key = _decode_attribute_map_key(dict_keys[0]) - break - working_key = _decode_attribute_map_key(dict_keys[0]) - working_data = working_data.get(working_key, data) - if working_data is None: - # If at any point while following flatten JSON path see None, it means - # that all properties under are None as well - return None - key = ".".join(dict_keys[1:]) - - return working_data.get(key) - - -def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements - attr, attr_desc, data -): - key = attr_desc["key"] - working_data = data - - while "." in key: - dict_keys = _FLATTEN.split(key) - if len(dict_keys) == 1: - key = _decode_attribute_map_key(dict_keys[0]) - break - working_key = _decode_attribute_map_key(dict_keys[0]) - working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) - if working_data is None: - # If at any point while following flatten JSON path see None, it means - # that all properties under are None as well - return None - key = ".".join(dict_keys[1:]) - - if working_data: - return attribute_key_case_insensitive_extractor(key, None, working_data) - - -def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument - """Extract the attribute in "data" based on the last part of the JSON path key. - - :param str attr: The attribute to extract - :param dict attr_desc: The attribute description - :param dict data: The data to extract from - :rtype: object - :returns: The extracted attribute - """ - key = attr_desc["key"] - dict_keys = _FLATTEN.split(key) - return attribute_key_extractor(dict_keys[-1], None, data) - - -def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument - """Extract the attribute in "data" based on the last part of the JSON path key. - - This is the case insensitive version of "last_rest_key_extractor" - :param str attr: The attribute to extract - :param dict attr_desc: The attribute description - :param dict data: The data to extract from - :rtype: object - :returns: The extracted attribute - """ - key = attr_desc["key"] - dict_keys = _FLATTEN.split(key) - return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) - - -def attribute_key_extractor(attr, _, data): - return data.get(attr) - - -def attribute_key_case_insensitive_extractor(attr, _, data): - found_key = None - lower_attr = attr.lower() - for key in data: - if lower_attr == key.lower(): - found_key = key - break - - return data.get(found_key) - - -def _extract_name_from_internal_type(internal_type): - """Given an internal type XML description, extract correct XML name with namespace. 
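`rest_key_extractor` above walks a nested response dict along the flattened key, returning `None` as soon as any level is missing; a simplified toy version (the real extractor splits on unescaped dots only and also handles partially flattened data):

```python
def extract(key: str, data: dict):
    parts = key.split(".")  # real code uses _FLATTEN to respect escaped dots
    for part in parts[:-1]:
        data = data.get(part)
        if data is None:
            return None  # everything underneath is absent as well
    return data.get(parts[-1])


payload = {"properties": {"provisioningState": "Succeeded"}}
print(extract("properties.provisioningState", payload))  # Succeeded
print(extract("properties.missing.deep", payload))       # None
```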
- - :param dict internal_type: An model type - :rtype: tuple - :returns: A tuple XML name + namespace dict - """ - internal_type_xml_map = getattr(internal_type, "_xml_map", {}) - xml_name = internal_type_xml_map.get("name", internal_type.__name__) - xml_ns = internal_type_xml_map.get("ns", None) - if xml_ns: - xml_name = "{{{}}}{}".format(xml_ns, xml_name) - return xml_name - - -def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements - if isinstance(data, dict): - return None - - # Test if this model is XML ready first - if not isinstance(data, ET.Element): - return None - - xml_desc = attr_desc.get("xml", {}) - xml_name = xml_desc.get("name", attr_desc["key"]) - - # Look for a children - is_iter_type = attr_desc["type"].startswith("[") - is_wrapped = xml_desc.get("wrapped", False) - internal_type = attr_desc.get("internalType", None) - internal_type_xml_map = getattr(internal_type, "_xml_map", {}) - - # Integrate namespace if necessary - xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) - if xml_ns: - xml_name = "{{{}}}{}".format(xml_ns, xml_name) - - # If it's an attribute, that's simple - if xml_desc.get("attr", False): - return data.get(xml_name) - - # If it's x-ms-text, that's simple too - if xml_desc.get("text", False): - return data.text - - # Scenario where I take the local name: - # - Wrapped node - # - Internal type is an enum (considered basic types) - # - Internal type has no XML/Name node - if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): - children = data.findall(xml_name) - # If internal type has a local name and it's not a list, I use that name - elif not is_iter_type and internal_type and "name" in internal_type_xml_map: - xml_name = _extract_name_from_internal_type(internal_type) - children = data.findall(xml_name) - # That's an array - else: - if internal_type: # Complex type, ignore itemsName and use the complex type name - items_name = _extract_name_from_internal_type(internal_type) - else: - items_name = xml_desc.get("itemsName", xml_name) - children = data.findall(items_name) - - if len(children) == 0: - if is_iter_type: - if is_wrapped: - return None # is_wrapped no node, we want None - return [] # not wrapped, assume empty list - return None # Assume it's not there, maybe an optional node. - - # If is_iter_type and not wrapped, return all found children - if is_iter_type: - if not is_wrapped: - return children - # Iter and wrapped, should have found one node only (the wrap one) - if len(children) != 1: - raise DeserializationError( - "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long - xml_name - ) - ) - return list(children[0]) # Might be empty list and that's ok. - - # Here it's not a itertype, we should have found one element only or empty - if len(children) > 1: - raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) - return children[0] - - -class Deserializer(object): - """Response object model deserializer. - - :param dict classes: Class type dictionary for deserializing complex types. - :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. 
- """ - - basic_types = {str: "str", int: "int", bool: "bool", float: "float"} - - valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") - - def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: - self.deserialize_type = { - "iso-8601": Deserializer.deserialize_iso, - "rfc-1123": Deserializer.deserialize_rfc, - "unix-time": Deserializer.deserialize_unix, - "duration": Deserializer.deserialize_duration, - "date": Deserializer.deserialize_date, - "time": Deserializer.deserialize_time, - "decimal": Deserializer.deserialize_decimal, - "long": Deserializer.deserialize_long, - "bytearray": Deserializer.deserialize_bytearray, - "base64": Deserializer.deserialize_base64, - "object": self.deserialize_object, - "[]": self.deserialize_iter, - "{}": self.deserialize_dict, - } - self.deserialize_expected_types = { - "duration": (isodate.Duration, datetime.timedelta), - "iso-8601": (datetime.datetime), - } - self.dependencies: Dict[str, type] = dict(classes) if classes else {} - self.key_extractors = [rest_key_extractor, xml_key_extractor] - # Additional properties only works if the "rest_key_extractor" is used to - # extract the keys. Making it to work whatever the key extractor is too much - # complicated, with no real scenario for now. - # So adding a flag to disable additional properties detection. This flag should be - # used if your expect the deserialization to NOT come from a JSON REST syntax. - # Otherwise, result are unexpected - self.additional_properties_detection = True - - def __call__(self, target_obj, response_data, content_type=None): - """Call the deserializer to process a REST response. - - :param str target_obj: Target data type to deserialize to. - :param requests.Response response_data: REST response object. - :param str content_type: Swagger "produces" if available. - :raises: DeserializationError if deserialization fails. - :return: Deserialized object. - :rtype: object - """ - data = self._unpack_content(response_data, content_type) - return self._deserialize(target_obj, data) - - def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements - """Call the deserializer on a model. - - Data needs to be already deserialized as JSON or XML ElementTree - - :param str target_obj: Target data type to deserialize to. - :param object data: Object to deserialize. - :raises: DeserializationError if deserialization fails. - :return: Deserialized object. 
- :rtype: object - """ - # This is already a model, go recursive just in case - if hasattr(data, "_attribute_map"): - constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] - try: - for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access - if attr in constants: - continue - value = getattr(data, attr) - if value is None: - continue - local_type = mapconfig["type"] - internal_data_type = local_type.strip("[]{}") - if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): - continue - setattr(data, attr, self._deserialize(local_type, value)) - return data - except AttributeError: - return - - response, class_name = self._classify_target(target_obj, data) - - if isinstance(response, str): - return self.deserialize_data(data, response) - if isinstance(response, type) and issubclass(response, Enum): - return self.deserialize_enum(data, response) - - if data is None or data is CoreNull: - return data - try: - attributes = response._attribute_map # type: ignore # pylint: disable=protected-access - d_attrs = {} - for attr, attr_desc in attributes.items(): - # Check empty string. If it's not empty, someone has a real "additionalProperties"... - if attr == "additional_properties" and attr_desc["key"] == "": - continue - raw_value = None - # Enhance attr_desc with some dynamic data - attr_desc = attr_desc.copy() # Do a copy, do not change the real one - internal_data_type = attr_desc["type"].strip("[]{}") - if internal_data_type in self.dependencies: - attr_desc["internalType"] = self.dependencies[internal_data_type] - - for key_extractor in self.key_extractors: - found_value = key_extractor(attr, attr_desc, data) - if found_value is not None: - if raw_value is not None and raw_value != found_value: - msg = ( - "Ignoring extracted value '%s' from %s for key '%s'" - " (duplicate extraction, follow extractors order)" - ) - _LOGGER.warning(msg, found_value, key_extractor, attr) - continue - raw_value = found_value - - value = self.deserialize_data(raw_value, attr_desc["type"]) - d_attrs[attr] = value - except (AttributeError, TypeError, KeyError) as err: - msg = "Unable to deserialize to object: " + class_name # type: ignore - raise DeserializationError(msg) from err - additional_properties = self._build_additional_properties(attributes, data) - return self._instantiate_model(response, d_attrs, additional_properties) - - def _build_additional_properties(self, attribute_map, data): - if not self.additional_properties_detection: - return None - if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": - # Check empty string. If it's not empty, someone has a real "additionalProperties" - return None - if isinstance(data, ET.Element): - data = {el.tag: el.text for el in data} - - known_keys = { - _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) - for desc in attribute_map.values() - if desc["key"] != "" - } - present_keys = set(data.keys()) - missing_keys = present_keys - known_keys - return {key: data[key] for key in missing_keys} - - def _classify_target(self, target, data): - """Check to see whether the deserialization target object can - be classified into a subclass. - Once classification has been determined, initialize object. - - :param str target: The target object type to deserialize to. - :param str/dict data: The response data to deserialize. - :return: The classified target object and its class name. 
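`_build_additional_properties` above keeps any top-level payload key not claimed by the attribute map; a stdlib-only miniature (simplified: the real code splits keys with `_FLATTEN` and honors the `additional_properties_detection` flag):

```python
def find_additional_properties(attribute_map: dict, data: dict) -> dict:
    # Known keys are the first segment of each mapped RestAPI key; anything
    # else in the payload is an "additional property".
    known = {desc["key"].split(".")[0] for desc in attribute_map.values() if desc["key"]}
    return {k: v for k, v in data.items() if k not in known}


attr_map = {"name": {"key": "name", "type": "str"}}
print(find_additional_properties(attr_map, {"name": "a", "x-extra": 1}))  # {'x-extra': 1}
```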
- :rtype: tuple - """ - if target is None: - return None, None - - if isinstance(target, str): - try: - target = self.dependencies[target] - except KeyError: - return target, target - - try: - target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access - except AttributeError: - pass # Target is not a Model, no classify - return target, target.__class__.__name__ # type: ignore - - def failsafe_deserialize(self, target_obj, data, content_type=None): - """Ignores any errors encountered in deserialization, - and falls back to not deserializing the object. Recommended - for use in error deserialization, as we want to return the - HttpResponseError to users, and not have them deal with - a deserialization error. - - :param str target_obj: The target object type to deserialize to. - :param str/dict data: The response data to deserialize. - :param str content_type: Swagger "produces" if available. - :return: Deserialized object. - :rtype: object - """ - try: - return self(target_obj, data, content_type=content_type) - except: # pylint: disable=bare-except - _LOGGER.debug( - "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True - ) - return None - - @staticmethod - def _unpack_content(raw_data, content_type=None): - """Extract the correct structure for deserialization. - - If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. - if we can't, raise. Your Pipeline should have a RawDeserializer. - - If not a pipeline response and raw_data is bytes or string, use content-type - to decode it. If no content-type, try JSON. - - If raw_data is something else, bypass all logic and return it directly. - - :param obj raw_data: Data to be processed. - :param str content_type: How to parse if raw_data is a string/bytes. - :raises JSONDecodeError: If JSON is requested and parsing is impossible. - :raises UnicodeDecodeError: If bytes is not UTF8 - :rtype: object - :return: Unpacked content. - """ - # Assume this is enough to detect a Pipeline Response without importing it - context = getattr(raw_data, "context", {}) - if context: - if RawDeserializer.CONTEXT_NAME in context: - return context[RawDeserializer.CONTEXT_NAME] - raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") - - # Assume this is enough to recognize universal_http.ClientResponse without importing it - if hasattr(raw_data, "body"): - return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) - - # Assume this enough to recognize requests.Response without importing it. - if hasattr(raw_data, "_content_consumed"): - return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) - - if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): - return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore - return raw_data - - def _instantiate_model(self, response, attrs, additional_properties=None): - """Instantiate a response model passing in deserialized args. - - :param Response response: The response model class. - :param dict attrs: The deserialized response attributes. - :param dict additional_properties: Additional properties to be set. - :rtype: Response - :return: The instantiated response model. 
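`failsafe_deserialize` exists so that parsing a malformed error payload can never mask the original HTTP failure; it returns None instead of raising. A usage sketch, where `StorageError` and `response_body` are invented names:

    error = deserializer.failsafe_deserialize("StorageError", response_body, content_type="application/json")
    if error is None:
        pass  # fall back to surfacing the plain HttpResponseError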
- """ - if callable(response): - subtype = getattr(response, "_subtype_map", {}) - try: - readonly = [ - k - for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore - if v.get("readonly") - ] - const = [ - k - for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore - if v.get("constant") - ] - kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} - response_obj = response(**kwargs) - for attr in readonly: - setattr(response_obj, attr, attrs.get(attr)) - if additional_properties: - response_obj.additional_properties = additional_properties # type: ignore - return response_obj - except TypeError as err: - msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore - raise DeserializationError(msg + str(err)) from err - else: - try: - for attr, value in attrs.items(): - setattr(response, attr, value) - return response - except Exception as exp: - msg = "Unable to populate response model. " - msg += "Type: {}, Error: {}".format(type(response), exp) - raise DeserializationError(msg) from exp - - def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements - """Process data for deserialization according to data type. - - :param str data: The response string to be deserialized. - :param str data_type: The type to deserialize to. - :raises: DeserializationError if deserialization fails. - :return: Deserialized object. - :rtype: object - """ - if data is None: - return data - - try: - if not data_type: - return data - if data_type in self.basic_types.values(): - return self.deserialize_basic(data, data_type) - if data_type in self.deserialize_type: - if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): - return data - - is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment - "object", - "[]", - r"{}", - ] - if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: - return None - data_val = self.deserialize_type[data_type](data) - return data_val - - iter_type = data_type[0] + data_type[-1] - if iter_type in self.deserialize_type: - return self.deserialize_type[iter_type](data, data_type[1:-1]) - - obj_type = self.dependencies[data_type] - if issubclass(obj_type, Enum): - if isinstance(data, ET.Element): - data = data.text - return self.deserialize_enum(data, obj_type) - - except (ValueError, TypeError, AttributeError) as err: - msg = "Unable to deserialize response data." - msg += " Data: {}, {}".format(data, data_type) - raise DeserializationError(msg) from err - return self._deserialize(obj_type, data) - - def deserialize_iter(self, attr, iter_type): - """Deserialize an iterable. - - :param list attr: Iterable to be deserialized. - :param str iter_type: The type of object in the iterable. - :return: Deserialized iterable. - :rtype: list - """ - if attr is None: - return None - if isinstance(attr, ET.Element): # If I receive an element here, get the children - attr = list(attr) - if not isinstance(attr, (list, set)): - raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) - return [self.deserialize_data(a, iter_type) for a in attr] - - def deserialize_dict(self, attr, dict_type): - """Deserialize a dictionary. - - :param dict/list attr: Dictionary to be deserialized. Also accepts - a list of key, value pairs. - :param str dict_type: The object type of the items in the dictionary. 
- :return: Deserialized dictionary. - :rtype: dict - """ - if isinstance(attr, list): - return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} - - if isinstance(attr, ET.Element): - # Transform value into {"Key": "value"} - attr = {el.tag: el.text for el in attr} - return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} - - def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements - """Deserialize a generic object. - This will be handled as a dictionary. - - :param dict attr: Dictionary to be deserialized. - :return: Deserialized object. - :rtype: dict - :raises: TypeError if non-builtin datatype encountered. - """ - if attr is None: - return None - if isinstance(attr, ET.Element): - # Do no recurse on XML, just return the tree as-is - return attr - if isinstance(attr, str): - return self.deserialize_basic(attr, "str") - obj_type = type(attr) - if obj_type in self.basic_types: - return self.deserialize_basic(attr, self.basic_types[obj_type]) - if obj_type is _long_type: - return self.deserialize_long(attr) - - if obj_type == dict: - deserialized = {} - for key, value in attr.items(): - try: - deserialized[key] = self.deserialize_object(value, **kwargs) - except ValueError: - deserialized[key] = None - return deserialized - - if obj_type == list: - deserialized = [] - for obj in attr: - try: - deserialized.append(self.deserialize_object(obj, **kwargs)) - except ValueError: - pass - return deserialized - - error = "Cannot deserialize generic object with type: " - raise TypeError(error + str(obj_type)) - - def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements - """Deserialize basic builtin data type from string. - Will attempt to convert to str, int, float and bool. - This function will also accept '1', '0', 'true' and 'false' as - valid bool values. - - :param str attr: response string to be deserialized. - :param str data_type: deserialization data type. - :return: Deserialized basic type. - :rtype: str, int, float or bool - :raises: TypeError if string format is not valid. - """ - # If we're here, data is supposed to be a basic type. - # If it's still an XML node, take the text - if isinstance(attr, ET.Element): - attr = attr.text - if not attr: - if data_type == "str": - # None or '', node is empty string. - return "" - # None or '', node with a strong type is None. - # Don't try to model "empty bool" or "empty int" - return None - - if data_type == "bool": - if attr in [True, False, 1, 0]: - return bool(attr) - if isinstance(attr, str): - if attr.lower() in ["true", "1"]: - return True - if attr.lower() in ["false", "0"]: - return False - raise TypeError("Invalid boolean value: {}".format(attr)) - - if data_type == "str": - return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec # pylint: disable=eval-used - - @staticmethod - def deserialize_unicode(data): - """Preserve unicode objects in Python 2, otherwise return data - as a string. - - :param str data: response string to be deserialized. - :return: Deserialized string. 
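`deserialize_basic` above is deliberately permissive about booleans and empty nodes. For illustration:

    deserializer.deserialize_basic("TRUE", "bool")  # case-insensitive 'true'/'false' -> True
    deserializer.deserialize_basic("0", "bool")     # '0' and '1' are accepted -> False
    deserializer.deserialize_basic("", "str")       # empty 'str' node -> '', any other empty type -> None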
- :rtype: str or unicode - """ - # We might be here because we have an enum modeled as string, - # and we try to deserialize a partial dict with enum inside - if isinstance(data, Enum): - return data - - # Consider this is real string - try: - if isinstance(data, unicode): # type: ignore - return data - except NameError: - return str(data) - return str(data) - - @staticmethod - def deserialize_enum(data, enum_obj): - """Deserialize string into enum object. - - If the string is not a valid enum value it will be returned as-is - and a warning will be logged. - - :param str data: Response string to be deserialized. If this value is - None or invalid it will be returned as-is. - :param Enum enum_obj: Enum object to deserialize to. - :return: Deserialized enum object. - :rtype: Enum - """ - if isinstance(data, enum_obj) or data is None: - return data - if isinstance(data, Enum): - data = data.value - if isinstance(data, int): - # Workaround. We might consider remove it in the future. - try: - return list(enum_obj.__members__.values())[data] - except IndexError as exc: - error = "{!r} is not a valid index for enum {!r}" - raise DeserializationError(error.format(data, enum_obj)) from exc - try: - return enum_obj(str(data)) - except ValueError: - for enum_value in enum_obj: - if enum_value.value.lower() == str(data).lower(): - return enum_value - # We don't fail anymore for unknown value, we deserialize as a string - _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) - return Deserializer.deserialize_unicode(data) - - @staticmethod - def deserialize_bytearray(attr): - """Deserialize string into bytearray. - - :param str attr: response string to be deserialized. - :return: Deserialized bytearray - :rtype: bytearray - :raises: TypeError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - return bytearray(b64decode(attr)) # type: ignore - - @staticmethod - def deserialize_base64(attr): - """Deserialize base64 encoded string into string. - - :param str attr: response string to be deserialized. - :return: Deserialized base64 string - :rtype: bytearray - :raises: TypeError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore - attr = attr + padding # type: ignore - encoded = attr.replace("-", "+").replace("_", "/") - return b64decode(encoded) - - @staticmethod - def deserialize_decimal(attr): - """Deserialize string into Decimal object. - - :param str attr: response string to be deserialized. - :return: Deserialized decimal - :raises: DeserializationError if string format invalid. - :rtype: decimal - """ - if isinstance(attr, ET.Element): - attr = attr.text - try: - return decimal.Decimal(str(attr)) # type: ignore - except decimal.DecimalException as err: - msg = "Invalid decimal {}".format(attr) - raise DeserializationError(msg) from err - - @staticmethod - def deserialize_long(attr): - """Deserialize string into long (Py2) or int (Py3). - - :param str attr: response string to be deserialized. - :return: Deserialized int - :rtype: long or int - :raises: ValueError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - return _long_type(attr) # type: ignore - - @staticmethod - def deserialize_duration(attr): - """Deserialize ISO-8601 formatted string into TimeDelta object. - - :param str attr: response string to be deserialized. 
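`deserialize_enum` above degrades gracefully instead of failing on unknown values: exact value match, then integer member index, then case-insensitive match, and finally a logged warning with the raw string returned. A sketch with an invented enum:

    from enum import Enum

    class Color(Enum):
        RED = "red"

    Deserializer.deserialize_enum("RED", Color)      # case-insensitive value match -> Color.RED
    Deserializer.deserialize_enum(0, Color)          # int treated as member index -> Color.RED
    Deserializer.deserialize_enum("magenta", Color)  # unknown -> warning logged, returns "magenta"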
- :return: Deserialized duration - :rtype: TimeDelta - :raises: DeserializationError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - try: - duration = isodate.parse_duration(attr) - except (ValueError, OverflowError, AttributeError) as err: - msg = "Cannot deserialize duration object." - raise DeserializationError(msg) from err - return duration - - @staticmethod - def deserialize_date(attr): - """Deserialize ISO-8601 formatted string into Date object. - - :param str attr: response string to be deserialized. - :return: Deserialized date - :rtype: Date - :raises: DeserializationError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore - raise DeserializationError("Date must have only digits and -. Received: %s" % attr) - # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. - return isodate.parse_date(attr, defaultmonth=0, defaultday=0) - - @staticmethod - def deserialize_time(attr): - """Deserialize ISO-8601 formatted string into time object. - - :param str attr: response string to be deserialized. - :return: Deserialized time - :rtype: datetime.time - :raises: DeserializationError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore - raise DeserializationError("Date must have only digits and -. Received: %s" % attr) - return isodate.parse_time(attr) - - @staticmethod - def deserialize_rfc(attr): - """Deserialize RFC-1123 formatted string into Datetime object. - - :param str attr: response string to be deserialized. - :return: Deserialized RFC datetime - :rtype: Datetime - :raises: DeserializationError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - try: - parsed_date = email.utils.parsedate_tz(attr) # type: ignore - date_obj = datetime.datetime( - *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) - ) - if not date_obj.tzinfo: - date_obj = date_obj.astimezone(tz=TZ_UTC) - except ValueError as err: - msg = "Cannot deserialize to rfc datetime object." - raise DeserializationError(msg) from err - return date_obj - - @staticmethod - def deserialize_iso(attr): - """Deserialize ISO-8601 formatted string into Datetime object. - - :param str attr: response string to be deserialized. - :return: Deserialized ISO datetime - :rtype: Datetime - :raises: DeserializationError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - try: - attr = attr.upper() # type: ignore - match = Deserializer.valid_date.match(attr) - if not match: - raise ValueError("Invalid datetime string: " + attr) - - check_decimal = attr.split(".") - if len(check_decimal) > 1: - decimal_str = "" - for digit in check_decimal[1]: - if digit.isdigit(): - decimal_str += digit - else: - break - if len(decimal_str) > 6: - attr = attr.replace(decimal_str, decimal_str[0:6]) - - date_obj = isodate.parse_datetime(attr) - test_utc = date_obj.utctimetuple() - if test_utc.tm_year > 9999 or test_utc.tm_year < 1: - raise OverflowError("Hit max or min date") - except (ValueError, OverflowError, AttributeError) as err: - msg = "Cannot deserialize datetime object." - raise DeserializationError(msg) from err - return date_obj - - @staticmethod - def deserialize_unix(attr): - """Serialize Datetime object into IntTime format. - This is represented as seconds. 
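Note the fractional-second handling in `deserialize_iso` above: digits beyond the sixth are truncated rather than rounded, since `datetime` only resolves microseconds. For example:

    dt = Deserializer.deserialize_iso("2024-05-01T12:30:45.1234567Z")
    assert dt.microsecond == 123456  # the seventh digit is dropped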
- - :param int attr: Object to be serialized. - :return: Deserialized datetime - :rtype: Datetime - :raises: DeserializationError if format invalid - """ - if isinstance(attr, ET.Element): - attr = int(attr.text) # type: ignore - try: - attr = int(attr) - date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) - except ValueError as err: - msg = "Cannot deserialize to unix datetime object." - raise DeserializationError(msg) from err - return date_obj diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/__init__.py deleted file mode 100644 index 7e503e3d6846..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -# pylint: disable=wrong-import-position - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import - -from ._client import MonitorMetricsClient # type: ignore - -try: - from ._patch import __all__ as _patch_all - from ._patch import * -except ImportError: - _patch_all = [] -from ._patch import patch_sdk as _patch_sdk - -__all__ = [ - "MonitorMetricsClient", -] -__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore - -_patch_sdk() diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_client.py deleted file mode 100644 index 99d5dbea0753..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_client.py +++ /dev/null @@ -1,104 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from copy import deepcopy -from typing import Any, Awaitable -from typing_extensions import Self - -from azure.core import AsyncPipelineClient -from azure.core.pipeline import policies -from azure.core.rest import AsyncHttpResponse, HttpRequest - -from .._serialization import Deserializer, Serializer -from ._configuration import MonitorMetricsClientConfiguration -from .operations import MetricDefinitionsOperations, MetricNamespacesOperations, MetricsOperations - - -class MonitorMetricsClient: - """Azure Monitor Metrics Python Client. 
- - :ivar metric_definitions: MetricDefinitionsOperations operations - :vartype metric_definitions: monitor_metrics_client.aio.operations.MetricDefinitionsOperations - :ivar metrics: MetricsOperations operations - :vartype metrics: monitor_metrics_client.aio.operations.MetricsOperations - :ivar metric_namespaces: MetricNamespacesOperations operations - :vartype metric_namespaces: monitor_metrics_client.aio.operations.MetricNamespacesOperations - :keyword endpoint: Service URL. Default value is "https://management.azure.com". - :paramtype endpoint: str - :keyword api_version: Api Version. Default value is "2024-02-01". Note that overriding this - default value may result in unsupported behavior. - :paramtype api_version: str - """ - - def __init__( # pylint: disable=missing-client-constructor-parameter-credential - self, *, endpoint: str = "https://management.azure.com", **kwargs: Any - ) -> None: - self._config = MonitorMetricsClientConfiguration(**kwargs) - _policies = kwargs.pop("policies", None) - if _policies is None: - _policies = [ - policies.RequestIdPolicy(**kwargs), - self._config.headers_policy, - self._config.user_agent_policy, - self._config.proxy_policy, - policies.ContentDecodePolicy(**kwargs), - self._config.redirect_policy, - self._config.retry_policy, - self._config.authentication_policy, - self._config.custom_hook_policy, - self._config.logging_policy, - policies.DistributedTracingPolicy(**kwargs), - policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, - self._config.http_logging_policy, - ] - self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=endpoint, policies=_policies, **kwargs) - - self._serialize = Serializer() - self._deserialize = Deserializer() - self._serialize.client_side_validation = False - self.metric_definitions = MetricDefinitionsOperations( - self._client, self._config, self._serialize, self._deserialize - ) - self.metrics = MetricsOperations(self._client, self._config, self._serialize, self._deserialize) - self.metric_namespaces = MetricNamespacesOperations( - self._client, self._config, self._serialize, self._deserialize - ) - - def send_request( - self, request: HttpRequest, *, stream: bool = False, **kwargs: Any - ) -> Awaitable[AsyncHttpResponse]: - """Runs the network request through the client's chained policies. - - >>> from azure.core.rest import HttpRequest - >>> request = HttpRequest("GET", "https://www.example.org/") - - >>> response = await client.send_request(request) - - - For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request - - :param request: The network request you want to make. Required. - :type request: ~azure.core.rest.HttpRequest - :keyword bool stream: Whether the response payload will be streamed. Defaults to False. - :return: The response of your network call. Does not do error handling on your response. 
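Because this generated constructor takes no positional credential (hence the pylint disable above), authentication was supplied through the `authentication_policy` keyword consumed by the configuration class. A construction sketch, assuming azure-identity is available:

    from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy
    from azure.identity.aio import DefaultAzureCredential

    client = MonitorMetricsClient(
        authentication_policy=AsyncBearerTokenCredentialPolicy(
            DefaultAzureCredential(), "https://management.azure.com/.default"
        )
    )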
- :rtype: ~azure.core.rest.AsyncHttpResponse - """ - - request_copy = deepcopy(request) - request_copy.url = self._client.format_url(request_copy.url) - return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore - - async def close(self) -> None: - await self._client.close() - - async def __aenter__(self) -> Self: - await self._client.__aenter__() - return self - - async def __aexit__(self, *exc_details: Any) -> None: - await self._client.__aexit__(*exc_details) diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_configuration.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_configuration.py deleted file mode 100644 index 01db55ca7f03..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_configuration.py +++ /dev/null @@ -1,44 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from typing import Any - -from azure.core.pipeline import policies - -VERSION = "unknown" - - -class MonitorMetricsClientConfiguration: # pylint: disable=too-many-instance-attributes - """Configuration for MonitorMetricsClient. - - Note that all parameters used to create this instance are saved as instance - attributes. - - :keyword api_version: Api Version. Default value is "2024-02-01". Note that overriding this - default value may result in unsupported behavior. - :paramtype api_version: str - """ - - def __init__(self, **kwargs: Any) -> None: - api_version: str = kwargs.pop("api_version", "2024-02-01") - - self.api_version = api_version - kwargs.setdefault("sdk_moniker", "monitor-query/{}".format(VERSION)) - self.polling_interval = kwargs.get("polling_interval", 30) - self._configure(**kwargs) - - def _configure(self, **kwargs: Any) -> None: - self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) - self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) - self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) - self.authentication_policy = kwargs.get("authentication_policy") diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_patch.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_patch.py deleted file mode 100644 index f7dd32510333..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/_patch.py +++ /dev/null @@ -1,20 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. 
-# ------------------------------------ -"""Customize generated code here. - -Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize -""" -from typing import List - -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level - - -def patch_sdk(): - """Do not remove from this file. - - `patch_sdk` is a last resort escape hatch that allows you to do customizations - you can't accomplish using the techniques described in - https://aka.ms/azsdk/python/dpcodegen/python/customize - """ diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/__init__.py deleted file mode 100644 index 64e613d8c69b..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -# pylint: disable=wrong-import-position - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import - -from ._operations import MetricDefinitionsOperations # type: ignore -from ._operations import MetricsOperations # type: ignore -from ._operations import MetricNamespacesOperations # type: ignore - -from ._patch import __all__ as _patch_all -from ._patch import * -from ._patch import patch_sdk as _patch_sdk - -__all__ = [ - "MetricDefinitionsOperations", - "MetricsOperations", - "MetricNamespacesOperations", -] -__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore -_patch_sdk() diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/_operations.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/_operations.py deleted file mode 100644 index 3a51ef43479e..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/_operations.py +++ /dev/null @@ -1,486 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -import datetime -import sys -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, cast -import urllib.parse - -from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict - -from ...operations._operations import ( - build_metric_definitions_list_request, - build_metric_namespaces_list_request, - build_metrics_list_request, -) - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - - -class MetricDefinitionsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~monitor_metrics_client.aio.MonitorMetricsClient`'s - :attr:`metric_definitions` attribute. - """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list(self, resource_uri: str, *, metricnamespace: Optional[str] = None, **kwargs: Any) -> AsyncIterable[JSON]: - """Lists the metric definitions for the resource. - - :param resource_uri: The identifier of the resource. Required. - :type resource_uri: str - :keyword metricnamespace: Metric namespace where the metrics you want reside. Default value is - None. - :paramtype metricnamespace: str - :return: An iterator like instance of JSON object - :rtype: ~azure.core.async_paging.AsyncItemPaged[JSON] - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. 
code-block:: python - - # response body for status code(s): 200 - response == { - "category": "str", - "dimensions": [ - { - "value": "str", - "localizedValue": "str" - } - ], - "displayDescription": "str", - "id": "str", - "isDimensionRequired": bool, - "metricAvailabilities": [ - { - "retention": "1 day, 0:00:00", - "timeGrain": "1 day, 0:00:00" - } - ], - "metricClass": "str", - "name": { - "value": "str", - "localizedValue": "str" - }, - "namespace": "str", - "primaryAggregationType": "str", - "resourceId": "str", - "supportedAggregationTypes": [ - "str" - ], - "unit": "str" - } - """ - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[JSON] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_metric_definitions_list_request( - resource_uri=resource_uri, - metricnamespace=metricnamespace, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - - return _request - - async def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = deserialized["value"] - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) - - -class MetricsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~monitor_metrics_client.aio.MonitorMetricsClient`'s - :attr:`metrics` attribute. 
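The `prepare_request`/`extract_data`/`get_next` triple above, fed into `AsyncItemPaged`, is the standard generated paging shape; callers consumed it with `async for`. A consumption sketch, using the client sketched earlier and an invented ARM resource ID in `resource_uri`:

    async for definition in client.metric_definitions.list(resource_uri):
        print(definition["name"]["value"], definition["unit"])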
- """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace_async - async def list( - self, - resource_uri: str, - *, - timespan: Optional[str] = None, - interval: Optional[datetime.timedelta] = None, - metricnames: Optional[str] = None, - aggregation: Optional[str] = None, - top: Optional[int] = None, - orderby: Optional[str] = None, - filter: Optional[str] = None, - result_type: Optional[str] = None, - metricnamespace: Optional[str] = None, - auto_adjust_timegrain: Optional[bool] = None, - validate_dimensions: Optional[bool] = None, - rollupby: Optional[str] = None, - **kwargs: Any - ) -> JSON: - """**Lists the metric values for a resource**. - - :param resource_uri: The identifier of the resource. Required. - :type resource_uri: str - :keyword timespan: The timespan of the query. It is a string with the following format - 'startDateTime_ISO/endDateTime_ISO'. Default value is None. - :paramtype timespan: str - :keyword interval: The interval (i.e. timegrain) of the query in ISO 8601 duration format. - Defaults to PT1M. Special case for 'FULL' value that returns single datapoint for entire time - span requested. - *Examples: PT15M, PT1H, P1D, FULL*. Default value is None. - :paramtype interval: ~datetime.timedelta - :keyword metricnames: The names of the metrics (comma separated) to retrieve. Default value is - None. - :paramtype metricnames: str - :keyword aggregation: The list of aggregation types (comma separated) to retrieve. - *Examples: average, minimum, maximum*. Default value is None. - :paramtype aggregation: str - :keyword top: The maximum number of records to retrieve per resource ID in the request. - Valid only if filter is specified. - Defaults to 10. Default value is None. - :paramtype top: int - :keyword orderby: The aggregation to use for sorting results and the direction of the sort. - Only one order can be specified. - *Examples: sum asc*. Default value is None. - :paramtype orderby: str - :keyword filter: The **$filter** is used to reduce the set of metric data - returned.:code:`
<br>`Example::code:`<br>`Metric contains metadata A, B and C.:code:`<br>`-
- Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\ **$filter=A eq ‘a1’ and
- B eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\ :code:`<br>`- Invalid variant::code:`<br>`\
- **$filter=A eq ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\ :code:`<br>`This is invalid
- because the logical or operator cannot separate two different metadata names.:code:`<br>`-
- Return all time series where A = a1, B = b1 and C = c1::code:`<br>`\ **$filter=A eq ‘a1’ and B
- eq ‘b1’ and C eq ‘c1’**\ :code:`<br>`- Return all time series where A = a1:code:`<br>
`\\ - **$filter=A eq ‘a1’ and B eq ‘\\ *’ and C eq ‘*\\ ’**. Default value is None. - :paramtype filter: str - :keyword result_type: Reduces the set of data collected. The syntax allowed depends on the - operation. See the operation's description for details. Known values are: "Data" and - "Metadata". Default value is None. - :paramtype result_type: str - :keyword metricnamespace: Metric namespace where the metrics you want reside. Default value is - None. - :paramtype metricnamespace: str - :keyword auto_adjust_timegrain: When set to true, if the timespan passed in is not supported by - this metric, the API will return the result using the closest supported timespan. When set to - false, an error is returned for invalid timespan parameters. Defaults to false. Default value - is None. - :paramtype auto_adjust_timegrain: bool - :keyword validate_dimensions: When set to false, invalid filter parameter values will be - ignored. When set to true, an error is returned for invalid filter parameters. Defaults to - true. Default value is None. - :paramtype validate_dimensions: bool - :keyword rollupby: Dimension name(s) to rollup results by. For example if you only want to see - metric values with a filter like 'City eq Seattle or City eq Tacoma' but don't want to see - separate values for each city, you can specify 'RollUpBy=City' to see the results for Seattle - and Tacoma rolled up into one timeseries. Default value is None. - :paramtype rollupby: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "timespan": "str", - "value": [ - { - "id": "str", - "name": { - "value": "str", - "localizedValue": "str" - }, - "timeseries": [ - { - "data": [ - { - "timeStamp": "2020-02-20 - 00:00:00", - "average": 0.0, - "count": 0.0, - "maximum": 0.0, - "minimum": 0.0, - "total": 0.0 - } - ], - "metadatavalues": [ - { - "name": { - "value": "str", - "localizedValue": - "str" - }, - "value": "str" - } - ] - } - ], - "type": "str", - "unit": "str", - "displayDescription": "str", - "errorCode": "str", - "errorMessage": "str" - } - ], - "cost": 0, - "interval": "str", - "namespace": "str", - "resourceregion": "str" - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[JSON] = kwargs.pop("cls", None) - - _request = build_metrics_list_request( - resource_uri=resource_uri, - timespan=timespan, - interval=interval, - metricnames=metricnames, - aggregation=aggregation, - top=top, - orderby=orderby, - filter=filter, - result_type=result_type, - metricnamespace=metricnamespace, - auto_adjust_timegrain=auto_adjust_timegrain, - validate_dimensions=validate_dimensions, - rollupby=rollupby, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - 
if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore - - -class MetricNamespacesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~monitor_metrics_client.aio.MonitorMetricsClient`'s - :attr:`metric_namespaces` attribute. - """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list(self, resource_uri: str, *, start_time: Optional[str] = None, **kwargs: Any) -> AsyncIterable[JSON]: - """Lists the metric namespaces for the resource. - - :param resource_uri: The identifier of the resource. Required. - :type resource_uri: str - :keyword start_time: The ISO 8601 conform Date start time from which to query for metric - namespaces. Default value is None. - :paramtype start_time: str - :return: An iterator like instance of JSON object - :rtype: ~azure.core.async_paging.AsyncItemPaged[JSON] - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "classification": "str", - "id": "str", - "name": "str", - "properties": { - "metricNamespaceName": "str" - }, - "type": "str" - } - """ - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[JSON] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_metric_namespaces_list_request( - resource_uri=resource_uri, - start_time=start_time, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - - return _request - - async def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = deserialized["value"] - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, AsyncList(list_of_elem) - - async def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - 
map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - return pipeline_response - - return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/_patch.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/_patch.py deleted file mode 100644 index f7dd32510333..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/aio/operations/_patch.py +++ /dev/null @@ -1,20 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -"""Customize generated code here. - -Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize -""" -from typing import List - -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level - - -def patch_sdk(): - """Do not remove from this file. - - `patch_sdk` is a last resort escape hatch that allows you to do customizations - you can't accomplish using the techniques described in - https://aka.ms/azsdk/python/dpcodegen/python/customize - """ diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/__init__.py deleted file mode 100644 index f1f7797cdfcb..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -# pylint: disable=wrong-import-position - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import - -from ._client import MonitorBatchMetricsClient # type: ignore - -try: - from ._patch import __all__ as _patch_all - from ._patch import * -except ImportError: - _patch_all = [] -from ._patch import patch_sdk as _patch_sdk - -__all__ = [ - "MonitorBatchMetricsClient", -] -__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore - -_patch_sdk() diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_patch.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_patch.py deleted file mode 100644 index f7dd32510333..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_patch.py +++ /dev/null @@ -1,20 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -"""Customize generated code here. - -Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize -""" -from typing import List - -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level - - -def patch_sdk(): - """Do not remove from this file. 
- - `patch_sdk` is a last resort escape hatch that allows you to do customizations - you can't accomplish using the techniques described in - https://aka.ms/azsdk/python/dpcodegen/python/customize - """ diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_serialization.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_serialization.py deleted file mode 100644 index e2ad51869908..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/_serialization.py +++ /dev/null @@ -1,2118 +0,0 @@ -# pylint: disable=too-many-lines -# -------------------------------------------------------------------------- -# -# Copyright (c) Microsoft Corporation. All rights reserved. -# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the ""Software""), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. -# -# -------------------------------------------------------------------------- - -# pyright: reportUnnecessaryTypeIgnoreComment=false - -from base64 import b64decode, b64encode -import calendar -import datetime -import decimal -import email -from enum import Enum -import json -import logging -import re -import sys -import codecs -from typing import ( - Dict, - Any, - cast, - Optional, - Union, - AnyStr, - IO, - Mapping, - Callable, - TypeVar, - MutableMapping, - Type, - List, -) - -try: - from urllib import quote # type: ignore -except ImportError: - from urllib.parse import quote -import xml.etree.ElementTree as ET - -import isodate # type: ignore - -from azure.core.exceptions import DeserializationError, SerializationError -from azure.core.serialization import NULL as CoreNull - -_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") - -ModelType = TypeVar("ModelType", bound="Model") -JSON = MutableMapping[str, Any] - - -class RawDeserializer: - - # Accept "text" because we're open minded people... - JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") - - # Name used in context - CONTEXT_NAME = "deserialized_data" - - @classmethod - def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: - """Decode data according to content-type. - - Accept a stream of data as well, but will be load at once in memory for now. - - If no content-type, will return the string version (not bytes, not stream) - - :param data: Input, could be bytes or stream (will be decoded with UTF8) or text - :type data: str or bytes or IO - :param str content_type: The content type. 
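`deserialize_from_text` dispatches on the declared content type, as the body below shows: the JSON regexp first, then XML (with a JSON fallback for mislabeled payloads), then plain text. A behavior sketch:

    RawDeserializer.deserialize_from_text('{"a": 1}', "application/json")  # -> {'a': 1}
    RawDeserializer.deserialize_from_text("<a>1</a>", "application/xml")   # -> an ET.Element
    RawDeserializer.deserialize_from_text("plain", "text/plain")           # -> 'plain'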
- :return: The deserialized data. - :rtype: object - """ - if hasattr(data, "read"): - # Assume a stream - data = cast(IO, data).read() - - if isinstance(data, bytes): - data_as_str = data.decode(encoding="utf-8-sig") - else: - # Explain to mypy the correct type. - data_as_str = cast(str, data) - - # Remove Byte Order Mark if present in string - data_as_str = data_as_str.lstrip(_BOM) - - if content_type is None: - return data - - if cls.JSON_REGEXP.match(content_type): - try: - return json.loads(data_as_str) - except ValueError as err: - raise DeserializationError("JSON is invalid: {}".format(err), err) from err - elif "xml" in (content_type or []): - try: - - try: - if isinstance(data, unicode): # type: ignore - # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string - data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore - except NameError: - pass - - return ET.fromstring(data_as_str) # nosec - except ET.ParseError as err: - # It might be because the server has an issue, and returned JSON with - # content-type XML.... - # So let's try a JSON load, and if it's still broken - # let's flow the initial exception - def _json_attemp(data): - try: - return True, json.loads(data) - except ValueError: - return False, None # Don't care about this one - - success, json_result = _json_attemp(data) - if success: - return json_result - # If i'm here, it's not JSON, it's not XML, let's scream - # and raise the last context in this block (the XML exception) - # The function hack is because Py2.7 messes up with exception - # context otherwise. - _LOGGER.critical("Wasn't XML not JSON, failing") - raise DeserializationError("XML is invalid") from err - elif content_type.startswith("text/"): - return data_as_str - raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) - - @classmethod - def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: - """Deserialize from HTTP response. - - Use bytes and headers to NOT use any requests/aiohttp or whatever - specific implementation. - Headers will tested for "content-type" - - :param bytes body_bytes: The body of the response. - :param dict headers: The headers of the response. - :returns: The deserialized data. - :rtype: object - """ - # Try to use content-type from headers if available - content_type = None - if "content-type" in headers: - content_type = headers["content-type"].split(";")[0].strip().lower() - # Ouch, this server did not declare what it sent... - # Let's guess it's JSON... - # Also, since Autorest was considering that an empty body was a valid JSON, - # need that test as well.... - else: - content_type = "application/json" - - if body_bytes: - return cls.deserialize_from_text(body_bytes, content_type) - return None - - -_LOGGER = logging.getLogger(__name__) - -try: - _long_type = long # type: ignore -except NameError: - _long_type = int - - -class UTC(datetime.tzinfo): - """Time Zone info for handling UTC""" - - def utcoffset(self, dt): - """UTF offset for UTC is 0. - - :param datetime.datetime dt: The datetime - :returns: The offset - :rtype: datetime.timedelta - """ - return datetime.timedelta(0) - - def tzname(self, dt): - """Timestamp representation. - - :param datetime.datetime dt: The datetime - :returns: The timestamp representation - :rtype: str - """ - return "Z" - - def dst(self, dt): - """No daylight saving for UTC. 
- 
- :param datetime.datetime dt: The datetime
- :returns: The daylight saving time
- :rtype: datetime.timedelta
- """
- return datetime.timedelta(hours=1)
-
-
-try:
- from datetime import timezone as _FixedOffset # type: ignore
-except ImportError: # Python 2.7
-
- class _FixedOffset(datetime.tzinfo): # type: ignore
- """Fixed offset in minutes east from UTC.
- Copy/pasted from Python doc
- :param datetime.timedelta offset: offset in timedelta format
- """
-
- def __init__(self, offset) -> None:
- self.__offset = offset
-
- def utcoffset(self, dt):
- return self.__offset
-
- def tzname(self, dt):
- return str(self.__offset.total_seconds() / 3600)
-
- def __repr__(self):
- return "<FixedOffset {}>".format(self.tzname(None))
-
- def dst(self, dt):
- return datetime.timedelta(0)
-
- def __getinitargs__(self):
- return (self.__offset,)
-
-
-try:
- from datetime import timezone
-
- TZ_UTC = timezone.utc
-except ImportError:
- TZ_UTC = UTC() # type: ignore
-
-_FLATTEN = re.compile(r"(?<!\\)\.")
-
-
-def attribute_transformer(key, attr_desc, value): # pylint: disable=unused-argument
- """A key transformer that returns the Python attribute.
-
- :param str key: The attribute name
- :param dict attr_desc: The attribute metadata
- :param object value: The value
- :returns: A key using attribute name
- :rtype: str
- """
- return (key, value)
-
-
-def full_restapi_key_transformer(key, attr_desc, value): # pylint: disable=unused-argument
- """A key transformer that returns the full RestAPI key path.
-
- :param str key: The attribute name
- :param dict attr_desc: The attribute metadata
- :param object value: The value
- :returns: A list of keys using RestAPI syntax.
- :rtype: list
- """
- keys = _FLATTEN.split(attr_desc["key"])
- return ([_decode_attribute_map_key(k) for k in keys], value)
-
-
-def last_restapi_key_transformer(key, attr_desc, value):
- """A key transformer that returns the last RestAPI key.
-
- :param str key: The attribute name
- :param dict attr_desc: The attribute metadata
- :param object value: The value
- :returns: The last RestAPI key.
- :rtype: str
- """
- key, value = full_restapi_key_transformer(key, attr_desc, value)
- return (key[-1], value)
-
-
-def _create_xml_node(tag, prefix=None, ns=None):
- """Create a XML node.
-
- :param str tag: The tag name
- :param str prefix: The prefix
- :param str ns: The namespace
- :return: The XML node
- :rtype: xml.etree.ElementTree.Element
- """
- if prefix and ns:
- ET.register_namespace(prefix, ns)
- if ns:
- return ET.Element("{" + ns + "}" + tag)
- return ET.Element(tag)
-
-
-class Model:
- """Mixin for all client request body/response body models to support
- serialization and deserialization.
- """
-
- _subtype_map: Dict[str, Dict[str, Any]] = {}
- _attribute_map: Dict[str, Dict[str, Any]] = {}
- _validation: Dict[str, Dict[str, Any]] = {}
-
- def __init__(self, **kwargs: Any) -> None:
- self.additional_properties: Optional[Dict[str, Any]] = {}
- for k in kwargs: # pylint: disable=consider-using-dict-items
- if k not in self._attribute_map:
- _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__)
- elif k in self._validation and self._validation[k].get("readonly", False):
- _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__)
- else:
- setattr(self, k, kwargs[k])
-
- def __eq__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes.
-
- :param object other: The object to compare
- :returns: True if objects are equal
- :rtype: bool
- """
- if isinstance(other, self.__class__):
- return self.__dict__ == other.__dict__
- return False
-
- def __ne__(self, other: Any) -> bool:
- """Compare objects by comparing all attributes.
-
- :param object other: The object to compare
- :returns: True if objects are not equal
- :rtype: bool
- """
- return not self.__eq__(other)
-
- def __str__(self) -> str:
- return str(self.__dict__)
-
- @classmethod
- def enable_additional_properties_sending(cls) -> None:
- cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"}
-
- @classmethod
- def is_xml_model(cls) -> bool:
- try:
- cls._xml_map # type: ignore
- except AttributeError:
- return False
- return True
-
- @classmethod
- def _create_xml_node(cls):
- """Create XML node.
-
- :returns: The XML node
- :rtype: xml.etree.ElementTree.Element
- """
- try:
- xml_map = cls._xml_map # type: ignore
- except AttributeError:
- xml_map = {}
-
- return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None))
-
- def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON:
- """Return the JSON that would be sent to server from this model.
-
- This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`.
-
- If you want XML serialization, you can pass the kwargs is_xml=True.
-
- :param bool keep_readonly: If you want to serialize the readonly attributes
- :returns: A dict JSON compatible object
- :rtype: dict
- """
- serializer = Serializer(self._infer_class_models())
- return serializer._serialize( # type: ignore # pylint: disable=protected-access
- self, keep_readonly=keep_readonly, **kwargs
- )
-
- def as_dict(
- self,
- keep_readonly: bool = True,
- key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer,
- **kwargs: Any
- ) -> JSON:
- """Return a dict that can be serialized using json.dump.
-
-        Advanced usage might optionally use a callback as parameter:
-
-        .. code:: python
-
-            def my_key_transformer(key, attr_desc, value):
-                return key
-
-        Key is the attribute name used in Python. Attr_desc
-        is a dict of metadata. Currently contains 'type' with the
-        msrest type and 'key' with the RestAPI encoded key.
-        Value is the current value in this object.
-
-        The string returned will be used to serialize the key.
-        If the return type is a list, this is considered hierarchical
-        result dict.
-
-        See the three examples in this file:
-
-        - attribute_transformer
-        - full_restapi_key_transformer
-        - last_restapi_key_transformer
-
-        If you want XML serialization, you can pass the kwargs is_xml=True.
-
-        :param bool keep_readonly: If you want to serialize the readonly attributes
-        :param function key_transformer: A key transformer function.
-        :returns: A dict JSON compatible object
-        :rtype: dict
-        """
-        serializer = Serializer(self._infer_class_models())
-        return serializer._serialize(  # type: ignore # pylint: disable=protected-access
-            self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs
-        )
-
-    @classmethod
-    def _infer_class_models(cls):
-        try:
-            str_models = cls.__module__.rsplit(".", 1)[0]
-            models = sys.modules[str_models]
-            client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
-            if cls.__name__ not in client_models:
-                raise ValueError("Not Autorest generated code")
-        except Exception:  # pylint: disable=broad-exception-caught
-            # Assume it's not Autorest generated (tests?). Add ourselves as dependencies.
-            client_models = {cls.__name__: cls}
-        return client_models
-
-    @classmethod
-    def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType:
-        """Parse a str using the RestAPI syntax and return a model.
-
-        :param str data: A str using RestAPI structure. JSON by default.
-        :param str content_type: JSON by default, set application/xml if XML.
-        :returns: An instance of this model
-        :raises: DeserializationError if something went wrong
-        :rtype: ModelType
-        """
-        deserializer = Deserializer(cls._infer_class_models())
-        return deserializer(cls.__name__, data, content_type=content_type)  # type: ignore
-
-    @classmethod
-    def from_dict(
-        cls: Type[ModelType],
-        data: Any,
-        key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None,
-        content_type: Optional[str] = None,
-    ) -> ModelType:
-        """Parse a dict using given key extractor return a model.
-
-        By default, considers the key extractors (rest_key_case_insensitive_extractor,
-        attribute_key_case_insensitive_extractor and last_rest_key_case_insensitive_extractor).
-
-        :param dict data: A dict using RestAPI structure
-        :param function key_extractors: A key extractor function.
-        :param str content_type: JSON by default, set application/xml if XML.
-        :returns: An instance of this model
-        :raises: DeserializationError if something went wrong
-        :rtype: ModelType
-        """
-        deserializer = Deserializer(cls._infer_class_models())
-        deserializer.key_extractors = (  # type: ignore
-            [  # type: ignore
-                attribute_key_case_insensitive_extractor,
-                rest_key_case_insensitive_extractor,
-                last_rest_key_case_insensitive_extractor,
-            ]
-            if key_extractors is None
-            else key_extractors
-        )
-        return deserializer(cls.__name__, data, content_type=content_type)  # type: ignore
-
-    @classmethod
-    def _flatten_subtype(cls, key, objects):
-        if "_subtype_map" not in cls.__dict__:
-            return {}
-        result = dict(cls._subtype_map[key])
-        for valuetype in cls._subtype_map[key].values():
-            result.update(objects[valuetype]._flatten_subtype(key, objects))  # pylint: disable=protected-access
-        return result
-
-    @classmethod
-    def _classify(cls, response, objects):
-        """Check the class _subtype_map for any child classes.
-        We want to ignore any inherited _subtype_maps.
-
-        :param dict response: The initial data
-        :param dict objects: The class objects
-        :returns: The class to be used
-        :rtype: class
-        """
-        for subtype_key in cls.__dict__.get("_subtype_map", {}).keys():
-            subtype_value = None
-
-            if not isinstance(response, ET.Element):
-                rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1]
-                subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None)
-            else:
-                subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response)
-            if subtype_value:
-                # Try to match base class. Can be class name only
-                # (bug to fix in Autorest to support x-ms-discriminator-name)
-                if cls.__name__ == subtype_value:
-                    return cls
-                flatten_mapping_type = cls._flatten_subtype(subtype_key, objects)
-                try:
-                    return objects[flatten_mapping_type[subtype_value]]  # type: ignore
-                except KeyError:
-                    _LOGGER.warning(
-                        "Subtype value %s has no mapping, use base class %s.",
-                        subtype_value,
-                        cls.__name__,
-                    )
-                    break
-            else:
-                _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__)
-                break
-        return cls
-
-    @classmethod
-    def _get_rest_key_parts(cls, attr_key):
-        """Get the RestAPI key of this attr, split it and decode each part.
-
-        :param str attr_key: Attribute key must be in attribute_map.
-        :returns: A list of RestAPI part
-        :rtype: list
-        """
-        rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"])
-        return [_decode_attribute_map_key(key_part) for key_part in rest_split_key]
-
-
-def _decode_attribute_map_key(key):
-    """This decodes a key in an _attribute_map to the actual key we want to look at
-    inside the received data.
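-
-    For example (illustrative):
-
-    .. code:: python
-
-        # An escaped dot in an _attribute_map key becomes a literal dot
-        _decode_attribute_map_key("error\\.code")  # -> "error.code"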
- - :param str key: A key string from the generated code - :returns: The decoded key - :rtype: str - """ - return key.replace("\\.", ".") - - -class Serializer(object): # pylint: disable=too-many-public-methods - """Request object model serializer.""" - - basic_types = {str: "str", int: "int", bool: "bool", float: "float"} - - _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} - days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} - months = { - 1: "Jan", - 2: "Feb", - 3: "Mar", - 4: "Apr", - 5: "May", - 6: "Jun", - 7: "Jul", - 8: "Aug", - 9: "Sep", - 10: "Oct", - 11: "Nov", - 12: "Dec", - } - validation = { - "min_length": lambda x, y: len(x) < y, - "max_length": lambda x, y: len(x) > y, - "minimum": lambda x, y: x < y, - "maximum": lambda x, y: x > y, - "minimum_ex": lambda x, y: x <= y, - "maximum_ex": lambda x, y: x >= y, - "min_items": lambda x, y: len(x) < y, - "max_items": lambda x, y: len(x) > y, - "pattern": lambda x, y: not re.match(y, x, re.UNICODE), - "unique": lambda x, y: len(x) != len(set(x)), - "multiple": lambda x, y: x % y != 0, - } - - def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: - self.serialize_type = { - "iso-8601": Serializer.serialize_iso, - "rfc-1123": Serializer.serialize_rfc, - "unix-time": Serializer.serialize_unix, - "duration": Serializer.serialize_duration, - "date": Serializer.serialize_date, - "time": Serializer.serialize_time, - "decimal": Serializer.serialize_decimal, - "long": Serializer.serialize_long, - "bytearray": Serializer.serialize_bytearray, - "base64": Serializer.serialize_base64, - "object": self.serialize_object, - "[]": self.serialize_iter, - "{}": self.serialize_dict, - } - self.dependencies: Dict[str, type] = dict(classes) if classes else {} - self.key_transformer = full_restapi_key_transformer - self.client_side_validation = True - - def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals - self, target_obj, data_type=None, **kwargs - ): - """Serialize data into a string according to type. - - :param object target_obj: The data to be serialized. - :param str data_type: The type to be serialized from. - :rtype: str, dict - :raises: SerializationError if serialization fails. - :returns: The serialized data. 
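-
-        .. code:: python
-
-            # Illustrative sketch: "MyModel" stands in for any generated model class.
-            serializer = Serializer({"MyModel": MyModel})
-            request_body = serializer._serialize(my_model_instance)  # JSON-compatible dict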
- """ - key_transformer = kwargs.get("key_transformer", self.key_transformer) - keep_readonly = kwargs.get("keep_readonly", False) - if target_obj is None: - return None - - attr_name = None - class_name = target_obj.__class__.__name__ - - if data_type: - return self.serialize_data(target_obj, data_type, **kwargs) - - if not hasattr(target_obj, "_attribute_map"): - data_type = type(target_obj).__name__ - if data_type in self.basic_types.values(): - return self.serialize_data(target_obj, data_type, **kwargs) - - # Force "is_xml" kwargs if we detect a XML model - try: - is_xml_model_serialization = kwargs["is_xml"] - except KeyError: - is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) - - serialized = {} - if is_xml_model_serialization: - serialized = target_obj._create_xml_node() # pylint: disable=protected-access - try: - attributes = target_obj._attribute_map # pylint: disable=protected-access - for attr, attr_desc in attributes.items(): - attr_name = attr - if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access - attr_name, {} - ).get("readonly", False): - continue - - if attr_name == "additional_properties" and attr_desc["key"] == "": - if target_obj.additional_properties is not None: - serialized.update(target_obj.additional_properties) - continue - try: - - orig_attr = getattr(target_obj, attr) - if is_xml_model_serialization: - pass # Don't provide "transformer" for XML for now. Keep "orig_attr" - else: # JSON - keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) - keys = keys if isinstance(keys, list) else [keys] - - kwargs["serialization_ctxt"] = attr_desc - new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) - - if is_xml_model_serialization: - xml_desc = attr_desc.get("xml", {}) - xml_name = xml_desc.get("name", attr_desc["key"]) - xml_prefix = xml_desc.get("prefix", None) - xml_ns = xml_desc.get("ns", None) - if xml_desc.get("attr", False): - if xml_ns: - ET.register_namespace(xml_prefix, xml_ns) - xml_name = "{{{}}}{}".format(xml_ns, xml_name) - serialized.set(xml_name, new_attr) # type: ignore - continue - if xml_desc.get("text", False): - serialized.text = new_attr # type: ignore - continue - if isinstance(new_attr, list): - serialized.extend(new_attr) # type: ignore - elif isinstance(new_attr, ET.Element): - # If the down XML has no XML/Name, - # we MUST replace the tag with the local tag. But keeping the namespaces. 
- if "name" not in getattr(orig_attr, "_xml_map", {}): - splitted_tag = new_attr.tag.split("}") - if len(splitted_tag) == 2: # Namespace - new_attr.tag = "}".join([splitted_tag[0], xml_name]) - else: - new_attr.tag = xml_name - serialized.append(new_attr) # type: ignore - else: # That's a basic type - # Integrate namespace if necessary - local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) - local_node.text = str(new_attr) - serialized.append(local_node) # type: ignore - else: # JSON - for k in reversed(keys): # type: ignore - new_attr = {k: new_attr} - - _new_attr = new_attr - _serialized = serialized - for k in keys: # type: ignore - if k not in _serialized: - _serialized.update(_new_attr) # type: ignore - _new_attr = _new_attr[k] # type: ignore - _serialized = _serialized[k] - except ValueError as err: - if isinstance(err, SerializationError): - raise - - except (AttributeError, KeyError, TypeError) as err: - msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) - raise SerializationError(msg) from err - return serialized - - def body(self, data, data_type, **kwargs): - """Serialize data intended for a request body. - - :param object data: The data to be serialized. - :param str data_type: The type to be serialized from. - :rtype: dict - :raises: SerializationError if serialization fails. - :raises: ValueError if data is None - :returns: The serialized request body - """ - - # Just in case this is a dict - internal_data_type_str = data_type.strip("[]{}") - internal_data_type = self.dependencies.get(internal_data_type_str, None) - try: - is_xml_model_serialization = kwargs["is_xml"] - except KeyError: - if internal_data_type and issubclass(internal_data_type, Model): - is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) - else: - is_xml_model_serialization = False - if internal_data_type and not isinstance(internal_data_type, Enum): - try: - deserializer = Deserializer(self.dependencies) - # Since it's on serialization, it's almost sure that format is not JSON REST - # We're not able to deal with additional properties for now. - deserializer.additional_properties_detection = False - if is_xml_model_serialization: - deserializer.key_extractors = [ # type: ignore - attribute_key_case_insensitive_extractor, - ] - else: - deserializer.key_extractors = [ - rest_key_case_insensitive_extractor, - attribute_key_case_insensitive_extractor, - last_rest_key_case_insensitive_extractor, - ] - data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access - except DeserializationError as err: - raise SerializationError("Unable to build a model: " + str(err)) from err - - return self._serialize(data, data_type, **kwargs) - - def url(self, name, data, data_type, **kwargs): - """Serialize data intended for a URL path. - - :param str name: The name of the URL path parameter. - :param object data: The data to be serialized. - :param str data_type: The type to be serialized from. - :rtype: str - :returns: The serialized URL path - :raises: TypeError if serialization fails. 
- :raises: ValueError if data is None - """ - try: - output = self.serialize_data(data, data_type, **kwargs) - if data_type == "bool": - output = json.dumps(output) - - if kwargs.get("skip_quote") is True: - output = str(output) - output = output.replace("{", quote("{")).replace("}", quote("}")) - else: - output = quote(str(output), safe="") - except SerializationError as exc: - raise TypeError("{} must be type {}.".format(name, data_type)) from exc - return output - - def query(self, name, data, data_type, **kwargs): - """Serialize data intended for a URL query. - - :param str name: The name of the query parameter. - :param object data: The data to be serialized. - :param str data_type: The type to be serialized from. - :rtype: str, list - :raises: TypeError if serialization fails. - :raises: ValueError if data is None - :returns: The serialized query parameter - """ - try: - # Treat the list aside, since we don't want to encode the div separator - if data_type.startswith("["): - internal_data_type = data_type[1:-1] - do_quote = not kwargs.get("skip_quote", False) - return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) - - # Not a list, regular serialization - output = self.serialize_data(data, data_type, **kwargs) - if data_type == "bool": - output = json.dumps(output) - if kwargs.get("skip_quote") is True: - output = str(output) - else: - output = quote(str(output), safe="") - except SerializationError as exc: - raise TypeError("{} must be type {}.".format(name, data_type)) from exc - return str(output) - - def header(self, name, data, data_type, **kwargs): - """Serialize data intended for a request header. - - :param str name: The name of the header. - :param object data: The data to be serialized. - :param str data_type: The type to be serialized from. - :rtype: str - :raises: TypeError if serialization fails. - :raises: ValueError if data is None - :returns: The serialized header - """ - try: - if data_type in ["[str]"]: - data = ["" if d is None else d for d in data] - - output = self.serialize_data(data, data_type, **kwargs) - if data_type == "bool": - output = json.dumps(output) - except SerializationError as exc: - raise TypeError("{} must be type {}.".format(name, data_type)) from exc - return str(output) - - def serialize_data(self, data, data_type, **kwargs): - """Serialize generic data according to supplied data type. - - :param object data: The data to be serialized. - :param str data_type: The type to be serialized from. - :raises: AttributeError if required data is None. - :raises: ValueError if data is None - :raises: SerializationError if serialization fails. - :returns: The serialized data. 
-        :rtype: str, int, float, bool, dict, list
-        """
-        if data is None:
-            raise ValueError("No value for given attribute")
-
-        try:
-            if data is CoreNull:
-                return None
-            if data_type in self.basic_types.values():
-                return self.serialize_basic(data, data_type, **kwargs)
-
-            if data_type in self.serialize_type:
-                return self.serialize_type[data_type](data, **kwargs)
-
-            # If dependencies is empty, try with current data class
-            # It has to be a subclass of Enum anyway
-            enum_type = self.dependencies.get(data_type, data.__class__)
-            if issubclass(enum_type, Enum):
-                return Serializer.serialize_enum(data, enum_obj=enum_type)
-
-            iter_type = data_type[0] + data_type[-1]
-            if iter_type in self.serialize_type:
-                return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs)
-
-        except (ValueError, TypeError) as err:
-            msg = "Unable to serialize value: {!r} as type: {!r}."
-            raise SerializationError(msg.format(data, data_type)) from err
-        return self._serialize(data, **kwargs)
-
-    @classmethod
-    def _get_custom_serializers(cls, data_type, **kwargs):  # pylint: disable=inconsistent-return-statements
-        custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type)
-        if custom_serializer:
-            return custom_serializer
-        if kwargs.get("is_xml", False):
-            return cls._xml_basic_types_serializers.get(data_type)
-
-    @classmethod
-    def serialize_basic(cls, data, data_type, **kwargs):
-        """Serialize basic builtin data type.
-        Serializes objects to str, int, float or bool.
-
-        Possible kwargs:
-        - basic_types_serializers dict[str, callable] : If set, use the callable as serializer
-        - is_xml bool : If set, use xml_basic_types_serializers
-
-        :param obj data: Object to be serialized.
-        :param str data_type: Type of object in the iterable.
-        :rtype: str, int, float, bool
-        :return: serialized object
-        """
-        custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
-        if custom_serializer:
-            return custom_serializer(data)
-        if data_type == "str":
-            return cls.serialize_unicode(data)
-        return eval(data_type)(data)  # nosec # pylint: disable=eval-used
-
-    @classmethod
-    def serialize_unicode(cls, data):
-        """Special handling for serializing unicode strings in Py2.
-        Encode to UTF-8 if unicode, otherwise handle as a str.
-
-        :param str data: Object to be serialized.
-        :rtype: str
-        :return: serialized object
-        """
-        try:  # If I received an enum, return its value
-            return data.value
-        except AttributeError:
-            pass
-
-        try:
-            if isinstance(data, unicode):  # type: ignore
-                # Don't change it, JSON and XML ElementTree are totally able
-                # to serialize correctly u'' strings
-                return data
-        except NameError:
-            return str(data)
-        return str(data)
-
-    def serialize_iter(self, data, iter_type, div=None, **kwargs):
-        """Serialize iterable.
-
-        Supported kwargs:
-        - serialization_ctxt dict : The current entry of _attribute_map, or same format.
-          serialization_ctxt['type'] should be same as data_type.
-        - is_xml bool : If set, serialize as XML
-
-        :param list data: Object to be serialized.
-        :param str iter_type: Type of object in the iterable.
-        :param str div: If set, this str will be used to combine the elements
-         in the iterable into a combined string. Default is 'None'.
- :rtype: list, str - :return: serialized iterable - """ - if isinstance(data, str): - raise SerializationError("Refuse str type as a valid iter type.") - - serialization_ctxt = kwargs.get("serialization_ctxt", {}) - is_xml = kwargs.get("is_xml", False) - - serialized = [] - for d in data: - try: - serialized.append(self.serialize_data(d, iter_type, **kwargs)) - except ValueError as err: - if isinstance(err, SerializationError): - raise - serialized.append(None) - - if kwargs.get("do_quote", False): - serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] - - if div: - serialized = ["" if s is None else str(s) for s in serialized] - serialized = div.join(serialized) - - if "xml" in serialization_ctxt or is_xml: - # XML serialization is more complicated - xml_desc = serialization_ctxt.get("xml", {}) - xml_name = xml_desc.get("name") - if not xml_name: - xml_name = serialization_ctxt["key"] - - # Create a wrap node if necessary (use the fact that Element and list have "append") - is_wrapped = xml_desc.get("wrapped", False) - node_name = xml_desc.get("itemsName", xml_name) - if is_wrapped: - final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) - else: - final_result = [] - # All list elements to "local_node" - for el in serialized: - if isinstance(el, ET.Element): - el_node = el - else: - el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) - if el is not None: # Otherwise it writes "None" :-p - el_node.text = str(el) - final_result.append(el_node) - return final_result - return serialized - - def serialize_dict(self, attr, dict_type, **kwargs): - """Serialize a dictionary of objects. - - :param dict attr: Object to be serialized. - :param str dict_type: Type of object in the dictionary. - :rtype: dict - :return: serialized dictionary - """ - serialization_ctxt = kwargs.get("serialization_ctxt", {}) - serialized = {} - for key, value in attr.items(): - try: - serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) - except ValueError as err: - if isinstance(err, SerializationError): - raise - serialized[self.serialize_unicode(key)] = None - - if "xml" in serialization_ctxt: - # XML serialization is more complicated - xml_desc = serialization_ctxt["xml"] - xml_name = xml_desc["name"] - - final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) - for key, value in serialized.items(): - ET.SubElement(final_result, key).text = value - return final_result - - return serialized - - def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements - """Serialize a generic object. - This will be handled as a dictionary. If object passed in is not - a basic type (str, int, float, dict, list) it will simply be - cast to str. - - :param dict attr: Object to be serialized. 
- :rtype: dict or str - :return: serialized object - """ - if attr is None: - return None - if isinstance(attr, ET.Element): - return attr - obj_type = type(attr) - if obj_type in self.basic_types: - return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) - if obj_type is _long_type: - return self.serialize_long(attr) - if obj_type is str: - return self.serialize_unicode(attr) - if obj_type is datetime.datetime: - return self.serialize_iso(attr) - if obj_type is datetime.date: - return self.serialize_date(attr) - if obj_type is datetime.time: - return self.serialize_time(attr) - if obj_type is datetime.timedelta: - return self.serialize_duration(attr) - if obj_type is decimal.Decimal: - return self.serialize_decimal(attr) - - # If it's a model or I know this dependency, serialize as a Model - if obj_type in self.dependencies.values() or isinstance(attr, Model): - return self._serialize(attr) - - if obj_type == dict: - serialized = {} - for key, value in attr.items(): - try: - serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) - except ValueError: - serialized[self.serialize_unicode(key)] = None - return serialized - - if obj_type == list: - serialized = [] - for obj in attr: - try: - serialized.append(self.serialize_object(obj, **kwargs)) - except ValueError: - pass - return serialized - return str(attr) - - @staticmethod - def serialize_enum(attr, enum_obj=None): - try: - result = attr.value - except AttributeError: - result = attr - try: - enum_obj(result) # type: ignore - return result - except ValueError as exc: - for enum_value in enum_obj: # type: ignore - if enum_value.value.lower() == str(attr).lower(): - return enum_value.value - error = "{!r} is not valid value for enum {!r}" - raise SerializationError(error.format(attr, enum_obj)) from exc - - @staticmethod - def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument - """Serialize bytearray into base-64 string. - - :param str attr: Object to be serialized. - :rtype: str - :return: serialized base64 - """ - return b64encode(attr).decode() - - @staticmethod - def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument - """Serialize str into base-64 string. - - :param str attr: Object to be serialized. - :rtype: str - :return: serialized base64 - """ - encoded = b64encode(attr).decode("ascii") - return encoded.strip("=").replace("+", "-").replace("/", "_") - - @staticmethod - def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument - """Serialize Decimal object to float. - - :param decimal attr: Object to be serialized. - :rtype: float - :return: serialized decimal - """ - return float(attr) - - @staticmethod - def serialize_long(attr, **kwargs): # pylint: disable=unused-argument - """Serialize long (Py2) or int (Py3). - - :param int attr: Object to be serialized. - :rtype: int/long - :return: serialized long - """ - return _long_type(attr) - - @staticmethod - def serialize_date(attr, **kwargs): # pylint: disable=unused-argument - """Serialize Date object into ISO-8601 formatted string. - - :param Date attr: Object to be serialized. - :rtype: str - :return: serialized date - """ - if isinstance(attr, str): - attr = isodate.parse_date(attr) - t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) - return t - - @staticmethod - def serialize_time(attr, **kwargs): # pylint: disable=unused-argument - """Serialize Time object into ISO-8601 formatted string. - - :param datetime.time attr: Object to be serialized. 
-        :rtype: str
-        :return: serialized time
-        """
-        if isinstance(attr, str):
-            attr = isodate.parse_time(attr)
-        t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second)
-        if attr.microsecond:
-            t += ".{:02}".format(attr.microsecond)
-        return t
-
-    @staticmethod
-    def serialize_duration(attr, **kwargs):  # pylint: disable=unused-argument
-        """Serialize TimeDelta object into ISO-8601 formatted string.
-
-        :param TimeDelta attr: Object to be serialized.
-        :rtype: str
-        :return: serialized duration
-        """
-        if isinstance(attr, str):
-            attr = isodate.parse_duration(attr)
-        return isodate.duration_isoformat(attr)
-
-    @staticmethod
-    def serialize_rfc(attr, **kwargs):  # pylint: disable=unused-argument
-        """Serialize Datetime object into RFC-1123 formatted string.
-
-        :param Datetime attr: Object to be serialized.
-        :rtype: str
-        :raises: TypeError if format invalid.
-        :return: serialized rfc
-        """
-        try:
-            if not attr.tzinfo:
-                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
-            utc = attr.utctimetuple()
-        except AttributeError as exc:
-            raise TypeError("RFC1123 object must be valid Datetime object.") from exc
-
-        return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
-            Serializer.days[utc.tm_wday],
-            utc.tm_mday,
-            Serializer.months[utc.tm_mon],
-            utc.tm_year,
-            utc.tm_hour,
-            utc.tm_min,
-            utc.tm_sec,
-        )
-
-    @staticmethod
-    def serialize_iso(attr, **kwargs):  # pylint: disable=unused-argument
-        """Serialize Datetime object into ISO-8601 formatted string.
-
-        :param Datetime attr: Object to be serialized.
-        :rtype: str
-        :raises: SerializationError if format invalid.
-        :return: serialized iso
-        """
-        if isinstance(attr, str):
-            attr = isodate.parse_datetime(attr)
-        try:
-            if not attr.tzinfo:
-                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
-            utc = attr.utctimetuple()
-            if utc.tm_year > 9999 or utc.tm_year < 1:
-                raise OverflowError("Hit max or min date")
-
-            microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0")
-            if microseconds:
-                microseconds = "." + microseconds
-            date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
-                utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec
-            )
-            return date + microseconds + "Z"
-        except (ValueError, OverflowError) as err:
-            msg = "Unable to serialize datetime object."
-            raise SerializationError(msg) from err
-        except AttributeError as err:
-            msg = "ISO-8601 object must be valid Datetime object."
-            raise TypeError(msg) from err
-
-    @staticmethod
-    def serialize_unix(attr, **kwargs):  # pylint: disable=unused-argument
-        """Serialize Datetime object into IntTime format.
-        This is represented as seconds.
-
-        :param Datetime attr: Object to be serialized.
-        :rtype: int
-        :raises: SerializationError if format invalid
-        :return: serialized unix
-        """
-        if isinstance(attr, int):
-            return attr
-        try:
-            if not attr.tzinfo:
-                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
-            return int(calendar.timegm(attr.utctimetuple()))
-        except AttributeError as exc:
-            raise TypeError("Unix time object must be valid Datetime object.") from exc
-
-
-def rest_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
-    key = attr_desc["key"]
-    working_data = data
-
-    while "." in key:
-        # Need the cast, as for some reasons "split" is typed as list[str | Any]
-        dict_keys = cast(List[str], _FLATTEN.split(key))
-        if len(dict_keys) == 1:
-            key = _decode_attribute_map_key(dict_keys[0])
-            break
-        working_key = _decode_attribute_map_key(dict_keys[0])
-        working_data = working_data.get(working_key, data)
-        if working_data is None:
-            # If at any point while following flatten JSON path see None, it means
-            # that all properties under are None as well
-            return None
-        key = ".".join(dict_keys[1:])
-
-    return working_data.get(key)
-
-
-def rest_key_case_insensitive_extractor(  # pylint: disable=unused-argument, inconsistent-return-statements
-    attr, attr_desc, data
-):
-    key = attr_desc["key"]
-    working_data = data
-
-    while "." in key:
-        dict_keys = _FLATTEN.split(key)
-        if len(dict_keys) == 1:
-            key = _decode_attribute_map_key(dict_keys[0])
-            break
-        working_key = _decode_attribute_map_key(dict_keys[0])
-        working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data)
-        if working_data is None:
-            # If at any point while following flatten JSON path see None, it means
-            # that all properties under are None as well
-            return None
-        key = ".".join(dict_keys[1:])
-
-    if working_data:
-        return attribute_key_case_insensitive_extractor(key, None, working_data)
-
-
-def last_rest_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
-    """Extract the attribute in "data" based on the last part of the JSON path key.
-
-    :param str attr: The attribute to extract
-    :param dict attr_desc: The attribute description
-    :param dict data: The data to extract from
-    :rtype: object
-    :returns: The extracted attribute
-    """
-    key = attr_desc["key"]
-    dict_keys = _FLATTEN.split(key)
-    return attribute_key_extractor(dict_keys[-1], None, data)
-
-
-def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
-    """Extract the attribute in "data" based on the last part of the JSON path key.
-
-    This is the case insensitive version of "last_rest_key_extractor".
-
-    :param str attr: The attribute to extract
-    :param dict attr_desc: The attribute description
-    :param dict data: The data to extract from
-    :rtype: object
-    :returns: The extracted attribute
-    """
-    key = attr_desc["key"]
-    dict_keys = _FLATTEN.split(key)
-    return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data)
-
-
-def attribute_key_extractor(attr, _, data):
-    return data.get(attr)
-
-
-def attribute_key_case_insensitive_extractor(attr, _, data):
-    found_key = None
-    lower_attr = attr.lower()
-    for key in data:
-        if lower_attr == key.lower():
-            found_key = key
-            break
-
-    return data.get(found_key)
-
-
-def _extract_name_from_internal_type(internal_type):
-    """Given an internal type XML description, extract correct XML name with namespace.
- - :param dict internal_type: An model type - :rtype: tuple - :returns: A tuple XML name + namespace dict - """ - internal_type_xml_map = getattr(internal_type, "_xml_map", {}) - xml_name = internal_type_xml_map.get("name", internal_type.__name__) - xml_ns = internal_type_xml_map.get("ns", None) - if xml_ns: - xml_name = "{{{}}}{}".format(xml_ns, xml_name) - return xml_name - - -def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements - if isinstance(data, dict): - return None - - # Test if this model is XML ready first - if not isinstance(data, ET.Element): - return None - - xml_desc = attr_desc.get("xml", {}) - xml_name = xml_desc.get("name", attr_desc["key"]) - - # Look for a children - is_iter_type = attr_desc["type"].startswith("[") - is_wrapped = xml_desc.get("wrapped", False) - internal_type = attr_desc.get("internalType", None) - internal_type_xml_map = getattr(internal_type, "_xml_map", {}) - - # Integrate namespace if necessary - xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) - if xml_ns: - xml_name = "{{{}}}{}".format(xml_ns, xml_name) - - # If it's an attribute, that's simple - if xml_desc.get("attr", False): - return data.get(xml_name) - - # If it's x-ms-text, that's simple too - if xml_desc.get("text", False): - return data.text - - # Scenario where I take the local name: - # - Wrapped node - # - Internal type is an enum (considered basic types) - # - Internal type has no XML/Name node - if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): - children = data.findall(xml_name) - # If internal type has a local name and it's not a list, I use that name - elif not is_iter_type and internal_type and "name" in internal_type_xml_map: - xml_name = _extract_name_from_internal_type(internal_type) - children = data.findall(xml_name) - # That's an array - else: - if internal_type: # Complex type, ignore itemsName and use the complex type name - items_name = _extract_name_from_internal_type(internal_type) - else: - items_name = xml_desc.get("itemsName", xml_name) - children = data.findall(items_name) - - if len(children) == 0: - if is_iter_type: - if is_wrapped: - return None # is_wrapped no node, we want None - return [] # not wrapped, assume empty list - return None # Assume it's not there, maybe an optional node. - - # If is_iter_type and not wrapped, return all found children - if is_iter_type: - if not is_wrapped: - return children - # Iter and wrapped, should have found one node only (the wrap one) - if len(children) != 1: - raise DeserializationError( - "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long - xml_name - ) - ) - return list(children[0]) # Might be empty list and that's ok. - - # Here it's not a itertype, we should have found one element only or empty - if len(children) > 1: - raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) - return children[0] - - -class Deserializer(object): - """Response object model deserializer. - - :param dict classes: Class type dictionary for deserializing complex types. - :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. 
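-
-    .. code:: python
-
-        # Illustrative sketch: "MyModel" stands in for any generated model class.
-        deserializer = Deserializer({"MyModel": MyModel})
-        model = deserializer("MyModel", response, content_type="application/json")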
- """ - - basic_types = {str: "str", int: "int", bool: "bool", float: "float"} - - valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") - - def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: - self.deserialize_type = { - "iso-8601": Deserializer.deserialize_iso, - "rfc-1123": Deserializer.deserialize_rfc, - "unix-time": Deserializer.deserialize_unix, - "duration": Deserializer.deserialize_duration, - "date": Deserializer.deserialize_date, - "time": Deserializer.deserialize_time, - "decimal": Deserializer.deserialize_decimal, - "long": Deserializer.deserialize_long, - "bytearray": Deserializer.deserialize_bytearray, - "base64": Deserializer.deserialize_base64, - "object": self.deserialize_object, - "[]": self.deserialize_iter, - "{}": self.deserialize_dict, - } - self.deserialize_expected_types = { - "duration": (isodate.Duration, datetime.timedelta), - "iso-8601": (datetime.datetime), - } - self.dependencies: Dict[str, type] = dict(classes) if classes else {} - self.key_extractors = [rest_key_extractor, xml_key_extractor] - # Additional properties only works if the "rest_key_extractor" is used to - # extract the keys. Making it to work whatever the key extractor is too much - # complicated, with no real scenario for now. - # So adding a flag to disable additional properties detection. This flag should be - # used if your expect the deserialization to NOT come from a JSON REST syntax. - # Otherwise, result are unexpected - self.additional_properties_detection = True - - def __call__(self, target_obj, response_data, content_type=None): - """Call the deserializer to process a REST response. - - :param str target_obj: Target data type to deserialize to. - :param requests.Response response_data: REST response object. - :param str content_type: Swagger "produces" if available. - :raises: DeserializationError if deserialization fails. - :return: Deserialized object. - :rtype: object - """ - data = self._unpack_content(response_data, content_type) - return self._deserialize(target_obj, data) - - def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements - """Call the deserializer on a model. - - Data needs to be already deserialized as JSON or XML ElementTree - - :param str target_obj: Target data type to deserialize to. - :param object data: Object to deserialize. - :raises: DeserializationError if deserialization fails. - :return: Deserialized object. 
- :rtype: object - """ - # This is already a model, go recursive just in case - if hasattr(data, "_attribute_map"): - constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] - try: - for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access - if attr in constants: - continue - value = getattr(data, attr) - if value is None: - continue - local_type = mapconfig["type"] - internal_data_type = local_type.strip("[]{}") - if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): - continue - setattr(data, attr, self._deserialize(local_type, value)) - return data - except AttributeError: - return - - response, class_name = self._classify_target(target_obj, data) - - if isinstance(response, str): - return self.deserialize_data(data, response) - if isinstance(response, type) and issubclass(response, Enum): - return self.deserialize_enum(data, response) - - if data is None or data is CoreNull: - return data - try: - attributes = response._attribute_map # type: ignore # pylint: disable=protected-access - d_attrs = {} - for attr, attr_desc in attributes.items(): - # Check empty string. If it's not empty, someone has a real "additionalProperties"... - if attr == "additional_properties" and attr_desc["key"] == "": - continue - raw_value = None - # Enhance attr_desc with some dynamic data - attr_desc = attr_desc.copy() # Do a copy, do not change the real one - internal_data_type = attr_desc["type"].strip("[]{}") - if internal_data_type in self.dependencies: - attr_desc["internalType"] = self.dependencies[internal_data_type] - - for key_extractor in self.key_extractors: - found_value = key_extractor(attr, attr_desc, data) - if found_value is not None: - if raw_value is not None and raw_value != found_value: - msg = ( - "Ignoring extracted value '%s' from %s for key '%s'" - " (duplicate extraction, follow extractors order)" - ) - _LOGGER.warning(msg, found_value, key_extractor, attr) - continue - raw_value = found_value - - value = self.deserialize_data(raw_value, attr_desc["type"]) - d_attrs[attr] = value - except (AttributeError, TypeError, KeyError) as err: - msg = "Unable to deserialize to object: " + class_name # type: ignore - raise DeserializationError(msg) from err - additional_properties = self._build_additional_properties(attributes, data) - return self._instantiate_model(response, d_attrs, additional_properties) - - def _build_additional_properties(self, attribute_map, data): - if not self.additional_properties_detection: - return None - if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": - # Check empty string. If it's not empty, someone has a real "additionalProperties" - return None - if isinstance(data, ET.Element): - data = {el.tag: el.text for el in data} - - known_keys = { - _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) - for desc in attribute_map.values() - if desc["key"] != "" - } - present_keys = set(data.keys()) - missing_keys = present_keys - known_keys - return {key: data[key] for key in missing_keys} - - def _classify_target(self, target, data): - """Check to see whether the deserialization target object can - be classified into a subclass. - Once classification has been determined, initialize object. - - :param str target: The target object type to deserialize to. - :param str/dict data: The response data to deserialize. - :return: The classified target object and its class name. 
- :rtype: tuple - """ - if target is None: - return None, None - - if isinstance(target, str): - try: - target = self.dependencies[target] - except KeyError: - return target, target - - try: - target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access - except AttributeError: - pass # Target is not a Model, no classify - return target, target.__class__.__name__ # type: ignore - - def failsafe_deserialize(self, target_obj, data, content_type=None): - """Ignores any errors encountered in deserialization, - and falls back to not deserializing the object. Recommended - for use in error deserialization, as we want to return the - HttpResponseError to users, and not have them deal with - a deserialization error. - - :param str target_obj: The target object type to deserialize to. - :param str/dict data: The response data to deserialize. - :param str content_type: Swagger "produces" if available. - :return: Deserialized object. - :rtype: object - """ - try: - return self(target_obj, data, content_type=content_type) - except: # pylint: disable=bare-except - _LOGGER.debug( - "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True - ) - return None - - @staticmethod - def _unpack_content(raw_data, content_type=None): - """Extract the correct structure for deserialization. - - If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. - if we can't, raise. Your Pipeline should have a RawDeserializer. - - If not a pipeline response and raw_data is bytes or string, use content-type - to decode it. If no content-type, try JSON. - - If raw_data is something else, bypass all logic and return it directly. - - :param obj raw_data: Data to be processed. - :param str content_type: How to parse if raw_data is a string/bytes. - :raises JSONDecodeError: If JSON is requested and parsing is impossible. - :raises UnicodeDecodeError: If bytes is not UTF8 - :rtype: object - :return: Unpacked content. - """ - # Assume this is enough to detect a Pipeline Response without importing it - context = getattr(raw_data, "context", {}) - if context: - if RawDeserializer.CONTEXT_NAME in context: - return context[RawDeserializer.CONTEXT_NAME] - raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") - - # Assume this is enough to recognize universal_http.ClientResponse without importing it - if hasattr(raw_data, "body"): - return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) - - # Assume this enough to recognize requests.Response without importing it. - if hasattr(raw_data, "_content_consumed"): - return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) - - if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): - return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore - return raw_data - - def _instantiate_model(self, response, attrs, additional_properties=None): - """Instantiate a response model passing in deserialized args. - - :param Response response: The response model class. - :param dict attrs: The deserialized response attributes. - :param dict additional_properties: Additional properties to be set. - :rtype: Response - :return: The instantiated response model. 
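-
-        Readonly and constant attributes are excluded from the constructor call;
-        readonly values are then set on the instance directly, since generated
-        models do not accept them as keyword arguments.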
- """ - if callable(response): - subtype = getattr(response, "_subtype_map", {}) - try: - readonly = [ - k - for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore - if v.get("readonly") - ] - const = [ - k - for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore - if v.get("constant") - ] - kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} - response_obj = response(**kwargs) - for attr in readonly: - setattr(response_obj, attr, attrs.get(attr)) - if additional_properties: - response_obj.additional_properties = additional_properties # type: ignore - return response_obj - except TypeError as err: - msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore - raise DeserializationError(msg + str(err)) from err - else: - try: - for attr, value in attrs.items(): - setattr(response, attr, value) - return response - except Exception as exp: - msg = "Unable to populate response model. " - msg += "Type: {}, Error: {}".format(type(response), exp) - raise DeserializationError(msg) from exp - - def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements - """Process data for deserialization according to data type. - - :param str data: The response string to be deserialized. - :param str data_type: The type to deserialize to. - :raises: DeserializationError if deserialization fails. - :return: Deserialized object. - :rtype: object - """ - if data is None: - return data - - try: - if not data_type: - return data - if data_type in self.basic_types.values(): - return self.deserialize_basic(data, data_type) - if data_type in self.deserialize_type: - if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): - return data - - is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment - "object", - "[]", - r"{}", - ] - if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: - return None - data_val = self.deserialize_type[data_type](data) - return data_val - - iter_type = data_type[0] + data_type[-1] - if iter_type in self.deserialize_type: - return self.deserialize_type[iter_type](data, data_type[1:-1]) - - obj_type = self.dependencies[data_type] - if issubclass(obj_type, Enum): - if isinstance(data, ET.Element): - data = data.text - return self.deserialize_enum(data, obj_type) - - except (ValueError, TypeError, AttributeError) as err: - msg = "Unable to deserialize response data." - msg += " Data: {}, {}".format(data, data_type) - raise DeserializationError(msg) from err - return self._deserialize(obj_type, data) - - def deserialize_iter(self, attr, iter_type): - """Deserialize an iterable. - - :param list attr: Iterable to be deserialized. - :param str iter_type: The type of object in the iterable. - :return: Deserialized iterable. - :rtype: list - """ - if attr is None: - return None - if isinstance(attr, ET.Element): # If I receive an element here, get the children - attr = list(attr) - if not isinstance(attr, (list, set)): - raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) - return [self.deserialize_data(a, iter_type) for a in attr] - - def deserialize_dict(self, attr, dict_type): - """Deserialize a dictionary. - - :param dict/list attr: Dictionary to be deserialized. Also accepts - a list of key, value pairs. - :param str dict_type: The object type of the items in the dictionary. 
-        :return: Deserialized dictionary.
-        :rtype: dict
-        """
-        if isinstance(attr, list):
-            return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr}
-
-        if isinstance(attr, ET.Element):
-            # Transform <Key>value</Key> into {"Key": "value"}
-            attr = {el.tag: el.text for el in attr}
-        return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
-
-    def deserialize_object(self, attr, **kwargs):  # pylint: disable=too-many-return-statements
-        """Deserialize a generic object.
-        This will be handled as a dictionary.
-
-        :param dict attr: Dictionary to be deserialized.
-        :return: Deserialized object.
-        :rtype: dict
-        :raises: TypeError if non-builtin datatype encountered.
-        """
-        if attr is None:
-            return None
-        if isinstance(attr, ET.Element):
-            # Do no recurse on XML, just return the tree as-is
-            return attr
-        if isinstance(attr, str):
-            return self.deserialize_basic(attr, "str")
-        obj_type = type(attr)
-        if obj_type in self.basic_types:
-            return self.deserialize_basic(attr, self.basic_types[obj_type])
-        if obj_type is _long_type:
-            return self.deserialize_long(attr)
-
-        if obj_type == dict:
-            deserialized = {}
-            for key, value in attr.items():
-                try:
-                    deserialized[key] = self.deserialize_object(value, **kwargs)
-                except ValueError:
-                    deserialized[key] = None
-            return deserialized
-
-        if obj_type == list:
-            deserialized = []
-            for obj in attr:
-                try:
-                    deserialized.append(self.deserialize_object(obj, **kwargs))
-                except ValueError:
-                    pass
-            return deserialized
-
-        error = "Cannot deserialize generic object with type: "
-        raise TypeError(error + str(obj_type))
-
-    def deserialize_basic(self, attr, data_type):  # pylint: disable=too-many-return-statements
-        """Deserialize basic builtin data type from string.
-        Will attempt to convert to str, int, float and bool.
-        This function will also accept '1', '0', 'true' and 'false' as
-        valid bool values.
-
-        :param str attr: response string to be deserialized.
-        :param str data_type: deserialization data type.
-        :return: Deserialized basic type.
-        :rtype: str, int, float or bool
-        :raises: TypeError if string format is not valid.
-        """
-        # If we're here, data is supposed to be a basic type.
-        # If it's still an XML node, take the text
-        if isinstance(attr, ET.Element):
-            attr = attr.text
-        if not attr:
-            if data_type == "str":
-                # None or '', node <a/> is empty string.
-                return ""
-            # None or '', node <a/> with a strong type is None.
-            # Don't try to model "empty bool" or "empty int"
-            return None
-
-        if data_type == "bool":
-            if attr in [True, False, 1, 0]:
-                return bool(attr)
-            if isinstance(attr, str):
-                if attr.lower() in ["true", "1"]:
-                    return True
-                if attr.lower() in ["false", "0"]:
-                    return False
-            raise TypeError("Invalid boolean value: {}".format(attr))
-
-        if data_type == "str":
-            return self.deserialize_unicode(attr)
-        return eval(data_type)(attr)  # nosec # pylint: disable=eval-used
-
-    @staticmethod
-    def deserialize_unicode(data):
-        """Preserve unicode objects in Python 2, otherwise return data
-        as a string.
-
-        :param str data: response string to be deserialized.
-        :return: Deserialized string.
-        :rtype: str or unicode
-        """
-        # We might be here because we have an enum modeled as string,
-        # and we try to deserialize a partial dict with enum inside
-        if isinstance(data, Enum):
-            return data
-
-        # Consider this is real string
-        try:
-            if isinstance(data, unicode):  # type: ignore
-                return data
-        except NameError:
-            return str(data)
-        return str(data)
-
-    @staticmethod
-    def deserialize_enum(data, enum_obj):
-        """Deserialize string into enum object.
-
-        If the string is not a valid enum value it will be returned as-is
-        and a warning will be logged.
-
-        :param str data: Response string to be deserialized. If this value is
-         None or invalid it will be returned as-is.
-        :param Enum enum_obj: Enum object to deserialize to.
-        :return: Deserialized enum object.
-        :rtype: Enum
-        """
-        if isinstance(data, enum_obj) or data is None:
-            return data
-        if isinstance(data, Enum):
-            data = data.value
-        if isinstance(data, int):
-            # Workaround. We might consider removing it in the future.
-            try:
-                return list(enum_obj.__members__.values())[data]
-            except IndexError as exc:
-                error = "{!r} is not a valid index for enum {!r}"
-                raise DeserializationError(error.format(data, enum_obj)) from exc
-        try:
-            return enum_obj(str(data))
-        except ValueError:
-            for enum_value in enum_obj:
-                if enum_value.value.lower() == str(data).lower():
-                    return enum_value
-            # We don't fail anymore for unknown value, we deserialize as a string
-            _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj)
-            return Deserializer.deserialize_unicode(data)
-
-    @staticmethod
-    def deserialize_bytearray(attr):
-        """Deserialize string into bytearray.
-
-        :param str attr: response string to be deserialized.
-        :return: Deserialized bytearray
-        :rtype: bytearray
-        :raises: TypeError if string format invalid.
-        """
-        if isinstance(attr, ET.Element):
-            attr = attr.text
-        return bytearray(b64decode(attr))  # type: ignore
-
-    @staticmethod
-    def deserialize_base64(attr):
-        """Deserialize base64 encoded string into string.
-
-        :param str attr: response string to be deserialized.
-        :return: Deserialized base64 string
-        :rtype: bytearray
-        :raises: TypeError if string format invalid.
-        """
-        if isinstance(attr, ET.Element):
-            attr = attr.text
-        padding = "=" * (3 - (len(attr) + 3) % 4)  # type: ignore
-        attr = attr + padding  # type: ignore
-        encoded = attr.replace("-", "+").replace("_", "/")
-        return b64decode(encoded)
-
-    @staticmethod
-    def deserialize_decimal(attr):
-        """Deserialize string into Decimal object.
-
-        :param str attr: response string to be deserialized.
-        :return: Deserialized decimal
-        :raises: DeserializationError if string format invalid.
- :rtype: decimal - """ - if isinstance(attr, ET.Element): - attr = attr.text - try: - return decimal.Decimal(str(attr)) # type: ignore - except decimal.DecimalException as err: - msg = "Invalid decimal {}".format(attr) - raise DeserializationError(msg) from err - - @staticmethod - def deserialize_long(attr): - """Deserialize string into long (Py2) or int (Py3). - - :param str attr: response string to be deserialized. - :return: Deserialized int - :rtype: long or int - :raises: ValueError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - return _long_type(attr) # type: ignore - - @staticmethod - def deserialize_duration(attr): - """Deserialize ISO-8601 formatted string into TimeDelta object. - - :param str attr: response string to be deserialized. - :return: Deserialized duration - :rtype: TimeDelta - :raises: DeserializationError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - try: - duration = isodate.parse_duration(attr) - except (ValueError, OverflowError, AttributeError) as err: - msg = "Cannot deserialize duration object." - raise DeserializationError(msg) from err - return duration - - @staticmethod - def deserialize_date(attr): - """Deserialize ISO-8601 formatted string into Date object. - - :param str attr: response string to be deserialized. - :return: Deserialized date - :rtype: Date - :raises: DeserializationError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore - raise DeserializationError("Date must have only digits and -. Received: %s" % attr) - # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. - return isodate.parse_date(attr, defaultmonth=0, defaultday=0) - - @staticmethod - def deserialize_time(attr): - """Deserialize ISO-8601 formatted string into time object. - - :param str attr: response string to be deserialized. - :return: Deserialized time - :rtype: datetime.time - :raises: DeserializationError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore - raise DeserializationError("Date must have only digits and -. Received: %s" % attr) - return isodate.parse_time(attr) - - @staticmethod - def deserialize_rfc(attr): - """Deserialize RFC-1123 formatted string into Datetime object. - - :param str attr: response string to be deserialized. - :return: Deserialized RFC datetime - :rtype: Datetime - :raises: DeserializationError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - try: - parsed_date = email.utils.parsedate_tz(attr) # type: ignore - date_obj = datetime.datetime( - *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) - ) - if not date_obj.tzinfo: - date_obj = date_obj.astimezone(tz=TZ_UTC) - except ValueError as err: - msg = "Cannot deserialize to rfc datetime object." - raise DeserializationError(msg) from err - return date_obj - - @staticmethod - def deserialize_iso(attr): - """Deserialize ISO-8601 formatted string into Datetime object. - - :param str attr: response string to be deserialized. - :return: Deserialized ISO datetime - :rtype: Datetime - :raises: DeserializationError if string format invalid. 
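-
-        .. code:: python
-
-            # Illustrative: fractional seconds beyond six digits are truncated
-            Deserializer.deserialize_iso("2024-05-01T12:30:00.1234567Z")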
- """ - if isinstance(attr, ET.Element): - attr = attr.text - try: - attr = attr.upper() # type: ignore - match = Deserializer.valid_date.match(attr) - if not match: - raise ValueError("Invalid datetime string: " + attr) - - check_decimal = attr.split(".") - if len(check_decimal) > 1: - decimal_str = "" - for digit in check_decimal[1]: - if digit.isdigit(): - decimal_str += digit - else: - break - if len(decimal_str) > 6: - attr = attr.replace(decimal_str, decimal_str[0:6]) - - date_obj = isodate.parse_datetime(attr) - test_utc = date_obj.utctimetuple() - if test_utc.tm_year > 9999 or test_utc.tm_year < 1: - raise OverflowError("Hit max or min date") - except (ValueError, OverflowError, AttributeError) as err: - msg = "Cannot deserialize datetime object." - raise DeserializationError(msg) from err - return date_obj - - @staticmethod - def deserialize_unix(attr): - """Serialize Datetime object into IntTime format. - This is represented as seconds. - - :param int attr: Object to be serialized. - :return: Deserialized datetime - :rtype: Datetime - :raises: DeserializationError if format invalid - """ - if isinstance(attr, ET.Element): - attr = int(attr.text) # type: ignore - try: - attr = int(attr) - date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) - except ValueError as err: - msg = "Cannot deserialize to unix datetime object." - raise DeserializationError(msg) from err - return date_obj diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/__init__.py deleted file mode 100644 index f1f7797cdfcb..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -# pylint: disable=wrong-import-position - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import - -from ._client import MonitorBatchMetricsClient # type: ignore - -try: - from ._patch import __all__ as _patch_all - from ._patch import * -except ImportError: - _patch_all = [] -from ._patch import patch_sdk as _patch_sdk - -__all__ = [ - "MonitorBatchMetricsClient", -] -__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore - -_patch_sdk() diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/_patch.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/_patch.py deleted file mode 100644 index f7dd32510333..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/_patch.py +++ /dev/null @@ -1,20 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -"""Customize generated code here. 
- -Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize -""" -from typing import List - -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level - - -def patch_sdk(): - """Do not remove from this file. - - `patch_sdk` is a last resort escape hatch that allows you to do customizations - you can't accomplish using the techniques described in - https://aka.ms/azsdk/python/dpcodegen/python/customize - """ diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/_operations.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/_operations.py deleted file mode 100644 index ac2d727ced4d..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/_operations.py +++ /dev/null @@ -1,537 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, cast, overload - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import AsyncHttpResponse, HttpRequest -from azure.core.tracing.decorator_async import distributed_trace_async -from azure.core.utils import case_insensitive_dict - -from ...operations._operations import build_metrics_batch_batch_request - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] - - -class MetricsBatchOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~monitor_batch_metrics_client.aio.MonitorBatchMetricsClient`'s - :attr:`metrics_batch` attribute. 
- """ - - def __init__(self, *args, **kwargs) -> None: - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @overload - async def batch( - self, - subscription_id: str, - batch_request: JSON, - *, - metricnamespace: str, - metricnames: List[str], - starttime: Optional[str] = None, - endtime: Optional[str] = None, - interval: Optional[datetime.timedelta] = None, - aggregation: Optional[str] = None, - top: Optional[int] = None, - orderby: Optional[str] = None, - filter: Optional[str] = None, - rollupby: Optional[str] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> JSON: - """Lists the metric values for multiple resources. - - :param subscription_id: The subscription identifier for the resources in this batch. Required. - :type subscription_id: str - :param batch_request: Metrics batch body including the list of resource ids. Required. - :type batch_request: JSON - :keyword metricnamespace: Metric namespace that contains the requested metric names. Required. - :paramtype metricnamespace: str - :keyword metricnames: The names of the metrics (comma separated) to retrieve. Required. - :paramtype metricnames: list[str] - :keyword starttime: The start time of the query. It is a string in the format - 'yyyy-MM-ddTHH:mm:ss.fffZ'. If you have specified the endtime parameter, then this parameter is - required. - If only starttime is specified, then endtime defaults to the current time. - If no time interval is specified, the default is 1 hour. Default value is None. - :paramtype starttime: str - :keyword endtime: The end time of the query. It is a string in the format - 'yyyy-MM-ddTHH:mm:ss.fffZ'. Default value is None. - :paramtype endtime: str - :keyword interval: The interval (i.e. timegrain) of the query in ISO 8601 duration format. - Defaults to PT1M. Special case for 'FULL' value that returns single datapoint for entire time - span requested. - *Examples: PT15M, PT1H, P1D, FULL*. Default value is None. - :paramtype interval: ~datetime.timedelta - :keyword aggregation: The list of aggregation types (comma separated) to retrieve. - *Examples: average, minimum, maximum*. Default value is None. - :paramtype aggregation: str - :keyword top: The maximum number of records to retrieve per resource ID in the request. - Valid only if filter is specified. - Defaults to 10. Default value is None. - :paramtype top: int - :keyword orderby: The aggregation to use for sorting results and the direction of the sort. - Only one order can be specified. - *Examples: sum asc*. Default value is None. - :paramtype orderby: str - :keyword filter: The filter is used to reduce the set of metric data - returned.:code:`
<br>`Example::code:`<br>`Metric contains metadata A, B and C.:code:`<br>`- - Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\\ **filter=A eq ‘a1’ and - B eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\\ :code:`<br>`- Invalid variant::code:`<br>`\\ **filter=A - eq ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\\ :code:`<br>`This is invalid because the - logical or operator cannot separate two different metadata names.:code:`<br>`- Return all time - series where A = a1, B = b1 and C = c1::code:`<br>`\\ **filter=A eq ‘a1’ and B eq ‘b1’ and C eq - ‘c1’**\\ :code:`<br>`- Return all time series where A = a1:code:`<br>
`\\ **filter=A eq ‘a1’ and - B eq ‘\\ *’ and C eq ‘*\\ ’**. Default value is None. - :paramtype filter: str - :keyword rollupby: Dimension name(s) to rollup results by. For example if you only want to see - metric values with a filter like 'City eq Seattle or City eq Tacoma' but don't want to see - separate values for each city, you can specify 'RollUpBy=City' to see the results for Seattle - and Tacoma rolled up into one timeseries. Default value is None. - :paramtype rollupby: str - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - batch_request = { - "resourceids": [ - "str" - ] - } - - # response body for status code(s): 200 - response == { - "values": [ - { - "endtime": "str", - "starttime": "str", - "value": [ - { - "id": "str", - "name": { - "value": "str", - "localizedValue": "str" - }, - "timeseries": [ - { - "data": [ - { - "timeStamp": - "2020-02-20 00:00:00", - "average": - 0.0, - "count": 0.0, - "maximum": - 0.0, - "minimum": - 0.0, - "total": 0.0 - } - ], - "metadatavalues": [ - { - "name": { - "value": "str", - "localizedValue": "str" - }, - "value": - "str" - } - ] - } - ], - "type": "str", - "unit": "str", - "displayDescription": "str", - "errorCode": "str", - "errorMessage": "str" - } - ], - "interval": "str", - "namespace": "str", - "resourceid": "str", - "resourceregion": "str" - } - ] - } - """ - - @overload - async def batch( - self, - subscription_id: str, - batch_request: IO[bytes], - *, - metricnamespace: str, - metricnames: List[str], - starttime: Optional[str] = None, - endtime: Optional[str] = None, - interval: Optional[datetime.timedelta] = None, - aggregation: Optional[str] = None, - top: Optional[int] = None, - orderby: Optional[str] = None, - filter: Optional[str] = None, - rollupby: Optional[str] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> JSON: - """Lists the metric values for multiple resources. - - :param subscription_id: The subscription identifier for the resources in this batch. Required. - :type subscription_id: str - :param batch_request: Metrics batch body including the list of resource ids. Required. - :type batch_request: IO[bytes] - :keyword metricnamespace: Metric namespace that contains the requested metric names. Required. - :paramtype metricnamespace: str - :keyword metricnames: The names of the metrics (comma separated) to retrieve. Required. - :paramtype metricnames: list[str] - :keyword starttime: The start time of the query. It is a string in the format - 'yyyy-MM-ddTHH:mm:ss.fffZ'. If you have specified the endtime parameter, then this parameter is - required. - If only starttime is specified, then endtime defaults to the current time. - If no time interval is specified, the default is 1 hour. Default value is None. - :paramtype starttime: str - :keyword endtime: The end time of the query. It is a string in the format - 'yyyy-MM-ddTHH:mm:ss.fffZ'. Default value is None. - :paramtype endtime: str - :keyword interval: The interval (i.e. timegrain) of the query in ISO 8601 duration format. - Defaults to PT1M. Special case for 'FULL' value that returns single datapoint for entire time - span requested. - *Examples: PT15M, PT1H, P1D, FULL*. Default value is None. 
- :paramtype interval: ~datetime.timedelta - :keyword aggregation: The list of aggregation types (comma separated) to retrieve. - *Examples: average, minimum, maximum*. Default value is None. - :paramtype aggregation: str - :keyword top: The maximum number of records to retrieve per resource ID in the request. - Valid only if filter is specified. - Defaults to 10. Default value is None. - :paramtype top: int - :keyword orderby: The aggregation to use for sorting results and the direction of the sort. - Only one order can be specified. - *Examples: sum asc*. Default value is None. - :paramtype orderby: str - :keyword filter: The filter is used to reduce the set of metric data - returned.:code:`
<br>`Example::code:`<br>`Metric contains metadata A, B and C.:code:`<br>`- - Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\\ **filter=A eq ‘a1’ and - B eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\\ :code:`<br>`- Invalid variant::code:`<br>`\\ **filter=A - eq ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\\ :code:`<br>`This is invalid because the - logical or operator cannot separate two different metadata names.:code:`<br>`- Return all time - series where A = a1, B = b1 and C = c1::code:`<br>`\\ **filter=A eq ‘a1’ and B eq ‘b1’ and C eq - ‘c1’**\\ :code:`<br>`- Return all time series where A = a1:code:`<br>
`\\ **filter=A eq ‘a1’ and - B eq ‘\\ *’ and C eq ‘*\\ ’**. Default value is None. - :paramtype filter: str - :keyword rollupby: Dimension name(s) to rollup results by. For example if you only want to see - metric values with a filter like 'City eq Seattle or City eq Tacoma' but don't want to see - separate values for each city, you can specify 'RollUpBy=City' to see the results for Seattle - and Tacoma rolled up into one timeseries. Default value is None. - :paramtype rollupby: str - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "values": [ - { - "endtime": "str", - "starttime": "str", - "value": [ - { - "id": "str", - "name": { - "value": "str", - "localizedValue": "str" - }, - "timeseries": [ - { - "data": [ - { - "timeStamp": - "2020-02-20 00:00:00", - "average": - 0.0, - "count": 0.0, - "maximum": - 0.0, - "minimum": - 0.0, - "total": 0.0 - } - ], - "metadatavalues": [ - { - "name": { - "value": "str", - "localizedValue": "str" - }, - "value": - "str" - } - ] - } - ], - "type": "str", - "unit": "str", - "displayDescription": "str", - "errorCode": "str", - "errorMessage": "str" - } - ], - "interval": "str", - "namespace": "str", - "resourceid": "str", - "resourceregion": "str" - } - ] - } - """ - - @distributed_trace_async - async def batch( - self, - subscription_id: str, - batch_request: Union[JSON, IO[bytes]], - *, - metricnamespace: str, - metricnames: List[str], - starttime: Optional[str] = None, - endtime: Optional[str] = None, - interval: Optional[datetime.timedelta] = None, - aggregation: Optional[str] = None, - top: Optional[int] = None, - orderby: Optional[str] = None, - filter: Optional[str] = None, - rollupby: Optional[str] = None, - **kwargs: Any - ) -> JSON: - """Lists the metric values for multiple resources. - - :param subscription_id: The subscription identifier for the resources in this batch. Required. - :type subscription_id: str - :param batch_request: Metrics batch body including the list of resource ids. Is either a JSON - type or a IO[bytes] type. Required. - :type batch_request: JSON or IO[bytes] - :keyword metricnamespace: Metric namespace that contains the requested metric names. Required. - :paramtype metricnamespace: str - :keyword metricnames: The names of the metrics (comma separated) to retrieve. Required. - :paramtype metricnames: list[str] - :keyword starttime: The start time of the query. It is a string in the format - 'yyyy-MM-ddTHH:mm:ss.fffZ'. If you have specified the endtime parameter, then this parameter is - required. - If only starttime is specified, then endtime defaults to the current time. - If no time interval is specified, the default is 1 hour. Default value is None. - :paramtype starttime: str - :keyword endtime: The end time of the query. It is a string in the format - 'yyyy-MM-ddTHH:mm:ss.fffZ'. Default value is None. - :paramtype endtime: str - :keyword interval: The interval (i.e. timegrain) of the query in ISO 8601 duration format. - Defaults to PT1M. Special case for 'FULL' value that returns single datapoint for entire time - span requested. - *Examples: PT15M, PT1H, P1D, FULL*. Default value is None. - :paramtype interval: ~datetime.timedelta - :keyword aggregation: The list of aggregation types (comma separated) to retrieve. 
- *Examples: average, minimum, maximum*. Default value is None. - :paramtype aggregation: str - :keyword top: The maximum number of records to retrieve per resource ID in the request. - Valid only if filter is specified. - Defaults to 10. Default value is None. - :paramtype top: int - :keyword orderby: The aggregation to use for sorting results and the direction of the sort. - Only one order can be specified. - *Examples: sum asc*. Default value is None. - :paramtype orderby: str - :keyword filter: The filter is used to reduce the set of metric data - returned.:code:`
<br>`Example::code:`<br>`Metric contains metadata A, B and C.:code:`<br>`- - Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\\ **filter=A eq ‘a1’ and - B eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\\ :code:`<br>`- Invalid variant::code:`<br>`\\ **filter=A - eq ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\\ :code:`<br>`This is invalid because the - logical or operator cannot separate two different metadata names.:code:`<br>`- Return all time - series where A = a1, B = b1 and C = c1::code:`<br>`\\ **filter=A eq ‘a1’ and B eq ‘b1’ and C eq - ‘c1’**\\ :code:`<br>`- Return all time series where A = a1:code:`<br>
`\\ **filter=A eq ‘a1’ and - B eq ‘\\ *’ and C eq ‘*\\ ’**. Default value is None. - :paramtype filter: str - :keyword rollupby: Dimension name(s) to rollup results by. For example if you only want to see - metric values with a filter like 'City eq Seattle or City eq Tacoma' but don't want to see - separate values for each city, you can specify 'RollUpBy=City' to see the results for Seattle - and Tacoma rolled up into one timeseries. Default value is None. - :paramtype rollupby: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - batch_request = { - "resourceids": [ - "str" - ] - } - - # response body for status code(s): 200 - response == { - "values": [ - { - "endtime": "str", - "starttime": "str", - "value": [ - { - "id": "str", - "name": { - "value": "str", - "localizedValue": "str" - }, - "timeseries": [ - { - "data": [ - { - "timeStamp": - "2020-02-20 00:00:00", - "average": - 0.0, - "count": 0.0, - "maximum": - 0.0, - "minimum": - 0.0, - "total": 0.0 - } - ], - "metadatavalues": [ - { - "name": { - "value": "str", - "localizedValue": "str" - }, - "value": - "str" - } - ] - } - ], - "type": "str", - "unit": "str", - "displayDescription": "str", - "errorCode": "str", - "errorMessage": "str" - } - ], - "interval": "str", - "namespace": "str", - "resourceid": "str", - "resourceregion": "str" - } - ] - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[JSON] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(batch_request, (IOBase, bytes)): - _content = batch_request - else: - _json = batch_request - - _request = build_metrics_batch_batch_request( - subscription_id=subscription_id, - metricnamespace=metricnamespace, - metricnames=metricnames, - starttime=starttime, - endtime=endtime, - interval=interval, - aggregation=aggregation, - top=top, - orderby=orderby, - filter=filter, - rollupby=rollupby, - content_type=content_type, - api_version=self._config.api_version, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - _stream = False - pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/_patch.py 
b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/_patch.py deleted file mode 100644 index f7dd32510333..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/_patch.py +++ /dev/null @@ -1,20 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -"""Customize generated code here. - -Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize -""" -from typing import List - -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level - - -def patch_sdk(): - """Do not remove from this file. - - `patch_sdk` is a last resort escape hatch that allows you to do customizations - you can't accomplish using the techniques described in - https://aka.ms/azsdk/python/dpcodegen/python/customize - """ diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/_operations.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/_operations.py deleted file mode 100644 index e4cd867f64fa..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/_operations.py +++ /dev/null @@ -1,599 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -import datetime -from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, List, Optional, TypeVar, Union, cast, overload - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict - -from .._serialization import Serializer - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_metrics_batch_batch_request( - subscription_id: str, - *, - metricnamespace: str, - metricnames: List[str], - starttime: Optional[str] = None, - endtime: Optional[str] = None, - interval: Optional[datetime.timedelta] = None, - aggregation: Optional[str] = None, - top: Optional[int] = None, - orderby: Optional[str] = None, - filter: Optional[str] = None, - rollupby: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-02-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/subscriptions/{subscriptionId}/metrics:getBatch" - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if starttime is not None: - _params["starttime"] = _SERIALIZER.query("starttime", starttime, "str") - if endtime is not None: - _params["endtime"] = _SERIALIZER.query("endtime", endtime, "str") - if interval is not None: - _params["interval"] = _SERIALIZER.query("interval", interval, "duration") - _params["metricnamespace"] = _SERIALIZER.query("metricnamespace", metricnamespace, "str") - _params["metricnames"] = _SERIALIZER.query("metricnames", metricnames, "[str]", div=",") - if aggregation is not None: - _params["aggregation"] = _SERIALIZER.query("aggregation", aggregation, "str") - if top is not None: - _params["top"] = _SERIALIZER.query("top", top, "int") - if orderby is not None: - _params["orderby"] = _SERIALIZER.query("orderby", orderby, "str") - if filter is not None: - _params["filter"] = _SERIALIZER.query("filter", filter, "str") - if rollupby is not None: - _params["rollupby"] = _SERIALIZER.query("rollupby", rollupby, "str") - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -class MetricsBatchOperations: - """ - .. 
warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~monitor_batch_metrics_client.MonitorBatchMetricsClient`'s - :attr:`metrics_batch` attribute. - """ - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @overload - def batch( - self, - subscription_id: str, - batch_request: JSON, - *, - metricnamespace: str, - metricnames: List[str], - starttime: Optional[str] = None, - endtime: Optional[str] = None, - interval: Optional[datetime.timedelta] = None, - aggregation: Optional[str] = None, - top: Optional[int] = None, - orderby: Optional[str] = None, - filter: Optional[str] = None, - rollupby: Optional[str] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> JSON: - """Lists the metric values for multiple resources. - - :param subscription_id: The subscription identifier for the resources in this batch. Required. - :type subscription_id: str - :param batch_request: Metrics batch body including the list of resource ids. Required. - :type batch_request: JSON - :keyword metricnamespace: Metric namespace that contains the requested metric names. Required. - :paramtype metricnamespace: str - :keyword metricnames: The names of the metrics (comma separated) to retrieve. Required. - :paramtype metricnames: list[str] - :keyword starttime: The start time of the query. It is a string in the format - 'yyyy-MM-ddTHH:mm:ss.fffZ'. If you have specified the endtime parameter, then this parameter is - required. - If only starttime is specified, then endtime defaults to the current time. - If no time interval is specified, the default is 1 hour. Default value is None. - :paramtype starttime: str - :keyword endtime: The end time of the query. It is a string in the format - 'yyyy-MM-ddTHH:mm:ss.fffZ'. Default value is None. - :paramtype endtime: str - :keyword interval: The interval (i.e. timegrain) of the query in ISO 8601 duration format. - Defaults to PT1M. Special case for 'FULL' value that returns single datapoint for entire time - span requested. - *Examples: PT15M, PT1H, P1D, FULL*. Default value is None. - :paramtype interval: ~datetime.timedelta - :keyword aggregation: The list of aggregation types (comma separated) to retrieve. - *Examples: average, minimum, maximum*. Default value is None. - :paramtype aggregation: str - :keyword top: The maximum number of records to retrieve per resource ID in the request. - Valid only if filter is specified. - Defaults to 10. Default value is None. - :paramtype top: int - :keyword orderby: The aggregation to use for sorting results and the direction of the sort. - Only one order can be specified. - *Examples: sum asc*. Default value is None. - :paramtype orderby: str - :keyword filter: The filter is used to reduce the set of metric data - returned.:code:`
<br>`Example::code:`<br>`Metric contains metadata A, B and C.:code:`<br>`- - Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\\ **filter=A eq ‘a1’ and - B eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\\ :code:`<br>`- Invalid variant::code:`<br>`\\ **filter=A - eq ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\\ :code:`<br>`This is invalid because the - logical or operator cannot separate two different metadata names.:code:`<br>`- Return all time - series where A = a1, B = b1 and C = c1::code:`<br>`\\ **filter=A eq ‘a1’ and B eq ‘b1’ and C eq - ‘c1’**\\ :code:`<br>`- Return all time series where A = a1:code:`<br>
`\\ **filter=A eq ‘a1’ and - B eq ‘\\ *’ and C eq ‘*\\ ’**. Default value is None. - :paramtype filter: str - :keyword rollupby: Dimension name(s) to rollup results by. For example if you only want to see - metric values with a filter like 'City eq Seattle or City eq Tacoma' but don't want to see - separate values for each city, you can specify 'RollUpBy=City' to see the results for Seattle - and Tacoma rolled up into one timeseries. Default value is None. - :paramtype rollupby: str - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - batch_request = { - "resourceids": [ - "str" - ] - } - - # response body for status code(s): 200 - response == { - "values": [ - { - "endtime": "str", - "starttime": "str", - "value": [ - { - "id": "str", - "name": { - "value": "str", - "localizedValue": "str" - }, - "timeseries": [ - { - "data": [ - { - "timeStamp": - "2020-02-20 00:00:00", - "average": - 0.0, - "count": 0.0, - "maximum": - 0.0, - "minimum": - 0.0, - "total": 0.0 - } - ], - "metadatavalues": [ - { - "name": { - "value": "str", - "localizedValue": "str" - }, - "value": - "str" - } - ] - } - ], - "type": "str", - "unit": "str", - "displayDescription": "str", - "errorCode": "str", - "errorMessage": "str" - } - ], - "interval": "str", - "namespace": "str", - "resourceid": "str", - "resourceregion": "str" - } - ] - } - """ - - @overload - def batch( - self, - subscription_id: str, - batch_request: IO[bytes], - *, - metricnamespace: str, - metricnames: List[str], - starttime: Optional[str] = None, - endtime: Optional[str] = None, - interval: Optional[datetime.timedelta] = None, - aggregation: Optional[str] = None, - top: Optional[int] = None, - orderby: Optional[str] = None, - filter: Optional[str] = None, - rollupby: Optional[str] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> JSON: - """Lists the metric values for multiple resources. - - :param subscription_id: The subscription identifier for the resources in this batch. Required. - :type subscription_id: str - :param batch_request: Metrics batch body including the list of resource ids. Required. - :type batch_request: IO[bytes] - :keyword metricnamespace: Metric namespace that contains the requested metric names. Required. - :paramtype metricnamespace: str - :keyword metricnames: The names of the metrics (comma separated) to retrieve. Required. - :paramtype metricnames: list[str] - :keyword starttime: The start time of the query. It is a string in the format - 'yyyy-MM-ddTHH:mm:ss.fffZ'. If you have specified the endtime parameter, then this parameter is - required. - If only starttime is specified, then endtime defaults to the current time. - If no time interval is specified, the default is 1 hour. Default value is None. - :paramtype starttime: str - :keyword endtime: The end time of the query. It is a string in the format - 'yyyy-MM-ddTHH:mm:ss.fffZ'. Default value is None. - :paramtype endtime: str - :keyword interval: The interval (i.e. timegrain) of the query in ISO 8601 duration format. - Defaults to PT1M. Special case for 'FULL' value that returns single datapoint for entire time - span requested. - *Examples: PT15M, PT1H, P1D, FULL*. Default value is None. 
- :paramtype interval: ~datetime.timedelta - :keyword aggregation: The list of aggregation types (comma separated) to retrieve. - *Examples: average, minimum, maximum*. Default value is None. - :paramtype aggregation: str - :keyword top: The maximum number of records to retrieve per resource ID in the request. - Valid only if filter is specified. - Defaults to 10. Default value is None. - :paramtype top: int - :keyword orderby: The aggregation to use for sorting results and the direction of the sort. - Only one order can be specified. - *Examples: sum asc*. Default value is None. - :paramtype orderby: str - :keyword filter: The filter is used to reduce the set of metric data - returned.:code:`
<br>`Example::code:`<br>`Metric contains metadata A, B and C.:code:`<br>`- - Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\\ **filter=A eq ‘a1’ and - B eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\\ :code:`<br>`- Invalid variant::code:`<br>`\\ **filter=A - eq ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\\ :code:`<br>`This is invalid because the - logical or operator cannot separate two different metadata names.:code:`<br>`- Return all time - series where A = a1, B = b1 and C = c1::code:`<br>`\\ **filter=A eq ‘a1’ and B eq ‘b1’ and C eq - ‘c1’**\\ :code:`<br>`- Return all time series where A = a1:code:`<br>
`\\ **filter=A eq ‘a1’ and - B eq ‘\\ *’ and C eq ‘*\\ ’**. Default value is None. - :paramtype filter: str - :keyword rollupby: Dimension name(s) to rollup results by. For example if you only want to see - metric values with a filter like 'City eq Seattle or City eq Tacoma' but don't want to see - separate values for each city, you can specify 'RollUpBy=City' to see the results for Seattle - and Tacoma rolled up into one timeseries. Default value is None. - :paramtype rollupby: str - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "values": [ - { - "endtime": "str", - "starttime": "str", - "value": [ - { - "id": "str", - "name": { - "value": "str", - "localizedValue": "str" - }, - "timeseries": [ - { - "data": [ - { - "timeStamp": - "2020-02-20 00:00:00", - "average": - 0.0, - "count": 0.0, - "maximum": - 0.0, - "minimum": - 0.0, - "total": 0.0 - } - ], - "metadatavalues": [ - { - "name": { - "value": "str", - "localizedValue": "str" - }, - "value": - "str" - } - ] - } - ], - "type": "str", - "unit": "str", - "displayDescription": "str", - "errorCode": "str", - "errorMessage": "str" - } - ], - "interval": "str", - "namespace": "str", - "resourceid": "str", - "resourceregion": "str" - } - ] - } - """ - - @distributed_trace - def batch( - self, - subscription_id: str, - batch_request: Union[JSON, IO[bytes]], - *, - metricnamespace: str, - metricnames: List[str], - starttime: Optional[str] = None, - endtime: Optional[str] = None, - interval: Optional[datetime.timedelta] = None, - aggregation: Optional[str] = None, - top: Optional[int] = None, - orderby: Optional[str] = None, - filter: Optional[str] = None, - rollupby: Optional[str] = None, - **kwargs: Any - ) -> JSON: - """Lists the metric values for multiple resources. - - :param subscription_id: The subscription identifier for the resources in this batch. Required. - :type subscription_id: str - :param batch_request: Metrics batch body including the list of resource ids. Is either a JSON - type or a IO[bytes] type. Required. - :type batch_request: JSON or IO[bytes] - :keyword metricnamespace: Metric namespace that contains the requested metric names. Required. - :paramtype metricnamespace: str - :keyword metricnames: The names of the metrics (comma separated) to retrieve. Required. - :paramtype metricnames: list[str] - :keyword starttime: The start time of the query. It is a string in the format - 'yyyy-MM-ddTHH:mm:ss.fffZ'. If you have specified the endtime parameter, then this parameter is - required. - If only starttime is specified, then endtime defaults to the current time. - If no time interval is specified, the default is 1 hour. Default value is None. - :paramtype starttime: str - :keyword endtime: The end time of the query. It is a string in the format - 'yyyy-MM-ddTHH:mm:ss.fffZ'. Default value is None. - :paramtype endtime: str - :keyword interval: The interval (i.e. timegrain) of the query in ISO 8601 duration format. - Defaults to PT1M. Special case for 'FULL' value that returns single datapoint for entire time - span requested. - *Examples: PT15M, PT1H, P1D, FULL*. Default value is None. - :paramtype interval: ~datetime.timedelta - :keyword aggregation: The list of aggregation types (comma separated) to retrieve. 
- *Examples: average, minimum, maximum*. Default value is None. - :paramtype aggregation: str - :keyword top: The maximum number of records to retrieve per resource ID in the request. - Valid only if filter is specified. - Defaults to 10. Default value is None. - :paramtype top: int - :keyword orderby: The aggregation to use for sorting results and the direction of the sort. - Only one order can be specified. - *Examples: sum asc*. Default value is None. - :paramtype orderby: str - :keyword filter: The filter is used to reduce the set of metric data - returned.:code:`
<br>`Example::code:`<br>`Metric contains metadata A, B and C.:code:`<br>`- - Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\\ **filter=A eq ‘a1’ and - B eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\\ :code:`<br>`- Invalid variant::code:`<br>`\\ **filter=A - eq ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\\ :code:`<br>`This is invalid because the - logical or operator cannot separate two different metadata names.:code:`<br>`- Return all time - series where A = a1, B = b1 and C = c1::code:`<br>`\\ **filter=A eq ‘a1’ and B eq ‘b1’ and C eq - ‘c1’**\\ :code:`<br>`- Return all time series where A = a1:code:`<br>
`\\ **filter=A eq ‘a1’ and - B eq ‘\\ *’ and C eq ‘*\\ ’**. Default value is None. - :paramtype filter: str - :keyword rollupby: Dimension name(s) to rollup results by. For example if you only want to see - metric values with a filter like 'City eq Seattle or City eq Tacoma' but don't want to see - separate values for each city, you can specify 'RollUpBy=City' to see the results for Seattle - and Tacoma rolled up into one timeseries. Default value is None. - :paramtype rollupby: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - batch_request = { - "resourceids": [ - "str" - ] - } - - # response body for status code(s): 200 - response == { - "values": [ - { - "endtime": "str", - "starttime": "str", - "value": [ - { - "id": "str", - "name": { - "value": "str", - "localizedValue": "str" - }, - "timeseries": [ - { - "data": [ - { - "timeStamp": - "2020-02-20 00:00:00", - "average": - 0.0, - "count": 0.0, - "maximum": - 0.0, - "minimum": - 0.0, - "total": 0.0 - } - ], - "metadatavalues": [ - { - "name": { - "value": "str", - "localizedValue": "str" - }, - "value": - "str" - } - ] - } - ], - "type": "str", - "unit": "str", - "displayDescription": "str", - "errorCode": "str", - "errorMessage": "str" - } - ], - "interval": "str", - "namespace": "str", - "resourceid": "str", - "resourceregion": "str" - } - ] - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[JSON] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(batch_request, (IOBase, bytes)): - _content = batch_request - else: - _json = batch_request - - _request = build_metrics_batch_batch_request( - subscription_id=subscription_id, - metricnamespace=metricnamespace, - metricnames=metricnames, - starttime=starttime, - endtime=endtime, - interval=interval, - aggregation=aggregation, - top=top, - orderby=orderby, - filter=filter, - rollupby=rollupby, - content_type=content_type, - api_version=self._config.api_version, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/_patch.py 
b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/_patch.py deleted file mode 100644 index f7dd32510333..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/_patch.py +++ /dev/null @@ -1,20 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -"""Customize generated code here. - -Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize -""" -from typing import List - -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level - - -def patch_sdk(): - """Do not remove from this file. - - `patch_sdk` is a last resort escape hatch that allows you to do customizations - you can't accomplish using the techniques described in - https://aka.ms/azsdk/python/dpcodegen/python/customize - """ diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/py.typed b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/py.typed deleted file mode 100644 index e5aff4f83af8..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/py.typed +++ /dev/null @@ -1 +0,0 @@ -# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/__init__.py deleted file mode 100644 index 64e613d8c69b..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -# pylint: disable=wrong-import-position - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import - -from ._operations import MetricDefinitionsOperations # type: ignore -from ._operations import MetricsOperations # type: ignore -from ._operations import MetricNamespacesOperations # type: ignore - -from ._patch import __all__ as _patch_all -from ._patch import * -from ._patch import patch_sdk as _patch_sdk - -__all__ = [ - "MetricDefinitionsOperations", - "MetricsOperations", - "MetricNamespacesOperations", -] -__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore -_patch_sdk() diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/_operations.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/_operations.py deleted file mode 100644 index b8973c7292a6..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/_operations.py +++ /dev/null @@ -1,604 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -import datetime -import sys -from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, cast -import urllib.parse - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.paging import ItemPaged -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict - -from .._serialization import Serializer - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_metric_definitions_list_request( - resource_uri: str, *, metricnamespace: Optional[str] = None, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-02-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/{resourceUri}/providers/Microsoft.Insights/metricDefinitions" - path_format_arguments = { - "resourceUri": _SERIALIZER.url("resource_uri", resource_uri, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if metricnamespace is not None: - _params["metricnamespace"] = _SERIALIZER.query("metricnamespace", metricnamespace, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_metrics_list_request( - resource_uri: str, - *, - timespan: Optional[str] = None, - interval: Optional[datetime.timedelta] = None, - metricnames: Optional[str] = None, - aggregation: Optional[str] = None, - top: Optional[int] = None, - orderby: Optional[str] = None, - filter: Optional[str] = None, - result_type: Optional[str] = None, - metricnamespace: Optional[str] = None, - auto_adjust_timegrain: Optional[bool] = None, - validate_dimensions: Optional[bool] = None, - rollupby: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-02-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/{resourceUri}/providers/Microsoft.Insights/metrics" - path_format_arguments = { - "resourceUri": _SERIALIZER.url("resource_uri", resource_uri, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - if 
timespan is not None: - _params["timespan"] = _SERIALIZER.query("timespan", timespan, "str") - if interval is not None: - _params["interval"] = _SERIALIZER.query("interval", interval, "duration") - if metricnames is not None: - _params["metricnames"] = _SERIALIZER.query("metricnames", metricnames, "str") - if aggregation is not None: - _params["aggregation"] = _SERIALIZER.query("aggregation", aggregation, "str") - if top is not None: - _params["top"] = _SERIALIZER.query("top", top, "int") - if orderby is not None: - _params["orderby"] = _SERIALIZER.query("orderby", orderby, "str") - if filter is not None: - _params["$filter"] = _SERIALIZER.query("filter", filter, "str") - if result_type is not None: - _params["resultType"] = _SERIALIZER.query("result_type", result_type, "str") - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if metricnamespace is not None: - _params["metricnamespace"] = _SERIALIZER.query("metricnamespace", metricnamespace, "str") - if auto_adjust_timegrain is not None: - _params["AutoAdjustTimegrain"] = _SERIALIZER.query("auto_adjust_timegrain", auto_adjust_timegrain, "bool") - if validate_dimensions is not None: - _params["ValidateDimensions"] = _SERIALIZER.query("validate_dimensions", validate_dimensions, "bool") - if rollupby is not None: - _params["rollupby"] = _SERIALIZER.query("rollupby", rollupby, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_metric_namespaces_list_request( - resource_uri: str, *, start_time: Optional[str] = None, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-02-01")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/{resourceUri}/providers/microsoft.insights/metricNamespaces" - path_format_arguments = { - "resourceUri": _SERIALIZER.url("resource_uri", resource_uri, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if start_time is not None: - _params["startTime"] = _SERIALIZER.query("start_time", start_time, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class MetricDefinitionsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~monitor_metrics_client.MonitorMetricsClient`'s - :attr:`metric_definitions` attribute. - """ - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list(self, resource_uri: str, *, metricnamespace: Optional[str] = None, **kwargs: Any) -> Iterable[JSON]: - """Lists the metric definitions for the resource. 
- - :param resource_uri: The identifier of the resource. Required. - :type resource_uri: str - :keyword metricnamespace: Metric namespace where the metrics you want reside. Default value is - None. - :paramtype metricnamespace: str - :return: An iterator like instance of JSON object - :rtype: ~azure.core.paging.ItemPaged[JSON] - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "category": "str", - "dimensions": [ - { - "value": "str", - "localizedValue": "str" - } - ], - "displayDescription": "str", - "id": "str", - "isDimensionRequired": bool, - "metricAvailabilities": [ - { - "retention": "1 day, 0:00:00", - "timeGrain": "1 day, 0:00:00" - } - ], - "metricClass": "str", - "name": { - "value": "str", - "localizedValue": "str" - }, - "namespace": "str", - "primaryAggregationType": "str", - "resourceId": "str", - "supportedAggregationTypes": [ - "str" - ], - "unit": "str" - } - """ - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[JSON] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_metric_definitions_list_request( - resource_uri=resource_uri, - metricnamespace=metricnamespace, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - - return _request - - def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = deserialized["value"] - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - -class MetricsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~monitor_metrics_client.MonitorMetricsClient`'s - :attr:`metrics` attribute. 
- """ - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, - resource_uri: str, - *, - timespan: Optional[str] = None, - interval: Optional[datetime.timedelta] = None, - metricnames: Optional[str] = None, - aggregation: Optional[str] = None, - top: Optional[int] = None, - orderby: Optional[str] = None, - filter: Optional[str] = None, - result_type: Optional[str] = None, - metricnamespace: Optional[str] = None, - auto_adjust_timegrain: Optional[bool] = None, - validate_dimensions: Optional[bool] = None, - rollupby: Optional[str] = None, - **kwargs: Any - ) -> JSON: - """**Lists the metric values for a resource**. - - :param resource_uri: The identifier of the resource. Required. - :type resource_uri: str - :keyword timespan: The timespan of the query. It is a string with the following format - 'startDateTime_ISO/endDateTime_ISO'. Default value is None. - :paramtype timespan: str - :keyword interval: The interval (i.e. timegrain) of the query in ISO 8601 duration format. - Defaults to PT1M. Special case for 'FULL' value that returns single datapoint for entire time - span requested. - *Examples: PT15M, PT1H, P1D, FULL*. Default value is None. - :paramtype interval: ~datetime.timedelta - :keyword metricnames: The names of the metrics (comma separated) to retrieve. Default value is - None. - :paramtype metricnames: str - :keyword aggregation: The list of aggregation types (comma separated) to retrieve. - *Examples: average, minimum, maximum*. Default value is None. - :paramtype aggregation: str - :keyword top: The maximum number of records to retrieve per resource ID in the request. - Valid only if filter is specified. - Defaults to 10. Default value is None. - :paramtype top: int - :keyword orderby: The aggregation to use for sorting results and the direction of the sort. - Only one order can be specified. - *Examples: sum asc*. Default value is None. - :paramtype orderby: str - :keyword filter: The **$filter** is used to reduce the set of metric data - returned.:code:`
<br>`Example::code:`<br>`Metric contains metadata A, B and C.:code:`<br>`- - Return all time series of C where A = a1 and B = b1 or b2:code:`<br>`\\ **$filter=A eq ‘a1’ and - B eq ‘b1’ or B eq ‘b2’ and C eq ‘*’**\\ :code:`<br>`- Invalid variant::code:`<br>`\\ - **$filter=A eq ‘a1’ and B eq ‘b1’ and C eq ‘*’ or B = ‘b2’**\\ :code:`<br>`This is invalid - because the logical or operator cannot separate two different metadata names.:code:`<br>`- - Return all time series where A = a1, B = b1 and C = c1::code:`<br>`\\ **$filter=A eq ‘a1’ and B - eq ‘b1’ and C eq ‘c1’**\\ :code:`<br>`- Return all time series where A = a1:code:`<br>
`\\ - **$filter=A eq ‘a1’ and B eq ‘\\ *’ and C eq ‘*\\ ’**. Default value is None. - :paramtype filter: str - :keyword result_type: Reduces the set of data collected. The syntax allowed depends on the - operation. See the operation's description for details. Known values are: "Data" and - "Metadata". Default value is None. - :paramtype result_type: str - :keyword metricnamespace: Metric namespace where the metrics you want reside. Default value is - None. - :paramtype metricnamespace: str - :keyword auto_adjust_timegrain: When set to true, if the timespan passed in is not supported by - this metric, the API will return the result using the closest supported timespan. When set to - false, an error is returned for invalid timespan parameters. Defaults to false. Default value - is None. - :paramtype auto_adjust_timegrain: bool - :keyword validate_dimensions: When set to false, invalid filter parameter values will be - ignored. When set to true, an error is returned for invalid filter parameters. Defaults to - true. Default value is None. - :paramtype validate_dimensions: bool - :keyword rollupby: Dimension name(s) to rollup results by. For example if you only want to see - metric values with a filter like 'City eq Seattle or City eq Tacoma' but don't want to see - separate values for each city, you can specify 'RollUpBy=City' to see the results for Seattle - and Tacoma rolled up into one timeseries. Default value is None. - :paramtype rollupby: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "timespan": "str", - "value": [ - { - "id": "str", - "name": { - "value": "str", - "localizedValue": "str" - }, - "timeseries": [ - { - "data": [ - { - "timeStamp": "2020-02-20 - 00:00:00", - "average": 0.0, - "count": 0.0, - "maximum": 0.0, - "minimum": 0.0, - "total": 0.0 - } - ], - "metadatavalues": [ - { - "name": { - "value": "str", - "localizedValue": - "str" - }, - "value": "str" - } - ] - } - ], - "type": "str", - "unit": "str", - "displayDescription": "str", - "errorCode": "str", - "errorMessage": "str" - } - ], - "cost": 0, - "interval": "str", - "namespace": "str", - "resourceregion": "str" - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[JSON] = kwargs.pop("cls", None) - - _request = build_metrics_list_request( - resource_uri=resource_uri, - timespan=timespan, - interval=interval, - metricnames=metricnames, - aggregation=aggregation, - top=top, - orderby=orderby, - filter=filter, - result_type=result_type, - metricnamespace=metricnamespace, - auto_adjust_timegrain=auto_adjust_timegrain, - validate_dimensions=validate_dimensions, - rollupby=rollupby, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if 
response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore - - -class MetricNamespacesOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~monitor_metrics_client.MonitorMetricsClient`'s - :attr:`metric_namespaces` attribute. - """ - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list(self, resource_uri: str, *, start_time: Optional[str] = None, **kwargs: Any) -> Iterable[JSON]: - """Lists the metric namespaces for the resource. - - :param resource_uri: The identifier of the resource. Required. - :type resource_uri: str - :keyword start_time: The ISO 8601 conform Date start time from which to query for metric - namespaces. Default value is None. - :paramtype start_time: str - :return: An iterator like instance of JSON object - :rtype: ~azure.core.paging.ItemPaged[JSON] - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "classification": "str", - "id": "str", - "name": "str", - "properties": { - "metricNamespaceName": "str" - }, - "type": "str" - } - """ - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[JSON] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_metric_namespaces_list_request( - resource_uri=resource_uri, - start_time=start_time, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - _request.url = self._client.format_url(_request.url) - - return _request - - def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = deserialized["value"] - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, 
error_map=error_map) - raise HttpResponseError(response=response) - - return pipeline_response - - return ItemPaged(get_next, extract_data) diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/_patch.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/_patch.py deleted file mode 100644 index f7dd32510333..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/operations/_patch.py +++ /dev/null @@ -1,20 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -"""Customize generated code here. - -Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize -""" -from typing import List - -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level - - -def patch_sdk(): - """Do not remove from this file. - - `patch_sdk` is a last resort escape hatch that allows you to do customizations - you can't accomplish using the techniques described in - https://aka.ms/azsdk/python/dpcodegen/python/customize - """ diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/py.typed b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/py.typed deleted file mode 100644 index e5aff4f83af8..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/py.typed +++ /dev/null @@ -1 +0,0 @@ -# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/_operations.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/_operations.py deleted file mode 100644 index 875c13d68051..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/_operations.py +++ /dev/null @@ -1,2392 +0,0 @@ -# pylint: disable=too-many-lines -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
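One recurring detail in both the deleted metrics operations above and the logs operations that follow: non-200 responses go through ``map_error``, which raises the typed azure-core exception registered in ``error_map`` for that status code before falling back to the generic ``HttpResponseError``. A rough sketch; the stand-in response class is purely illustrative:

.. code-block:: python

    from azure.core.exceptions import (
        ClientAuthenticationError,
        ResourceNotFoundError,
        map_error,
    )

    class _FakeResponse:
        # Illustrative stand-in exposing the attributes the exception reads.
        status_code = 404
        reason = "Not Found"
        headers = {}

        def text(self):
            return ""

    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError}

    try:
        map_error(status_code=404, response=_FakeResponse(), error_map=error_map)
    except ResourceNotFoundError as exc:
        print(type(exc).__name__)  # ResourceNotFoundError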
-# -------------------------------------------------------------------------- -import datetime -from io import IOBase -import sys -from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.tracing.decorator import distributed_trace -from azure.core.utils import case_insensitive_dict - -from .._serialization import Serializer - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object -T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_query_get_request( - workspace_id: str, *, query: str, timespan: Optional[datetime.timedelta] = None, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/workspaces/{workspaceId}/query" - path_format_arguments = { - "workspaceId": _SERIALIZER.url("workspace_id", workspace_id, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["query"] = _SERIALIZER.query("query", query, "str") - if timespan is not None: - _params["timespan"] = _SERIALIZER.query("timespan", timespan, "duration") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_query_execute_request(workspace_id: str, *, prefer: Optional[str] = None, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/workspaces/{workspaceId}/query" - path_format_arguments = { - "workspaceId": _SERIALIZER.url("workspace_id", workspace_id, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct headers - if prefer is not None: - _headers["Prefer"] = _SERIALIZER.header("prefer", prefer, "str") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) - - -def build_query_resource_get_request( - resource_id: str, *, query: str, timespan: Optional[datetime.timedelta] = None, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/{resourceId}/query" - path_format_arguments = { - "resourceId": _SERIALIZER.url("resource_id", resource_id, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # 
Construct parameters - _params["query"] = _SERIALIZER.query("query", query, "str") - if timespan is not None: - _params["timespan"] = _SERIALIZER.query("timespan", timespan, "duration") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_query_resource_execute_request( - resource_id: str, *, prefer: Optional[str] = None, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/{resourceId}/query" - path_format_arguments = { - "resourceId": _SERIALIZER.url("resource_id", resource_id, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct headers - if prefer is not None: - _headers["Prefer"] = _SERIALIZER.header("prefer", prefer, "str") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) - - -def build_query_batch_request(**kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/$batch" - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) - - -def build_query_resource_get_xms_request( - resource_id: str, *, query: str, timespan: Optional[datetime.timedelta] = None, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/{resourceId}/query" - path_format_arguments = { - "resourceId": _SERIALIZER.url("resource_id", resource_id, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["query"] = _SERIALIZER.query("query", query, "str") - if timespan is not None: - _params["timespan"] = _SERIALIZER.query("timespan", timespan, "duration") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_query_resource_execute_xms_request( - resource_id: str, *, prefer: Optional[str] = None, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/{resourceId}/query" - path_format_arguments = { - "resourceId": _SERIALIZER.url("resource_id", resource_id, "str", skip_quote=True), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct headers - if prefer is not None: - 
_headers["Prefer"] = _SERIALIZER.header("prefer", prefer, "str") - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) - - -def build_metadata_get_request(workspace_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/workspaces/{workspaceId}/metadata" - path_format_arguments = { - "workspaceId": _SERIALIZER.url("workspace_id", workspace_id, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, headers=_headers, **kwargs) - - -def build_metadata_post_request(workspace_id: str, **kwargs: Any) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/workspaces/{workspaceId}/metadata" - path_format_arguments = { - "workspaceId": _SERIALIZER.url("workspace_id", workspace_id, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) - - -class QueryOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~monitor_query_client.MonitorQueryClient`'s - :attr:`query` attribute. - """ - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def get( - self, workspace_id: str, *, query: str, timespan: Optional[datetime.timedelta] = None, **kwargs: Any - ) -> JSON: - """Execute an Analytics query. - - Executes an Analytics query for data. - - :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the - Properties blade in the Azure portal. Required. - :type workspace_id: str - :keyword query: The Analytics query. Learn more about the `Analytics query syntax - `_. - Required. - :paramtype query: str - :keyword timespan: Optional. The timespan over which to query data. This is an ISO8601 time - period value. This timespan is applied in addition to any that are specified in the query - expression. Default value is None. - :paramtype timespan: ~datetime.timedelta - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... 
- }, - "render": {}, - "statistics": {} - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[JSON] = kwargs.pop("cls", None) - - _request = build_query_get_request( - workspace_id=workspace_id, - query=query, - timespan=timespan, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore - - @overload - def execute( - self, - workspace_id: str, - body: JSON, - *, - prefer: Optional[str] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> JSON: - """Execute an Analytics query. - - Executes an Analytics query for data. `Here - `_ is an example for - using POST with an Analytics query. - - :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the - Properties blade in the Azure portal. Required. - :type workspace_id: str - :param body: The Analytics query. Learn more about the `Analytics query syntax - `_. - Required. - :type body: JSON - :keyword prefer: Optional. The prefer header to set server timeout, query statistics and - visualization information. Default value is None. - :paramtype prefer: str - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - body = { - "query": "str", - "timespan": "str", - "workspaces": [ - "str" - ] - } - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... - }, - "render": {}, - "statistics": {} - } - """ - - @overload - def execute( - self, - workspace_id: str, - body: IO[bytes], - *, - prefer: Optional[str] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> JSON: - """Execute an Analytics query. - - Executes an Analytics query for data. `Here - `_ is an example for - using POST with an Analytics query. - - :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the - Properties blade in the Azure portal. Required. - :type workspace_id: str - :param body: The Analytics query. Learn more about the `Analytics query syntax - `_. - Required. - :type body: IO[bytes] - :keyword prefer: Optional. 
The prefer header to set server timeout, query statistics and - visualization information. Default value is None. - :paramtype prefer: str - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... - }, - "render": {}, - "statistics": {} - } - """ - - @distributed_trace - def execute( - self, workspace_id: str, body: Union[JSON, IO[bytes]], *, prefer: Optional[str] = None, **kwargs: Any - ) -> JSON: - """Execute an Analytics query. - - Executes an Analytics query for data. `Here - `_ is an example for - using POST with an Analytics query. - - :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the - Properties blade in the Azure portal. Required. - :type workspace_id: str - :param body: The Analytics query. Learn more about the `Analytics query syntax - `_. Is - either a JSON type or a IO[bytes] type. Required. - :type body: JSON or IO[bytes] - :keyword prefer: Optional. The prefer header to set server timeout, query statistics and - visualization information. Default value is None. - :paramtype prefer: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - body = { - "query": "str", - "timespan": "str", - "workspaces": [ - "str" - ] - } - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... 
- }, - "render": {}, - "statistics": {} - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[JSON] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(body, (IOBase, bytes)): - _content = body - else: - _json = body - - _request = build_query_execute_request( - workspace_id=workspace_id, - prefer=prefer, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore - - @distributed_trace - def resource_get( - self, resource_id: str, *, query: str, timespan: Optional[datetime.timedelta] = None, **kwargs: Any - ) -> JSON: - """Execute an Analytics query using resource URI. - - Executes an Analytics query for data in the context of a resource. `Here - `_ is an - example for using POST with an Analytics query. - - :param resource_id: The identifier of the resource. Required. - :type resource_id: str - :keyword query: The Analytics query. Learn more about the `Analytics query syntax - `_. - Required. - :paramtype query: str - :keyword timespan: Optional. The timespan over which to query data. This is an ISO8601 time - period value. This timespan is applied in addition to any that are specified in the query - expression. Default value is None. - :paramtype timespan: ~datetime.timedelta - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... 
- }, - "render": {}, - "statistics": {} - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[JSON] = kwargs.pop("cls", None) - - _request = build_query_resource_get_request( - resource_id=resource_id, - query=query, - timespan=timespan, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore - - @overload - def resource_execute( - self, - resource_id: str, - body: JSON, - *, - prefer: Optional[str] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> JSON: - """Execute an Analytics query using resource ID. - - Executes an Analytics query for data in the context of a resource. `Here - `_ is an - example for using POST with an Analytics query. - - :param resource_id: The identifier of the resource. Required. - :type resource_id: str - :param body: The Analytics query. Learn more about the `Analytics query syntax - `_. - Required. - :type body: JSON - :keyword prefer: Optional. The prefer header to set server timeout, query statistics and - visualization information. Default value is None. - :paramtype prefer: str - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - body = { - "query": "str", - "timespan": "str", - "workspaces": [ - "str" - ] - } - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... - }, - "render": {}, - "statistics": {} - } - """ - - @overload - def resource_execute( - self, - resource_id: str, - body: IO[bytes], - *, - prefer: Optional[str] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> JSON: - """Execute an Analytics query using resource ID. - - Executes an Analytics query for data in the context of a resource. `Here - `_ is an - example for using POST with an Analytics query. - - :param resource_id: The identifier of the resource. Required. - :type resource_id: str - :param body: The Analytics query. Learn more about the `Analytics query syntax - `_. - Required. - :type body: IO[bytes] - :keyword prefer: Optional. 
The prefer header to set server timeout, query statistics and - visualization information. Default value is None. - :paramtype prefer: str - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... - }, - "render": {}, - "statistics": {} - } - """ - - @distributed_trace - def resource_execute( - self, resource_id: str, body: Union[JSON, IO[bytes]], *, prefer: Optional[str] = None, **kwargs: Any - ) -> JSON: - """Execute an Analytics query using resource ID. - - Executes an Analytics query for data in the context of a resource. `Here - `_ is an - example for using POST with an Analytics query. - - :param resource_id: The identifier of the resource. Required. - :type resource_id: str - :param body: The Analytics query. Learn more about the `Analytics query syntax - `_. Is - either a JSON type or a IO[bytes] type. Required. - :type body: JSON or IO[bytes] - :keyword prefer: Optional. The prefer header to set server timeout, query statistics and - visualization information. Default value is None. - :paramtype prefer: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - body = { - "query": "str", - "timespan": "str", - "workspaces": [ - "str" - ] - } - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... 
- }, - "render": {}, - "statistics": {} - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[JSON] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(body, (IOBase, bytes)): - _content = body - else: - _json = body - - _request = build_query_resource_execute_request( - resource_id=resource_id, - prefer=prefer, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore - - @overload - def batch(self, body: JSON, *, content_type: str = "application/json", **kwargs: Any) -> JSON: - """Execute a batch of Analytics queries. - - Executes a batch of Analytics queries for data. `Here - `_ is an example for - using POST with an Analytics query. - - :param body: The batch request body. Required. - :type body: JSON - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - body = { - "requests": [ - { - "body": { - "query": "str", - "timespan": "str", - "workspaces": [ - "str" - ] - }, - "id": "str", - "workspace": "str", - "headers": { - "str": "str" - }, - "method": "POST", - "path": "/query" - } - ] - } - - # response body for status code(s): 200 - response == { - "responses": [ - { - "body": { - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... - }, - "render": {}, - "statistics": {}, - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ] - }, - "headers": { - "str": "str" - }, - "id": "str", - "status": 0 - } - ] - } - """ - - @overload - def batch(self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any) -> JSON: - """Execute a batch of Analytics queries. - - Executes a batch of Analytics queries for data. `Here - `_ is an example for - using POST with an Analytics query. - - :param body: The batch request body. Required. - :type body: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
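Since the batch overloads above accept a plain JSON mapping, the input template translates directly to a dict; a sketch of a two-request batch body (the workspace GUIDs and KQL queries are placeholders):

.. code-block:: python

    # Placeholder workspace IDs and queries, following the template above.
    batch_body = {
        "requests": [
            {
                "id": "1",
                "workspace": "<workspace-guid>",
                "body": {"query": "AppRequests | take 5", "timespan": "PT1H"},
                "method": "POST",
                "path": "/query",
            },
            {
                "id": "2",
                "workspace": "<workspace-guid>",
                "body": {"query": "AppRequests | summarize count()", "timespan": "PT1H"},
                "method": "POST",
                "path": "/query",
            },
        ]
    }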
- :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "responses": [ - { - "body": { - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... - }, - "render": {}, - "statistics": {}, - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ] - }, - "headers": { - "str": "str" - }, - "id": "str", - "status": 0 - } - ] - } - """ - - @distributed_trace - def batch(self, body: Union[JSON, IO[bytes]], **kwargs: Any) -> JSON: - """Execute a batch of Analytics queries. - - Executes a batch of Analytics queries for data. `Here - `_ is an example for - using POST with an Analytics query. - - :param body: The batch request body. Is either a JSON type or a IO[bytes] type. Required. - :type body: JSON or IO[bytes] - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - body = { - "requests": [ - { - "body": { - "query": "str", - "timespan": "str", - "workspaces": [ - "str" - ] - }, - "id": "str", - "workspace": "str", - "headers": { - "str": "str" - }, - "method": "POST", - "path": "/query" - } - ] - } - - # response body for status code(s): 200 - response == { - "responses": [ - { - "body": { - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... 
- }, - "render": {}, - "statistics": {}, - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ] - }, - "headers": { - "str": "str" - }, - "id": "str", - "status": 0 - } - ] - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[JSON] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(body, (IOBase, bytes)): - _content = body - else: - _json = body - - _request = build_query_batch_request( - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore - - @distributed_trace - def resource_get_xms( - self, resource_id: str, *, query: str, timespan: Optional[datetime.timedelta] = None, **kwargs: Any - ) -> JSON: - """Execute an Analytics query using resource URI. - - Executes an Analytics query for data in the context of a resource. `Here - `_ is an - example for using POST with an Analytics query. - - :param resource_id: The identifier of the resource. Required. - :type resource_id: str - :keyword query: The Analytics query. Learn more about the `Analytics query syntax - `_. - Required. - :paramtype query: str - :keyword timespan: Optional. The timespan over which to query data. This is an ISO8601 time - period value. This timespan is applied in addition to any that are specified in the query - expression. Default value is None. - :paramtype timespan: ~datetime.timedelta - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... 
- }, - "render": {}, - "statistics": {} - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[JSON] = kwargs.pop("cls", None) - - _request = build_query_resource_get_xms_request( - resource_id=resource_id, - query=query, - timespan=timespan, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore - - @overload - def resource_execute_xms( - self, - resource_id: str, - body: JSON, - *, - prefer: Optional[str] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> JSON: - """Execute an Analytics query using resource ID. - - Executes an Analytics query for data in the context of a resource. `Here - `_ is an - example for using POST with an Analytics query. - - :param resource_id: The identifier of the resource. Required. - :type resource_id: str - :param body: The Analytics query. Learn more about the `Analytics query syntax - `_. - Required. - :type body: JSON - :keyword prefer: Optional. The prefer header to set server timeout, query statistics and - visualization information. Default value is None. - :paramtype prefer: str - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - body = { - "query": "str", - "timespan": "str", - "workspaces": [ - "str" - ] - } - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... - }, - "render": {}, - "statistics": {} - } - """ - - @overload - def resource_execute_xms( - self, - resource_id: str, - body: IO[bytes], - *, - prefer: Optional[str] = None, - content_type: str = "application/json", - **kwargs: Any - ) -> JSON: - """Execute an Analytics query using resource ID. - - Executes an Analytics query for data in the context of a resource. `Here - `_ is an - example for using POST with an Analytics query. - - :param resource_id: The identifier of the resource. Required. - :type resource_id: str - :param body: The Analytics query. Learn more about the `Analytics query syntax - `_. - Required. - :type body: IO[bytes] - :keyword prefer: Optional. 
The prefer header to set server timeout, query statistics and - visualization information. Default value is None. - :paramtype prefer: str - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... - }, - "render": {}, - "statistics": {} - } - """ - - @distributed_trace - def resource_execute_xms( - self, resource_id: str, body: Union[JSON, IO[bytes]], *, prefer: Optional[str] = None, **kwargs: Any - ) -> JSON: - """Execute an Analytics query using resource ID. - - Executes an Analytics query for data in the context of a resource. `Here - `_ is an - example for using POST with an Analytics query. - - :param resource_id: The identifier of the resource. Required. - :type resource_id: str - :param body: The Analytics query. Learn more about the `Analytics query syntax - `_. Is - either a JSON type or a IO[bytes] type. Required. - :type body: JSON or IO[bytes] - :keyword prefer: Optional. The prefer header to set server timeout, query statistics and - visualization information. Default value is None. - :paramtype prefer: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # JSON input template you can fill out and use as your body input. - body = { - "query": "str", - "timespan": "str", - "workspaces": [ - "str" - ] - } - - # response body for status code(s): 200 - response == { - "tables": [ - { - "columns": [ - { - "name": "str", - "type": "str" - } - ], - "name": "str", - "rows": [ - [ - {} - ] - ] - } - ], - "error": { - "code": "str", - "message": "str", - "details": [ - { - "code": "str", - "message": "str", - "resources": [ - "str" - ], - "target": "str", - "value": "str" - } - ], - "innererror": ... 
- }, - "render": {}, - "statistics": {} - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[JSON] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(body, (IOBase, bytes)): - _content = body - else: - _json = body - - _request = build_query_resource_execute_xms_request( - resource_id=resource_id, - prefer=prefer, - content_type=content_type, - json=_json, - content=_content, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore - - -class MetadataOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~monitor_query_client.MonitorQueryClient`'s - :attr:`metadata` attribute. - """ - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def get(self, workspace_id: str, **kwargs: Any) -> JSON: - """Gets metadata information. - - Retrieve the metadata information for the workspace, including its schema, functions, workspace - info, categories etc. - - :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the - Properties blade in the Azure portal. Required. - :type workspace_id: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. 
code-block:: python - - # response body for status code(s): 200 - response == { - "applications": [ - { - "id": "str", - "name": "str", - "region": "str", - "resourceId": "str", - "related": { - "functions": [ - "str" - ], - "tables": [ - "str" - ] - } - } - ], - "categories": [ - { - "displayName": "str", - "id": "str", - "description": "str", - "related": { - "functions": [ - "str" - ], - "queries": [ - "str" - ], - "resourceTypes": [ - "str" - ], - "solutions": [ - "str" - ], - "tables": [ - "str" - ] - } - } - ], - "functions": [ - { - "body": "str", - "id": "str", - "name": "str", - "description": "str", - "displayName": "str", - "parameters": "str", - "properties": {}, - "related": { - "categories": [ - "str" - ], - "resourceTypes": [ - "str" - ], - "solutions": [ - "str" - ], - "tables": [ - "str" - ], - "workspaces": [ - "str" - ] - }, - "tags": {} - } - ], - "permissions": [ - { - "workspaces": [ - { - "resourceId": "str", - "denyTables": [ - "str" - ] - } - ], - "applications": [ - { - "resourceId": "str" - } - ], - "resources": [ - { - "resourceId": "str", - "denyTables": [ - "str" - ] - } - ] - } - ], - "queries": [ - { - "body": "str", - "id": "str", - "description": "str", - "displayName": "str", - "labels": [ - "str" - ], - "properties": {}, - "related": { - "categories": [ - "str" - ], - "resourceTypes": [ - "str" - ], - "solutions": [ - "str" - ], - "tables": [ - "str" - ] - }, - "tags": {} - } - ], - "resourceTypes": [ - { - "id": "str", - "type": "str", - "description": "str", - "displayName": "str", - "labels": [ - "str" - ], - "properties": {}, - "related": { - "categories": [ - "str" - ], - "functions": [ - "str" - ], - "queries": [ - "str" - ], - "resources": [ - "str" - ], - "tables": [ - "str" - ], - "workspaces": [ - "str" - ] - }, - "tags": {} - } - ], - "resources": [ - {} - ], - "solutions": [ - { - "id": "str", - "name": "str", - "related": { - "tables": [ - "str" - ], - "categories": [ - "str" - ], - "functions": [ - "str" - ], - "queries": [ - "str" - ], - "workspaces": [ - "str" - ] - }, - "description": "str", - "displayName": "str", - "properties": {}, - "tags": {} - } - ], - "tables": [ - { - "id": "str", - "name": "str", - "columns": [ - { - "name": "str", - "type": "str", - "description": "str", - "isPreferredFacet": bool, - "source": {} - } - ], - "description": "str", - "labels": [ - "str" - ], - "properties": {}, - "related": { - "categories": [ - "str" - ], - "functions": [ - "str" - ], - "queries": [ - "str" - ], - "resourceTypes": [ - "str" - ], - "solutions": [ - "str" - ], - "workspaces": [ - "str" - ] - }, - "tags": {}, - "timespanColumn": "str" - } - ], - "workspaces": [ - { - "id": "str", - "name": "str", - "region": "str", - "resourceId": "str", - "related": { - "functions": [ - "str" - ], - "resourceTypes": [ - "str" - ], - "resources": [ - "str" - ], - "solutions": [ - "str" - ], - "tables": [ - "str" - ] - } - } - ] - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[JSON] = kwargs.pop("cls", None) - - _request = build_metadata_get_request( - workspace_id=workspace_id, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore - - @distributed_trace - def post(self, workspace_id: str, **kwargs: Any) -> JSON: - """Gets metadata information. - - Retrieve the metadata information for the workspace, including its schema, functions, workspace - info, categories etc. - - :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the - Properties blade in the Azure portal. Required. - :type workspace_id: str - :return: JSON object - :rtype: JSON - :raises ~azure.core.exceptions.HttpResponseError: - - Example: - .. code-block:: python - - # response body for status code(s): 200 - response == { - "applications": [ - { - "id": "str", - "name": "str", - "region": "str", - "resourceId": "str", - "related": { - "functions": [ - "str" - ], - "tables": [ - "str" - ] - } - } - ], - "categories": [ - { - "displayName": "str", - "id": "str", - "description": "str", - "related": { - "functions": [ - "str" - ], - "queries": [ - "str" - ], - "resourceTypes": [ - "str" - ], - "solutions": [ - "str" - ], - "tables": [ - "str" - ] - } - } - ], - "functions": [ - { - "body": "str", - "id": "str", - "name": "str", - "description": "str", - "displayName": "str", - "parameters": "str", - "properties": {}, - "related": { - "categories": [ - "str" - ], - "resourceTypes": [ - "str" - ], - "solutions": [ - "str" - ], - "tables": [ - "str" - ], - "workspaces": [ - "str" - ] - }, - "tags": {} - } - ], - "permissions": [ - { - "workspaces": [ - { - "resourceId": "str", - "denyTables": [ - "str" - ] - } - ], - "applications": [ - { - "resourceId": "str" - } - ], - "resources": [ - { - "resourceId": "str", - "denyTables": [ - "str" - ] - } - ] - } - ], - "queries": [ - { - "body": "str", - "id": "str", - "description": "str", - "displayName": "str", - "labels": [ - "str" - ], - "properties": {}, - "related": { - "categories": [ - "str" - ], - "resourceTypes": [ - "str" - ], - "solutions": [ - "str" - ], - "tables": [ - "str" - ] - }, - "tags": {} - } - ], - "resourceTypes": [ - { - "id": "str", - "type": "str", - "description": "str", - "displayName": "str", - "labels": [ - "str" - ], - "properties": {}, - "related": { - "categories": [ - "str" - ], - "functions": [ - "str" - ], - "queries": [ - "str" - ], - "resources": [ - "str" - ], - "tables": [ - "str" - ], - "workspaces": [ - "str" - ] - }, - "tags": {} - } - ], - "resources": [ - {} - ], - "solutions": [ - { - "id": "str", - "name": "str", - "related": { - "tables": [ - "str" - ], - "categories": [ - "str" - ], - "functions": [ - "str" - ], - "queries": [ - "str" - ], - "workspaces": [ - "str" - ] - }, - "description": "str", - "displayName": "str", - "properties": {}, - "tags": {} - } - ], - "tables": [ - { - "id": "str", - "name": "str", - "columns": [ - { - "name": "str", - "type": "str", - "description": "str", - "isPreferredFacet": bool, - "source": {} - } - ], - "description": "str", - "labels": [ - "str" - ], - "properties": {}, - "related": { - "categories": [ - "str" - ], - "functions": [ - "str" - ], - "queries": [ - "str" - ], - "resourceTypes": [ - 
"str" - ], - "solutions": [ - "str" - ], - "workspaces": [ - "str" - ] - }, - "tags": {}, - "timespanColumn": "str" - } - ], - "workspaces": [ - { - "id": "str", - "name": "str", - "region": "str", - "resourceId": "str", - "related": { - "functions": [ - "str" - ], - "resourceTypes": [ - "str" - ], - "resources": [ - "str" - ], - "solutions": [ - "str" - ], - "tables": [ - "str" - ] - } - } - ] - } - """ - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[JSON] = kwargs.pop("cls", None) - - _request = build_metadata_post_request( - workspace_id=workspace_id, - headers=_headers, - params=_params, - ) - _request.url = self._client.format_url(_request.url) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - if response.content: - deserialized = response.json() - else: - deserialized = None - - if cls: - return cls(pipeline_response, cast(JSON, deserialized), {}) # type: ignore - - return cast(JSON, deserialized) # type: ignore diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/_patch.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/_patch.py deleted file mode 100644 index f7dd32510333..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/_patch.py +++ /dev/null @@ -1,20 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -"""Customize generated code here. - -Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize -""" -from typing import List - -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level - - -def patch_sdk(): - """Do not remove from this file. - - `patch_sdk` is a last resort escape hatch that allows you to do customizations - you can't accomplish using the techniques described in - https://aka.ms/azsdk/python/dpcodegen/python/customize - """ diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/py.typed b/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/py.typed deleted file mode 100644 index e5aff4f83af8..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/py.typed +++ /dev/null @@ -1 +0,0 @@ -# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_helpers.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_helpers.py index acf266cb18a9..120a29e72fb8 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_helpers.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_helpers.py @@ -4,38 +4,23 @@ # Licensed under the MIT License. See License.txt in the project root for # license information. 
# -------------------------------------------------------------------------- -from datetime import datetime, timedelta, timezone -from typing import List, Dict, Any, Optional, Tuple, Union +from datetime import datetime, timedelta +from typing import List, Dict, Any, Optional +from json import loads -from azure.core.credentials import TokenCredential from azure.core.exceptions import HttpResponseError -from azure.core.pipeline.policies import BearerTokenCredentialPolicy -from ._generated._serialization import Serializer, Deserializer - - -def get_authentication_policy(credential: TokenCredential, audience: str) -> BearerTokenCredentialPolicy: - """Returns the correct authentication policy. - - :param credential: The credential to use for authentication with the service. - :type credential: ~azure.core.credentials.TokenCredential - :param str audience: The audience for the token. - :returns: The correct authentication policy. - :rtype: ~azure.core.pipeline.policies.BearerTokenCredentialPolicy - """ - if credential is None: - raise ValueError("Parameter 'credential' must not be None.") - scope = audience.rstrip("/") + "/.default" - if hasattr(credential, "get_token"): - return BearerTokenCredentialPolicy(credential, scope) - - raise TypeError("Unsupported credential") +from ._utils.serialization import Serializer, Deserializer def order_results(request_order: List, mapping: Dict[str, Any], **kwargs: Any) -> List: ordered = [mapping[id] for id in request_order] results = [] for item in ordered: + # In rare cases, the generated value is a JSON string instead of a dict. This potentially stems from a bug in + # the service. This check handles that case. + if isinstance(item["body"], str): + item["body"] = loads(item["body"]) if not item["body"].get("error"): result_obj = kwargs.get("obj") if result_obj: @@ -90,48 +75,6 @@ def construct_iso8601(timespan=None) -> Optional[str]: return iso_str -def get_timespan_iso8601_endpoints( - timespan: Optional[Union[timedelta, Tuple[datetime, timedelta], Tuple[datetime, datetime]]] = None -) -> Tuple[Optional[str], Optional[str]]: - - if not timespan: - return None, None - start, end, duration = None, None, None - - if isinstance(timespan, timedelta): - duration = timespan - else: - if isinstance(timespan[1], datetime): - start, end = timespan[0], timespan[1] - elif isinstance(timespan[1], timedelta): - start, duration = timespan[0], timespan[1] - else: - raise ValueError("Tuple must be a start datetime with a timedelta or an end datetime.") - - iso_start = None - iso_end = None - if start is not None: - iso_start = Serializer.serialize_iso(start) - if end is not None: - iso_end = Serializer.serialize_iso(end) - elif duration is not None: - iso_end = Serializer.serialize_iso(start + duration) - else: # means that an invalid value None that is provided with start_time - raise ValueError("Duration or end_time cannot be None when provided with start_time.") - else: - # Only duration was provided - if duration is None: - raise ValueError("Duration cannot be None when start_time is None.") - end = datetime.now(timezone.utc) - iso_end = Serializer.serialize_iso(end) - iso_start = Serializer.serialize_iso(end - duration) - - # In some cases with a negative timedelta, the start time will be after the end time. 
- if iso_start and iso_end and iso_start > iso_end: - return iso_end, iso_start - return iso_start, iso_end - - def native_col_type(col_type, value): if col_type == "datetime": try: @@ -166,24 +109,3 @@ def process_prefer(server_timeout, include_statistics, include_visualization): if include_visualization: prefer += "include-render=true" return prefer.rstrip(",") - - -def get_subscription_id_from_resource(resource_id: str) -> str: - """Get the subscription ID from the provided resource ID. - - The format of the resource ID is: - /subscriptions/{subscriptionId}/resourceGroups/{group}/providers/{provider}/{type}/{name} - - :param str resource_id: The resource ID to parse. - :returns: The subscription ID. - :rtype: str - """ - if not resource_id: - raise ValueError("Resource ID must not be None or empty.") - - parts = resource_id.split("subscriptions/") - if len(parts) != 2: - raise ValueError("Resource ID must contain a subscription ID.") - - subscription_id = parts[1].split("/")[0] - return subscription_id diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_metrics_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_metrics_client.py deleted file mode 100644 index 2779f996d0ef..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_metrics_client.py +++ /dev/null @@ -1,200 +0,0 @@ -# -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -from datetime import timedelta, datetime -from json import loads -from typing import Any, List, MutableMapping, Sequence, Optional, Union, Tuple - -from azure.core.credentials import TokenCredential -from azure.core.tracing.decorator import distributed_trace - -from ._generated.metrics.batch import MonitorBatchMetricsClient -from ._models import MetricsQueryResult -from ._enums import MetricAggregationType -from ._helpers import get_authentication_policy, get_timespan_iso8601_endpoints, get_subscription_id_from_resource -from ._version import SDK_MONIKER - -JSON = MutableMapping[str, Any] - - -class MetricsClient: # pylint: disable=client-accepts-api-version-keyword - """MetricsClient should be used for performing metrics queries on multiple monitored resources in the - same region. A credential with authorization at the subscription level is required when using this client. - - :param str endpoint: The regional endpoint to use, for example - https://eastus.metrics.monitor.azure.com. The region should match the region of the requested - resources. For global resources, the region should be 'global'. Required. - :param credential: The credential to authenticate the client. - :type credential: ~azure.core.credentials.TokenCredential - :keyword str audience: The audience to use when requesting tokens for Microsoft Entra ID. Defaults to the public - cloud audience (https://metrics.monitor.azure.com). - - .. admonition:: Example: - - .. literalinclude:: ../samples/sample_authentication.py - :start-after: [START create_metrics_client] - :end-before: [END create_metrics_client] - :language: python - :dedent: 4 - :caption: Creating the MetricsClient with a TokenCredential. - - .. admonition:: Example: - - .. 
literalinclude:: ../samples/sample_authentication.py - :start-after: [START create_metrics_client_sovereign_cloud] - :end-before: [END create_metrics_client_sovereign_cloud] - :language: python - :dedent: 4 - :caption: Creating the MetricsClient for use with a sovereign cloud (i.e. non-public cloud). - """ - - def __init__(self, endpoint: str, credential: TokenCredential, **kwargs: Any) -> None: - self._endpoint = endpoint - if not self._endpoint.startswith("https://") and not self._endpoint.startswith("http://"): - self._endpoint = "https://" + self._endpoint - audience = kwargs.pop("audience", "https://metrics.monitor.azure.com") - - authentication_policy = kwargs.pop("authentication_policy", None) or get_authentication_policy( - credential, audience - ) - kwargs.setdefault("sdk_moniker", SDK_MONIKER) - self._client = MonitorBatchMetricsClient( - credential=credential, endpoint=self._endpoint, authentication_policy=authentication_policy, **kwargs - ) - self._batch_metrics_op = self._client.metrics_batch - - @distributed_trace - def query_resources( - self, - *, - resource_ids: Sequence[str], - metric_namespace: str, - metric_names: Sequence[str], - timespan: Optional[Union[timedelta, Tuple[datetime, timedelta], Tuple[datetime, datetime]]] = None, - granularity: Optional[timedelta] = None, - aggregations: Optional[Sequence[Union[MetricAggregationType, str]]] = None, - max_results: Optional[int] = None, - order_by: Optional[str] = None, - filter: Optional[str] = None, - roll_up_by: Optional[str] = None, - **kwargs: Any, - ) -> List[MetricsQueryResult]: - """Lists the metric values for multiple resources. - - :keyword resource_ids: A list of resource IDs to query metrics for. Required. - :paramtype resource_ids: list[str] - :keyword metric_namespace: Metric namespace that contains the requested metric names. Required. - :paramtype metric_namespace: str - :keyword metric_names: The names of the metrics to retrieve. Required. - :paramtype metric_names: list[str] - :keyword timespan: The timespan for which to query the data. This can be a timedelta, - a tuple of a start datetime with timedelta, or a tuple with start and end datetimes. - :paramtype timespan: Optional[Union[~datetime.timedelta, tuple[~datetime.datetime, ~datetime.timedelta], - tuple[~datetime.datetime, ~datetime.datetime]]] - :keyword granularity: The granularity (i.e. timegrain) of the query. - :paramtype granularity: Optional[~datetime.timedelta] - :keyword aggregations: The list of aggregation types to retrieve. Use - `azure.monitor.query.MetricAggregationType` enum to get each aggregation type. - :paramtype aggregations: Optional[list[Union[~azure.monitor.query.MetricAggregationType, str]]] - :keyword max_results: The maximum number of records to retrieve. - Valid only if 'filter' is specified. Defaults to 10. - :paramtype max_results: Optional[int] - :keyword order_by: The aggregation to use for sorting results and the direction of the sort. - Only one order can be specified. Examples: 'sum asc', 'maximum desc'. - :paramtype order_by: Optional[str] - :keyword filter: The **filter** is used to reduce the set of metric data returned. Default value is None. - - Example: Metric contains metadata A, B and C. - - - Return all time series of C where A = a1 and B = b1 or b2: - - **filter="A eq 'a1' and B eq 'b1' or B eq 'b2' and C eq '*'"** - - - Invalid variant: - - **filter="A eq 'a1' and B eq 'b1' and C eq '*' or B = 'b2'"**. This is invalid because the - logical 'or' operator cannot separate two different metadata names. 
- - - Return all time series where A = a1, B = b1 and C = c1: - - **filter="A eq 'a1' and B eq 'b1' and C eq 'c1'"** - - - Return all time series where A = a1: - - **filter="A eq 'a1' and B eq '*' and C eq '*'"** - - - Special case: When dimension name or dimension value uses round brackets. Example: When dimension name - is **dim (test) 1**, instead of using **filter="dim (test) 1 eq '*'"** use - **filter="dim %2528test%2529 1 eq '*'"**. - - When dimension name is **dim (test) 3** and dimension value is - **dim3 (test) val**, instead of using **filter="dim (test) 3 eq 'dim3 (test) val'"**, use **filter="dim - %2528test%2529 3 eq 'dim3 %2528test%2529 val'"**. - :paramtype filter: str - :keyword roll_up_by: Dimension name(s) to rollup results by. For example if you only want to see - metric values with a filter like 'City eq Seattle or City eq Tacoma' but don't want to see - separate values for each city, you can specify 'City' to see the results for Seattle - and Tacoma rolled up into one timeseries. - :paramtype roll_up_by: str - :return: A list of MetricsQueryResult objects. - :rtype: list[~azure.monitor.query.MetricsQueryResult] - :raises ~azure.core.exceptions.HttpResponseError: - - .. admonition:: Example: - - .. literalinclude:: ../samples/sample_metrics_query_multiple.py - :start-after: [START send_metrics_batch_query] - :end-before: [END send_metrics_batch_query] - :language: python - :dedent: 0 - :caption: Get a response for a batch metrics query. - """ - if not resource_ids: - raise ValueError("'resource_ids' must be provided and must not be empty.") - - # Metric names with commas need to be encoded. - metric_names = [x.replace(",", "%2") for x in metric_names] - - start_time, end_time = get_timespan_iso8601_endpoints(timespan) - resource_id_json: JSON = {"resourceids": list(resource_ids)} - subscription_id = get_subscription_id_from_resource(resource_ids[0]) - - generated = self._batch_metrics_op.batch( - subscription_id, - resource_id_json, - metricnamespace=metric_namespace, - metricnames=metric_names, - starttime=start_time, - endtime=end_time, - interval=granularity, - aggregation=",".join(aggregations) if aggregations else None, - top=max_results, - orderby=order_by, - filter=filter, - rollupby=roll_up_by, # cspell:ignore rollupby - **kwargs, - ) - - # In rare cases, the generated value is a JSON string instead of a dict. This potentially stems from a bug in - # the service. This check handles that case. - if isinstance(generated, str): - generated = loads(generated) - - return [ - MetricsQueryResult._from_generated(value) # pylint: disable=protected-access - for value in generated["values"] - ] - - def close(self) -> None: - """Close the client session.""" - return self._client.close() - - def __enter__(self) -> "MetricsClient": - self._client.__enter__() - return self - - def __exit__(self, *args: Any) -> None: - self._client.__exit__(*args) diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_metrics_query_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_metrics_query_client.py deleted file mode 100644 index 8cee75251fc6..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_metrics_query_client.py +++ /dev/null @@ -1,247 +0,0 @@ -# -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. 
-# -------------------------------------------------------------------------- -from datetime import timedelta, datetime -from typing import Any, cast, Optional, Tuple, Union, Sequence - -from azure.core.credentials import TokenCredential -from azure.core.paging import ItemPaged -from azure.core.tracing.decorator import distributed_trace - -from ._generated._serialization import Serializer -from ._generated.metrics._client import MonitorMetricsClient -from ._models import MetricsQueryResult, MetricDefinition, MetricNamespace -from ._helpers import get_authentication_policy, construct_iso8601 -from ._version import SDK_MONIKER - - -class MetricsQueryClient(object): # pylint: disable=client-accepts-api-version-keyword - """MetricsQueryClient should be used to collect numeric data from monitored resources into a - time series database. Metrics are numerical values that are collected at regular intervals and - describe some aspect of a system at a particular time. Metrics are lightweight and capable of - supporting near real-time scenarios, making them particularly useful for alerting and - fast detection of issues. - - :param credential: The credential to authenticate the client. - :type credential: ~azure.core.credentials.TokenCredential - :keyword endpoint: The endpoint to connect to. Defaults to 'https://management.azure.com'. - :paramtype endpoint: Optional[str] - - .. admonition:: Example: - - .. literalinclude:: ../samples/sample_authentication.py - :start-after: [START create_metrics_query_client] - :end-before: [END create_metrics_query_client] - :language: python - :dedent: 4 - :caption: Creating the MetricsQueryClient with a TokenCredential. - - .. admonition:: Example: - - .. literalinclude:: ../samples/sample_authentication.py - :start-after: [START create_metrics_query_client_sovereign_cloud] - :end-before: [END create_metrics_query_client_sovereign_cloud] - :language: python - :dedent: 4 - :caption: Creating the MetricsQueryClient for use with a sovereign cloud (i.e. non-public cloud). - """ - - def __init__(self, credential: TokenCredential, **kwargs: Any) -> None: - self._endpoint = kwargs.pop("endpoint", "https://management.azure.com") - if not self._endpoint.startswith("https://") and not self._endpoint.startswith("http://"): - self._endpoint = "https://" + self._endpoint - audience = kwargs.pop("audience", self._endpoint) - - authentication_policy = kwargs.pop("authentication_policy", None) or get_authentication_policy( - credential, audience - ) - kwargs.setdefault("sdk_moniker", SDK_MONIKER) - self._client = MonitorMetricsClient( - credential=credential, endpoint=self._endpoint, authentication_policy=authentication_policy, **kwargs - ) - - self._metrics_op = self._client.metrics - self._namespace_op = self._client.metric_namespaces - self._definitions_op = self._client.metric_definitions - - @distributed_trace - def query_resource( - self, - resource_uri: str, - metric_names: Sequence[str], - *, - timespan: Optional[Union[timedelta, Tuple[datetime, timedelta], Tuple[datetime, datetime]]] = None, - granularity: Optional[timedelta] = None, - aggregations: Optional[Sequence[str]] = None, - max_results: Optional[int] = None, - order_by: Optional[str] = None, - filter: Optional[str] = None, - metric_namespace: Optional[str] = None, - **kwargs: Any - ) -> MetricsQueryResult: - """Lists the metric values for a resource. - - :param resource_uri: The identifier of the resource. - :type resource_uri: str - :param metric_names: The names of the metrics to retrieve. 
- :type metric_names: list[str] - :keyword timespan: The timespan for which to query the data. This can be a timedelta, - a timedelta and a start datetime, or a start datetime/end datetime. - :paramtype timespan: Optional[Union[~datetime.timedelta, tuple[~datetime.datetime, ~datetime.timedelta], - tuple[~datetime.datetime, ~datetime.datetime]]] - :keyword granularity: The granularity (i.e. timegrain) of the query. - :paramtype granularity: Optional[~datetime.timedelta] - :keyword aggregations: The list of aggregation types to retrieve. Use - `azure.monitor.query.MetricAggregationType` enum to get each aggregation type. - :paramtype aggregations: Optional[list[str]] - :keyword max_results: The maximum number of records to retrieve. - Valid only if 'filter' is specified. - Defaults to 10. - :paramtype max_results: Optional[int] - :keyword order_by: The aggregation to use for sorting results and the direction of the sort. - Only one order can be specified. - Examples: sum asc. - :paramtype order_by: Optional[str] - :keyword filter: The **filter** is used to reduce the set of metric data returned. Default value is None. - - Example: Metric contains metadata A, B and C. - - - Return all time series of C where A = a1 and B = b1 or b2: - - **filter="A eq 'a1' and B eq 'b1' or B eq 'b2' and C eq '*'"** - - - Invalid variant: - - **filter="A eq 'a1' and B eq 'b1' and C eq '*' or B = 'b2'"**. This is invalid because the - logical 'or' operator cannot separate two different metadata names. - - - Return all time series where A = a1, B = b1 and C = c1: - - **filter="A eq 'a1' and B eq 'b1' and C eq 'c1'"** - - - Return all time series where A = a1: - - **filter="A eq 'a1' and B eq '*' and C eq '*'"** - - - Special case: When dimension name or dimension value uses round brackets. Example: When dimension name - is **dim (test) 1**, instead of using **filter="dim (test) 1 eq '*'"** use - **filter="dim %2528test%2529 1 eq '*'"**. - - When dimension name is **dim (test) 3** and dimension value is - **dim3 (test) val**, instead of using **filter="dim (test) 3 eq 'dim3 (test) val'"**, use **filter="dim - %2528test%2529 3 eq 'dim3 %2528test%2529 val'"**. - :paramtype filter: Optional[str] - :keyword metric_namespace: Metric namespace to query metric definitions for. - :paramtype metric_namespace: Optional[str] - :return: A MetricsQueryResult object. - :rtype: ~azure.monitor.query.MetricsQueryResult - :raises ~azure.core.exceptions.HttpResponseError: - - .. admonition:: Example: - - .. literalinclude:: ../samples/sample_metrics_query.py - :start-after: [START send_metrics_query] - :end-before: [END send_metrics_query] - :language: python - :dedent: 0 - :caption: Get a response for a single metrics query. - """ - - # Metric names with commas need to be encoded. - metric_names = [x.replace(",", "%2") for x in metric_names] - generated = self._metrics_op.list( - resource_uri, - timespan=construct_iso8601(timespan), - interval=granularity, - metricnames=",".join(metric_names), - aggregation=",".join(aggregations) if aggregations else None, - top=max_results, - orderby=order_by, - filter=filter, - metricnamespace=metric_namespace, - **kwargs - ) - return MetricsQueryResult._from_generated(generated) # pylint: disable=protected-access - - @distributed_trace - def list_metric_namespaces( - self, resource_uri: str, *, start_time: Optional[datetime] = None, **kwargs: Any - ) -> ItemPaged[MetricNamespace]: - """Lists the metric namespaces for the resource. - - :param resource_uri: The identifier of the resource. 
- :type resource_uri: str - :keyword start_time: The start time from which to query for metric - namespaces. This should be provided as a datetime object. - :paramtype start_time: Optional[~datetime.datetime] - :return: An iterator like instance of either MetricNamespace or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.monitor.query.MetricNamespace] - :raises ~azure.core.exceptions.HttpResponseError: - - .. admonition:: Example: - - .. literalinclude:: ../samples/sample_metric_namespaces.py - :start-after: [START send_metric_namespaces_query] - :end-before: [END send_metric_namespaces_query] - :language: python - :dedent: 0 - :caption: Get a response for a single metric namespaces query. - """ - res = self._namespace_op.list( - resource_uri, - start_time=Serializer.serialize_iso(start_time) if start_time else None, - cls=kwargs.pop( - "cls", - lambda objs: [MetricNamespace._from_generated(x) for x in objs], # pylint: disable=protected-access - ), - **kwargs - ) - return cast(ItemPaged[MetricNamespace], res) - - @distributed_trace - def list_metric_definitions( - self, resource_uri: str, *, namespace: Optional[str] = None, **kwargs: Any - ) -> ItemPaged[MetricDefinition]: - """Lists the metric definitions for the resource. - - :param resource_uri: The identifier of the resource. - :type resource_uri: str - :keyword namespace: Metric namespace to query metric definitions for. - :paramtype namespace: Optional[str] - :return: An iterator like instance of either MetricDefinition or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.monitor.query.MetricDefinition] - :raises ~azure.core.exceptions.HttpResponseError: - - .. admonition:: Example: - - .. literalinclude:: ../samples/sample_metric_definitions.py - :start-after: [START send_metric_definitions_query] - :end-before: [END send_metric_definitions_query] - :language: python - :dedent: 0 - :caption: Get a response for a single metric definitions query. - """ - res = self._definitions_op.list( - resource_uri, - metricnamespace=namespace, - cls=kwargs.pop( - "cls", - lambda objs: [MetricDefinition._from_generated(x) for x in objs], # pylint: disable=protected-access - ), - **kwargs - ) - return cast(ItemPaged[MetricDefinition], res) - - def close(self) -> None: - """Close the :class:`~azure.monitor.query.MetricsQueryClient` session.""" - return self._client.close() - - def __enter__(self) -> "MetricsQueryClient": - self._client.__enter__() - return self - - def __exit__(self, *args: Any) -> None: - self._client.__exit__(*args) diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_models.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_models.py index eb9c03e5cd40..8ac497467e01 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_models.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_models.py @@ -5,21 +5,15 @@ # license information. 
# -------------------------------------------------------------------------- # cspell:ignore milli +from collections.abc import Mapping import uuid from datetime import datetime, timedelta -import sys from typing import Any, Optional, List, Union, Tuple, Dict, Iterator, Literal -from ._enums import LogsQueryStatus, MetricAggregationType, MetricClass, MetricNamespaceClassification, MetricUnit +from ._enums import LogsQueryStatus from ._exceptions import LogsQueryError -from ._generated._serialization import Deserializer from ._helpers import construct_iso8601, process_row -if sys.version_info >= (3, 9): - from collections.abc import Mapping -else: - from typing import Mapping - JSON = Mapping[str, Any] # pylint: disable=unsubscriptable-object @@ -110,188 +104,6 @@ def _from_generated(cls, generated) -> "LogsTable": ) -class MetricValue: - """Represents a metric value.""" - - timestamp: datetime - """The timestamp for the metric value.""" - average: Optional[float] = None - """The average value in the time range.""" - minimum: Optional[float] = None - """The least value in the time range.""" - maximum: Optional[float] = None - """The greatest value in the time range.""" - total: Optional[float] = None - """The sum of all of the values in the time range.""" - count: Optional[float] = None - """The number of samples in the time range. Can be used to determine the number of values that - contributed to the average value.""" - - def __init__(self, **kwargs: Any) -> None: - self.timestamp = kwargs["timestamp"] - self.average = kwargs.get("average", None) - self.minimum = kwargs.get("minimum", None) - self.maximum = kwargs.get("maximum", None) - self.total = kwargs.get("total", None) - self.count = kwargs.get("count", None) - - @classmethod - def _from_generated(cls, generated): - if not generated: - return cls() - return cls( - timestamp=Deserializer.deserialize_iso(generated.get("timeStamp")), - average=generated.get("average"), - minimum=generated.get("minimum"), - maximum=generated.get("maximum"), - total=generated.get("total"), - count=generated.get("count"), - ) - - -class TimeSeriesElement: - """A time series result type. The discriminator value is always TimeSeries in this case.""" - - metadata_values: Dict[str, str] - """The metadata values returned if $filter was specified in the call.""" - data: List[MetricValue] - """An array of data points representing the metric values. This is only returned if a result - type of data is specified.""" - - def __init__(self, **kwargs: Any) -> None: - self.metadata_values = kwargs.get("metadata_values", {}) - self.data = kwargs.get("data", []) - - @classmethod - def _from_generated(cls, generated) -> "TimeSeriesElement": - if not generated: - return cls() - return cls( - metadata_values={obj["name"]["value"]: obj.get("value") for obj in generated.get("metadatavalues", [])}, - data=[ - MetricValue._from_generated(val) # pylint: disable=protected-access - for val in generated.get("data", []) - ], - ) - - -class Metric: - """The result data of a single metric name.""" - - id: str - """The metric ID.""" - type: str - """The resource type of the metric resource.""" - name: str - """The name of the metric.""" - unit: str - """The unit of the metric. To access these values, use the MetricUnit enum. 
- Possible values include "Count", "Bytes", "Seconds", "CountPerSecond", "BytesPerSecond", "Percent", - "MilliSeconds", "ByteSeconds", "Unspecified", "Cores", "MilliCores", "NanoCores", "BitsPerSecond".""" - timeseries: List[TimeSeriesElement] - """The time series returned when a data query is performed.""" - display_description: str - """Detailed description of this metric.""" - - def __init__(self, **kwargs: Any) -> None: - self.id = kwargs["id"] - self.type = kwargs["type"] - self.name = kwargs["name"] - self.unit = kwargs["unit"] - self.timeseries = kwargs["timeseries"] - self.display_description = kwargs["display_description"] - - @classmethod - def _from_generated(cls, generated) -> "Metric": - if not generated: - return cls() - return cls( - id=generated.get("id"), - type=generated.get("type"), - name=generated.get("name", {}).get("value"), - unit=generated.get("unit"), - timeseries=[ - TimeSeriesElement._from_generated(t) # pylint: disable=protected-access - for t in generated.get("timeseries", []) - ], - display_description=generated.get("displayDescription"), - ) - - -class MetricsQueryResult: - """The response to a metrics query.""" - - timespan: str - """Required. The timespan for which the data was retrieved. Its value consists of two datetimes - concatenated, separated by '/'. This may be adjusted in the future and returned back from what - was originally requested.""" - metrics: List[Metric] - """Required. The value of the collection.""" - granularity: Optional[timedelta] = None - """The granularity (window size) for which the metric data was returned in. This may be adjusted - in the future and returned back from what was originally requested. This is not present if a - metadata request was made.""" - namespace: Optional[str] = None - """The namespace of the metrics that has been queried.""" - resource_region: Optional[str] = None - """The region of the resource that has been queried for metrics.""" - cost: Optional[int] = None - """The integer value representing the cost of the query, for data case.""" - - def __init__(self, **kwargs: Any) -> None: - self.timespan = kwargs["timespan"] - self.metrics = kwargs["metrics"] - self.granularity = kwargs.get("granularity", None) - self.namespace = kwargs.get("namespace", None) - self.resource_region = kwargs.get("resource_region", None) - self.cost = kwargs.get("cost", None) - - @classmethod - def _from_generated(cls, generated) -> "MetricsQueryResult": - if not generated: - return cls() - granularity = None - if generated.get("interval"): - granularity = Deserializer.deserialize_duration(generated.get("interval")) - if not generated.get("timespan"): - generated["timespan"] = f"{generated.get('starttime')}/{generated.get('endtime')}" - return cls( - cost=generated.get("cost"), - timespan=generated.get("timespan"), - granularity=granularity, - namespace=generated.get("namespace"), - resource_region=generated.get("resourceregion"), - metrics=MetricsList( - metrics=[ - Metric._from_generated(m) for m in generated.get("value", []) # pylint: disable=protected-access - ] - ), - ) - - -class MetricsList(list): - """Custom list for metrics.""" - - def __init__(self, **kwargs: Any) -> None: - self._metrics = kwargs["metrics"] - self._metric_names = {val.name: ind for ind, val in enumerate(self._metrics)} - - def __iter__(self): - return iter(self._metrics) - - def __len__(self): - return len(self._metrics) - - def __repr__(self): - return repr(self._metrics) - - def __getitem__(self, metric): - try: - return self._metrics[metric] - except 
TypeError: # TypeError: list indices must be integers or slices, not str - return self._metrics[self._metric_names[metric]] - - class LogsBatchQuery: """A single request in a batch. The batch query API accepts a list of these objects. @@ -406,139 +218,6 @@ def _from_generated(cls, generated) -> "LogsQueryResult": ) -class MetricNamespace: - """Metric namespace class specifies the metadata for a metric namespace.""" - - id: Optional[str] = None - """The ID of the metricNamespace.""" - type: Optional[str] = None - """The type of the namespace.""" - name: Optional[str] = None - """The name of the namespace.""" - fully_qualified_namespace: Optional[str] = None - """The fully qualified namespace name.""" - namespace_classification: Optional[Union[str, MetricNamespaceClassification]] = None - """Kind of namespace. Possible values include "Platform", "Custom", "Qos".""" - - def __init__(self, **kwargs: Any) -> None: - self.id = kwargs.get("id", None) - self.type = kwargs.get("type", None) - self.name = kwargs.get("name", None) - self.fully_qualified_namespace = kwargs.get("fully_qualified_namespace", None) - self.namespace_classification = kwargs.get("namespace_classification", None) - - @classmethod - def _from_generated(cls, generated) -> "MetricNamespace": - if not generated: - return cls() - fully_qualified_namespace = None - if generated.get("properties"): - fully_qualified_namespace = generated["properties"].get("metricNamespaceName") - return cls( - id=generated.get("id"), - type=generated.get("type"), - name=generated.get("name"), - fully_qualified_namespace=fully_qualified_namespace, - namespace_classification=generated.get("classification"), - ) - - -class MetricAvailability: - """Metric availability specifies the time grain (aggregation interval or frequency) - and the retention period for that time grain. - """ - - granularity: Optional[timedelta] = None - """The time grain specifies the aggregation interval for the metric.""" - retention: Optional[timedelta] = None - """The retention period for the metric at the specified timegrain.""" - - def __init__(self, **kwargs: Any) -> None: - self.granularity = kwargs.get("granularity", None) - self.retention = kwargs.get("retention", None) - - @classmethod - def _from_generated(cls, generated) -> "MetricAvailability": - if not generated: - return cls() - granularity, retention = None, None - if generated.get("timeGrain"): - granularity = Deserializer.deserialize_duration(generated["timeGrain"]) - if generated.get("retention"): - retention = Deserializer.deserialize_duration(generated["retention"]) - return cls(granularity=granularity, retention=retention) - - -class MetricDefinition: # pylint: disable=too-many-instance-attributes - """Metric definition class specifies the metadata for a metric.""" - - dimension_required: Optional[bool] = None - """Flag to indicate whether the dimension is required.""" - resource_id: Optional[str] = None - """The resource identifier of the resource that emitted the metric.""" - namespace: Optional[str] = None - """The namespace the metric belongs to.""" - name: Optional[str] = None - """The name and the display name of the metric, i.e. it is a localizable string.""" - unit: Optional[Union[str, MetricUnit]] = None - """The unit of the metric. 
Possible values include "Count", "Bytes", "Seconds", "CountPerSecond", - "BytesPerSecond", "Percent", "MilliSeconds", "ByteSeconds", "Unspecified", "Cores", "MilliCores", - "NanoCores", "BitsPerSecond".""" - primary_aggregation_type: Optional[Union[str, MetricAggregationType]] = None - """The primary aggregation type value defining how to use the values for display. Possible values - include "None", "Average", "Count", "Minimum", "Maximum", "Total".""" - metric_class: Optional[Union[str, MetricClass]] = None - """The class of the metric. Possible values include "Availability", "Transactions", "Errors", - "Latency", "Saturation".""" - supported_aggregation_types: Optional[List[Union[str, MetricAggregationType]]] = None - """The collection of what aggregation types are supported.""" - metric_availabilities: Optional[List[MetricAvailability]] = None - """The collection of what aggregation intervals are available to be queried.""" - id: Optional[str] = None - """The resource identifier of the metric definition.""" - dimensions: Optional[List[str]] = None - """The name and the display name of the dimension, i.e. it is a localizable string.""" - - def __init__(self, **kwargs: Any) -> None: - self.dimension_required = kwargs.get("dimension_required", None) - self.resource_id = kwargs.get("resource_id", None) - self.namespace = kwargs.get("namespace", None) - self.name = kwargs.get("name", None) - self.unit = kwargs.get("unit", None) - self.primary_aggregation_type = kwargs.get("primary_aggregation_type", None) - self.supported_aggregation_types = kwargs.get("supported_aggregation_types", None) - self.metric_availabilities = kwargs.get("metric_availabilities", None) - self.id = kwargs.get("id", None) - self.dimensions = kwargs.get("dimensions", None) - self.metric_class = kwargs.get("metric_class", None) - - @classmethod - def _from_generated(cls, generated) -> "MetricDefinition": - if not generated: - return cls() - dimensions, metric_class = None, None - if generated.get("dimensions"): - dimensions = [d["value"] for d in generated["dimensions"]] - if generated.get("metricClass"): - metric_class = MetricClass(generated["metricClass"]) - return cls( - dimension_required=generated.get("isDimensionRequired"), - resource_id=generated.get("resourceId"), - namespace=generated.get("namespace"), - name=generated.get("name", {}).get("value"), - unit=generated.get("unit"), - primary_aggregation_type=generated.get("primaryAggregationType"), - supported_aggregation_types=generated.get("supportedAggregationTypes"), - metric_class=metric_class, - metric_availabilities=[ - MetricAvailability._from_generated(val) # pylint: disable=protected-access - for val in generated.get("metricAvailabilities", []) - ], - id=generated.get("id"), - dimensions=dimensions, - ) - - class LogsQueryPartialResult: """The LogsQueryPartialResult type is returned when the response of a query is a partial success (or partial failure). 
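
With the metric models gone, `_models.py` now carries only the logs-query result types (`LogsQueryResult`, `LogsQueryPartialResult`, `LogsTable`, `LogsTableRow`). As a quick orientation for reviewers, here is a minimal sketch of how the retained types fit together at the public API surface; it assumes `azure-identity` is installed for the credential and uses a placeholder workspace ID:

.. code-block:: python

    from datetime import timedelta

    from azure.identity import DefaultAzureCredential
    from azure.monitor.query import LogsQueryClient, LogsQueryStatus

    client = LogsQueryClient(DefaultAzureCredential())
    response = client.query_workspace(
        workspace_id="<workspace-id>",  # placeholder
        query="AppRequests | take 5",
        timespan=timedelta(days=1),
    )

    if response.status == LogsQueryStatus.PARTIAL:
        # LogsQueryPartialResult: some tables came back along with the
        # error that interrupted the query.
        print(response.partial_error)
        tables = response.partial_data
    else:
        # LogsQueryStatus.SUCCESS -> LogsQueryResult
        tables = response.tables

    for table in tables:
        for row in table.rows:
            print(row)

Returning a distinct `LogsQueryPartialResult` rather than raising keeps the tables that were delivered usable, while still surfacing the failure as a `LogsQueryError`.
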
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_operations/__init__.py similarity index 82% rename from sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/__init__.py rename to sdk/monitor/azure-monitor-query/azure/monitor/query/_operations/__init__.py index 3a8ca7f7491d..933fcd7d1b55 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/operations/__init__.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_operations/__init__.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- # pylint: disable=wrong-import-position @@ -12,14 +12,11 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._operations import MetricsBatchOperations # type: ignore from ._patch import __all__ as _patch_all from ._patch import * from ._patch import patch_sdk as _patch_sdk -__all__ = [ - "MetricsBatchOperations", -] +__all__ = [] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_operations/_operations.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_operations/_operations.py new file mode 100644 index 000000000000..c585b83bf549 --- /dev/null +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_operations/_operations.py @@ -0,0 +1,457 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=protected-access +from collections.abc import MutableMapping +from io import IOBase +import json +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload + +from azure.core import PipelineClient +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict + +from .. 
import models as _models +from .._configuration import MonitorQueryLogsClientConfiguration +from .._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize +from .._utils.serialization import Serializer +from .._utils.utils import ClientMixinABC + +JSON = MutableMapping[str, Any] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_monitor_query_logs_execute_request( + workspace_id: str, *, prefer: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/workspaces/{workspaceId}/query" + path_format_arguments = { + "workspaceId": _SERIALIZER.url("workspace_id", workspace_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + if prefer is not None: + _headers["Prefer"] = _SERIALIZER.header("prefer", prefer, "str") + if content_type is not None: + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + + +def build_monitor_query_logs_execute_with_resource_id_request( # pylint: disable=name-too-long + resource_id: str, *, prefer: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/{resourceId}/query" + path_format_arguments = { + "resourceId": _SERIALIZER.url("resource_id", resource_id, "str", skip_quote=True), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct headers + if prefer is not None: + _headers["Prefer"] = _SERIALIZER.header("prefer", prefer, "str") + if content_type is not None: + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + + +def build_monitor_query_logs_batch_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/$batch" + + # Construct headers + if content_type is not None: + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, headers=_headers, **kwargs) + + +class _MonitorQueryLogsClientOperationsMixin( + ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], MonitorQueryLogsClientConfiguration] +): + + @overload + def _execute( + self, + workspace_id: str, + body: _models._models.QueryBody, + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models._models.QueryResults: ... 
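+    # The JSON and IO[bytes] overloads below accept the same Analytics query body
+    # in alternative encodings; all three overloads resolve to the single
+    # @distributed_trace implementation that follows.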
+ @overload + def _execute( + self, + workspace_id: str, + body: JSON, + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models._models.QueryResults: ... + @overload + def _execute( + self, + workspace_id: str, + body: IO[bytes], + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models._models.QueryResults: ... + + @distributed_trace + def _execute( + self, + workspace_id: str, + body: Union[_models._models.QueryBody, JSON, IO[bytes]], + *, + prefer: Optional[str] = None, + **kwargs: Any + ) -> _models._models.QueryResults: + """Execute an Analytics query. + + Executes an Analytics query for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the + Properties + blade in the Azure portal. Required. + :type workspace_id: str + :param body: The Analytics query. Learn more about the + `Analytics query syntax + `_. Is + one of the following types: QueryBody, JSON, IO[bytes] Required. + :type body: ~azure.monitor.query.models._models.QueryBody or JSON or IO[bytes] + :keyword prefer: Optional. The prefer header to set server timeout, query statistics and + visualization information. Default value is None. + :paramtype prefer: str + :return: QueryResults. The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.query.models._models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_models._models.QueryResults] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_monitor_query_logs_execute_request( + workspace_id=workspace_id, + prefer=prefer, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "apiVersion": self._serialize.url("self._config.api_version", self._config.api_version, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models._models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize( + _models._models.QueryResults, 
response.json()  # pylint: disable=protected-access
+            )
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})  # type: ignore
+
+        return deserialized  # type: ignore
+
+    @overload
+    def _execute_with_resource_id(
+        self,
+        resource_id: str,
+        body: _models._models.QueryBody,
+        *,
+        prefer: Optional[str] = None,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> _models._models.QueryResults: ...
+    @overload
+    def _execute_with_resource_id(
+        self,
+        resource_id: str,
+        body: JSON,
+        *,
+        prefer: Optional[str] = None,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> _models._models.QueryResults: ...
+    @overload
+    def _execute_with_resource_id(
+        self,
+        resource_id: str,
+        body: IO[bytes],
+        *,
+        prefer: Optional[str] = None,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> _models._models.QueryResults: ...
+
+    @distributed_trace
+    def _execute_with_resource_id(
+        self,
+        resource_id: str,
+        body: Union[_models._models.QueryBody, JSON, IO[bytes]],
+        *,
+        prefer: Optional[str] = None,
+        **kwargs: Any
+    ) -> _models._models.QueryResults:
+        """Execute an Analytics query using resource ID.
+
+        Executes an Analytics query for data in the context of a resource.
+        `Here `_
+        is an example for using POST with an Analytics query.
+
+        :param resource_id: The identifier of the resource. Required.
+        :type resource_id: str
+        :param body: The Analytics query. Learn more about the
+         `Analytics query syntax
+         `_. Is
+         one of the following types: QueryBody, JSON, IO[bytes] Required.
+        :type body: ~azure.monitor.query.models._models.QueryBody or JSON or IO[bytes]
+        :keyword prefer: Optional. The prefer header to set server timeout, query statistics and
+         visualization information. Default value is None.
+        :paramtype prefer: str
+        :return: QueryResults. The QueryResults is compatible with MutableMapping
+        :rtype: ~azure.monitor.query.models._models.QueryResults
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = kwargs.pop("params", {}) or {}
+
+        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None))
+        cls: ClsType[_models._models.QueryResults] = kwargs.pop("cls", None)
+
+        content_type = content_type or "application/json"
+        _content = None
+        if isinstance(body, (IOBase, bytes)):
+            _content = body
+        else:
+            _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore
+
+        _request = build_monitor_query_logs_execute_with_resource_id_request(
+            resource_id=resource_id,
+            prefer=prefer,
+            content_type=content_type,
+            content=_content,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+            "apiVersion": self._serialize.url("self._config.api_version", self._config.api_version, "str"),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = kwargs.pop("stream", False)
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            if _stream:
+                try:
+                    response.read()  # Load the body in memory and close the socket
+                except (StreamConsumedError, StreamClosedError):
+                    pass
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(_models._models.ErrorResponse, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        if _stream:
+            deserialized = response.iter_bytes()
+        else:
+            deserialized = _deserialize(
+                _models._models.QueryResults, response.json()  # pylint: disable=protected-access
+            )
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})  # type: ignore
+
+        return deserialized  # type: ignore
+
+    @overload
+    def _batch(
+        self, body: _models._models.BatchRequest, *, content_type: str = "application/json", **kwargs: Any
+    ) -> _models._models.BatchResponse: ...
+    @overload
+    def _batch(
+        self, body: JSON, *, content_type: str = "application/json", **kwargs: Any
+    ) -> _models._models.BatchResponse: ...
+    @overload
+    def _batch(
+        self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any
+    ) -> _models._models.BatchResponse: ...
+
+    @distributed_trace
+    def _batch(
+        self, body: Union[_models._models.BatchRequest, JSON, IO[bytes]], **kwargs: Any
+    ) -> _models._models.BatchResponse:
+        """Execute a batch of Analytics queries.
+
+        Executes a batch of Analytics queries for data.
+        `Here `_
+        is an example for using POST with an Analytics query.
+
+        :param body: The batch request body. Is one of the following types: BatchRequest, JSON,
+         IO[bytes] Required.
+        :type body: ~azure.monitor.query.models._models.BatchRequest or JSON or IO[bytes]
+        :return: BatchResponse. The BatchResponse is compatible with MutableMapping
+        :rtype: ~azure.monitor.query.models._models.BatchResponse
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = kwargs.pop("params", {}) or {}
+
+        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None))
+        cls: ClsType[_models._models.BatchResponse] = kwargs.pop("cls", None)
+
+        content_type = content_type or "application/json"
+        _content = None
+        if isinstance(body, (IOBase, bytes)):
+            _content = body
+        else:
+            _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore
+
+        _request = build_monitor_query_logs_batch_request(
+            content_type=content_type,
+            content=_content,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
+            "apiVersion": self._serialize.url("self._config.api_version", self._config.api_version, "str"),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _stream = kwargs.pop("stream", False)
+        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            if _stream:
+                try:
+                    response.read()  # Load the body in memory and close the socket
+                except (StreamConsumedError, StreamClosedError):
+                    pass
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(_models._models.ErrorResponse, response.json())
+            raise HttpResponseError(response=response, model=error)
+
+        if _stream:
+            deserialized = response.iter_bytes()
+        else:
+            deserialized = _deserialize(
+                _models._models.BatchResponse, response.json()  # pylint: disable=protected-access
+            )
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})  # type: ignore
+
+        return deserialized  # type: ignore
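The private `_batch` operation above posts a single JSON envelope of independent queries. A hedged sketch of that envelope's shape — the ID, workspace GUID, and KQL text are placeholders, and the field names are inferred from the public `LogsBatchQuery` helper used by `query_batch` later in this patch, not spelled out here:

```python
# Hypothetical batch envelope; all values are placeholders.
batch_body = {
    "requests": [
        {
            "id": "1",  # echoed back so responses can be re-ordered
            "workspace": "<workspace-id>",
            "method": "POST",
            "path": "/query",
            "headers": {"Content-Type": "application/json"},
            "body": {"query": "AppRequests | take 5", "timespan": "PT1H"},
        }
    ]
}
```

Responses come back in arbitrary order; the handwritten `query_batch` re-orders them by matching each response's `id` against the request order via `order_results`.
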
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/_patch.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_operations/_patch.py
similarity index 61%
rename from sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/_patch.py
rename to sdk/monitor/azure-monitor-query/azure/monitor/query/_operations/_patch.py
index f7dd32510333..8bcb627aa475 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/aio/operations/_patch.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_operations/_patch.py
@@ -1,7 +1,8 @@
-# ------------------------------------
-# Copyright (c) Microsoft Corporation.
-# Licensed under the MIT License.
-# ------------------------------------
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------
 """Customize generated code here.
 
 Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_logs_query_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_patch.py
similarity index 82%
rename from sdk/monitor/azure-monitor-query/azure/monitor/query/_logs_query_client.py
rename to sdk/monitor/azure-monitor-query/azure/monitor/query/_patch.py
index 80eac3ff80b6..82d8fd346164 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_logs_query_client.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_patch.py
@@ -1,33 +1,50 @@
-#
-# -------------------------------------------------------------------------
+# coding=utf-8
+# --------------------------------------------------------------------------
 # Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See License.txt in the project root for
-# license information.
+# Licensed under the MIT License. See License.txt in the project root for license information.
 # --------------------------------------------------------------------------
+"""Customize generated code here.
+
+Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize
+"""
 from datetime import timedelta, datetime
-from typing import Any, Union, Sequence, Dict, List, cast, Tuple, Optional, MutableMapping
+from typing import Any, Union, Sequence, Dict, List, cast, Tuple, Optional, MutableMapping, TYPE_CHECKING
 from urllib.parse import urlparse
 
-from azure.core.credentials import TokenCredential
 from azure.core.exceptions import HttpResponseError
 from azure.core.tracing.decorator import distributed_trace
 
-from ._generated._client import MonitorQueryClient
+from ._client import MonitorQueryLogsClient as GeneratedClient
+from ._sdk_moniker import SDK_MONIKER
+from ._models import LogsBatchQuery, LogsQueryResult, LogsQueryPartialResult, LogsTable, LogsTableRow
+from ._enums import LogsQueryStatus
+from ._exceptions import LogsQueryError
 from ._helpers import (
-    get_authentication_policy,
     construct_iso8601,
     order_results,
     process_error,
     process_prefer,
 )
-from ._models import LogsBatchQuery, LogsQueryResult, LogsQueryPartialResult
-from ._exceptions import LogsQueryError
-from ._version import SDK_MONIKER
+
+if TYPE_CHECKING:
+    from azure.core.credentials import TokenCredential
+
+__all__: List[str] = [
+    "LogsQueryClient",
+    "LogsBatchQuery",
+    "LogsQueryResult",
+    "LogsQueryPartialResult",
+    "LogsQueryError",
+    "LogsQueryStatus",
+    "LogsTable",
+    "LogsTableRow",
+]
+
 
 JSON = MutableMapping[str, Any]
 
 
-class LogsQueryClient(object):  # pylint: disable=client-accepts-api-version-keyword
+class LogsQueryClient(GeneratedClient):
     """LogsQueryClient. Use this client to collect and organize log and performance data from
     monitored resources. Data from different sources such as platform logs from Azure services,
     log and performance data from virtual machines agents, and usage and performance data from
@@ -38,8 +55,13 @@ class LogsQueryClient(object):  # pylint: disable=client-accepts-api-version-key
 
     :param credential: The credential to authenticate the client.
     :type credential: ~azure.core.credentials.TokenCredential
-    :keyword endpoint: The endpoint to connect to. Defaults to 'https://api.loganalytics.io/v1'.
-    :paramtype endpoint: Optional[str]
+    :keyword endpoint: The endpoint to connect to. Defaults to 'https://api.loganalytics.io'.
+    :paramtype endpoint: str
+    :keyword audience: The audience to use when requesting tokens for Microsoft Entra ID. Defaults to the endpoint.
+    :paramtype audience: str
+    :keyword api_version: The service API version. Default value is "v1". Note that overriding this default value may
+     result in unsupported behavior.
+    :paramtype api_version: str
 
     .. admonition:: Example:
 
@@ -60,22 +82,35 @@ class LogsQueryClient(object):  # pylint: disable=client-accepts-api-version-key
             :caption: Creating the LogsQueryClient for use with a sovereign cloud (i.e. non-public cloud).
     """
 
-    def __init__(self, credential: TokenCredential, **kwargs: Any) -> None:
-        endpoint = kwargs.pop("endpoint", "https://api.loganalytics.io/v1")
+    def __init__(self, credential: "TokenCredential", **kwargs: Any) -> None:
+        endpoint = kwargs.pop("endpoint", "https://api.loganalytics.io")
+        api_version = kwargs.pop("api_version", None)
+
         if not endpoint.startswith("https://") and not endpoint.startswith("http://"):
             endpoint = "https://" + endpoint
-        parsed_endpoint = urlparse(endpoint)
-        audience = kwargs.pop("audience", f"{parsed_endpoint.scheme}://{parsed_endpoint.netloc}")
+
+        # Handle backward compatibility: if endpoint includes api version path, extract it
+        if endpoint.endswith("/v1"):
+            if api_version is None:
+                api_version = "v1"
+            parsed_endpoint = urlparse(endpoint)
+            endpoint = f"{parsed_endpoint.scheme}://{parsed_endpoint.netloc}"
+        elif api_version is None:
+            api_version = "v1"  # Default api_version
+
+        audience = kwargs.pop("audience", endpoint)
+        scope = audience.rstrip("/") + "/.default"
+        credential_scopes = kwargs.pop("credential_scopes", [scope])
         self._endpoint = endpoint
-        auth_policy = kwargs.pop("authentication_policy", None)
+
        kwargs.setdefault("sdk_moniker", SDK_MONIKER)
-        self._client = MonitorQueryClient(
+        super().__init__(
             credential=credential,
-            authentication_policy=auth_policy or get_authentication_policy(credential, audience),
-            endpoint=self._endpoint,
+            endpoint=endpoint,
+            api_version=api_version,
+            credential_scopes=credential_scopes,
             **kwargs,
         )
-        self._query_op = self._client.query
 
     @distributed_trace
     def query_workspace(
@@ -134,7 +169,7 @@ def query_workspace(
 
         generated_response: JSON = {}
         try:
-            generated_response = self._query_op.execute(workspace_id=workspace_id, body=body, prefer=prefer, **kwargs)
+            generated_response = self._execute(workspace_id=workspace_id, body=body, prefer=prefer, **kwargs)
         except HttpResponseError as err:
             process_error(err, LogsQueryError)
 
@@ -182,7 +217,7 @@ def query_batch(
         queries = [cast(LogsBatchQuery, q)._to_generated() for q in queries]  # pylint: disable=protected-access
         request_order = [req["id"] for req in queries]
         batch = {"requests": queries}
-        generated = self._query_op.batch(batch, **kwargs)
+        generated = self._batch(batch, **kwargs)
         mapping = {item["id"]: item for item in generated["responses"]}
         return order_results(
             request_order,
@@ -250,7 +285,7 @@ def query_resource(
 
         generated_response: JSON = {}
         try:
-            generated_response = self._query_op.resource_execute(
+            generated_response = self._execute_with_resource_id(
                 resource_id=resource_id, body=body, prefer=prefer, **kwargs
             )
         except HttpResponseError as err:
@@ -265,13 +300,11 @@ def query_resource(
             )
         return response
 
-    def close(self) -> None:
-        """Close the :class:`~azure.monitor.query.LogsQueryClient` session."""
-        return self._client.close()
-
-    def __enter__(self) -> "LogsQueryClient":
-        self._client.__enter__()
-        return self
+
+def patch_sdk():
+    """Do not remove from this file.
 
-    def __exit__(self, *args: Any) -> None:
-        self._client.__exit__(*args)
+
+    `patch_sdk` is a last resort escape hatch that allows you to do customizations
+    you can't accomplish using the techniques described in
+    https://aka.ms/azsdk/python/dpcodegen/python/customize
+    """
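For orientation, a minimal usage sketch of the patched client above — the workspace and resource IDs are placeholders, and `azure-identity` is assumed for the credential; this mirrors the package's documented public surface rather than anything new in this patch:

```python
from datetime import timedelta

from azure.identity import DefaultAzureCredential
from azure.monitor.query import LogsQueryClient, LogsQueryStatus

client = LogsQueryClient(DefaultAzureCredential())

# query_workspace delegates to the generated _execute operation.
response = client.query_workspace(
    "<workspace-id>", "AppRequests | take 5", timespan=timedelta(days=1)
)
if response.status == LogsQueryStatus.SUCCESS:
    for table in response.tables:
        for row in table.rows:
            print(row)

# query_resource delegates to the generated _execute_with_resource_id operation.
response = client.query_resource(
    "<resource-id>", "AzureActivity | take 5", timespan=timedelta(hours=1)
)
```

Note how the backward-compatibility shim in `__init__` keeps old-style `endpoint="https://api.loganalytics.io/v1"` values working by splitting the `/v1` suffix into the new `api_version` keyword.
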
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_sdk_moniker.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_sdk_moniker.py
new file mode 100644
index 000000000000..95d10b4c7f63
--- /dev/null
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_sdk_moniker.py
@@ -0,0 +1,7 @@
+# ------------------------------------
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+# ------------------------------------
+from ._version import VERSION
+
+SDK_MONIKER = f"monitor-query/{VERSION}"
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/__init__.py
new file mode 100644
index 000000000000..8026245c2abc
--- /dev/null
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/__init__.py
@@ -0,0 +1,6 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/model_base.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/model_base.py
new file mode 100644
index 000000000000..49d5c7259389
--- /dev/null
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/model_base.py
@@ -0,0 +1,1232 @@
+# pylint: disable=too-many-lines
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=protected-access, broad-except
+
+import copy
+import calendar
+import decimal
+import functools
+import sys
+import logging
+import base64
+import re
+import typing
+import enum
+import email.utils
+from datetime import datetime, date, time, timedelta, timezone
+from json import JSONEncoder
+import xml.etree.ElementTree as ET
+from collections.abc import MutableMapping
+from typing_extensions import Self
+import isodate
+from azure.core.exceptions import DeserializationError
+from azure.core import CaseInsensitiveEnumMeta
+from azure.core.pipeline import PipelineResponse
+from azure.core.serialization import _Null
+
+_LOGGER = logging.getLogger(__name__)
+
+__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"]
+
+TZ_UTC = timezone.utc
+_T = typing.TypeVar("_T")
+
+
+def _timedelta_as_isostr(td: timedelta) -> str:
+    """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g.
+    'P4DT12H30M05S'
+
+    Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython
+
+    :param timedelta td: The timedelta to convert
+    :rtype: str
+    :return: ISO8601 version of this timedelta
+    """
+
+    # Split seconds to larger units
+    seconds = td.total_seconds()
+    minutes, seconds = divmod(seconds, 60)
+    hours, minutes = divmod(minutes, 60)
+    days, hours = divmod(hours, 24)
+
+    days, hours, minutes = list(map(int, (days, hours, minutes)))
+    seconds = round(seconds, 6)
+
+    # Build date
+    date_str = ""
+    if days:
+        date_str = "%sD" % days
+
+    if hours or minutes or seconds:
+        # Build time
+        time_str = "T"
+
+        # Hours
+        bigger_exists = date_str or hours
+        if bigger_exists:
+            time_str += "{:02}H".format(hours)
+
+        # Minutes
+        bigger_exists = bigger_exists or minutes
+        if bigger_exists:
+            time_str += "{:02}M".format(minutes)
+
+        # Seconds
+        try:
+            if seconds.is_integer():
+                seconds_string = "{:02}".format(int(seconds))
+            else:
+                # 9 chars long w/ leading 0, 6 digits after decimal
+                seconds_string = "%09.6f" % seconds
+            # Remove trailing zeros
+            seconds_string = seconds_string.rstrip("0")
+        except AttributeError:  # int.is_integer() raises
+            seconds_string = "{:02}".format(seconds)
+
+        time_str += "{}S".format(seconds_string)
+    else:
+        time_str = ""
+
+    return "P" + date_str + time_str
+
+
+def _serialize_bytes(o, format: typing.Optional[str] = None) -> str:
+    encoded = base64.b64encode(o).decode()
+    if format == "base64url":
+        return encoded.strip("=").replace("+", "-").replace("/", "_")
+    return encoded
+
+
+def _serialize_datetime(o, format: typing.Optional[str] = None):
+    if hasattr(o, "year") and hasattr(o, "hour"):
+        if format == "rfc7231":
+            return email.utils.format_datetime(o, usegmt=True)
+        if format == "unix-timestamp":
+            return int(calendar.timegm(o.utctimetuple()))
+
+        # astimezone() fails for naive times in Python 2.7, so make sure o is aware (tzinfo is set)
+        if not o.tzinfo:
+            iso_formatted = o.replace(tzinfo=TZ_UTC).isoformat()
+        else:
+            iso_formatted = o.astimezone(TZ_UTC).isoformat()
+        # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt)
+        return iso_formatted.replace("+00:00", "Z")
+    # Next try datetime.date or datetime.time
+    return o.isoformat()
+
+
+def _is_readonly(p):
+    try:
+        return p._visibility == ["read"]
+    except AttributeError:
+        return False
+
+
+class SdkJSONEncoder(JSONEncoder):
+    """A JSON encoder that's capable of serializing datetime objects and bytes."""
+
+    def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.exclude_readonly = exclude_readonly
+        self.format = format
+
+    def default(self, o):  # pylint: disable=too-many-return-statements
+        if _is_model(o):
+            if self.exclude_readonly:
+                readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)]
+                return {k: v for k, v in o.items() if k not in readonly_props}
+            return dict(o.items())
+        try:
+            return super(SdkJSONEncoder, self).default(o)
+        except TypeError:
+            if isinstance(o, _Null):
+                return None
+            if isinstance(o, decimal.Decimal):
+                return float(o)
+            if isinstance(o, (bytes, bytearray)):
+                return _serialize_bytes(o, self.format)
+            try:
+                # First try datetime.datetime
+                return _serialize_datetime(o, self.format)
+            except AttributeError:
+                pass
+            # Last, try datetime.timedelta
+            try:
+                return _timedelta_as_isostr(o)
+            except AttributeError:
+                # This will be raised when it hits value.total_seconds in the method above
+                pass
+            return super(SdkJSONEncoder, self).default(o)
+
+
+_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+_VALID_RFC7231 = re.compile(
+    r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s"
+    r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT"
+)
+
+
+def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime:
+    """Deserialize ISO-8601 formatted string into Datetime object.
+
+    :param str attr: response string to be deserialized.
+    :rtype: ~datetime.datetime
+    :returns: The datetime object from that input
+    """
+    if isinstance(attr, datetime):
+        # I'm already deserialized
+        return attr
+    attr = attr.upper()
+    match = _VALID_DATE.match(attr)
+    if not match:
+        raise ValueError("Invalid datetime string: " + attr)
+
+    check_decimal = attr.split(".")
+    if len(check_decimal) > 1:
+        decimal_str = ""
+        for digit in check_decimal[1]:
+            if digit.isdigit():
+                decimal_str += digit
+            else:
+                break
+        if len(decimal_str) > 6:
+            attr = attr.replace(decimal_str, decimal_str[0:6])
+
+    date_obj = isodate.parse_datetime(attr)
+    test_utc = date_obj.utctimetuple()
+    if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
+        raise OverflowError("Hit max or min date")
+    return date_obj
+
+
+def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime:
+    """Deserialize RFC7231 formatted string into Datetime object.
+
+    :param str attr: response string to be deserialized.
+    :rtype: ~datetime.datetime
+    :returns: The datetime object from that input
+    """
+    if isinstance(attr, datetime):
+        # I'm already deserialized
+        return attr
+    match = _VALID_RFC7231.match(attr)
+    if not match:
+        raise ValueError("Invalid datetime string: " + attr)
+
+    return email.utils.parsedate_to_datetime(attr)
+
+
+def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime:
+    """Deserialize unix timestamp into Datetime object.
+
+    :param str attr: response string to be deserialized.
+    :rtype: ~datetime.datetime
+    :returns: The datetime object from that input
+    """
+    if isinstance(attr, datetime):
+        # I'm already deserialized
+        return attr
+    return datetime.fromtimestamp(attr, TZ_UTC)
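+# NOTE: a quick reference for the wire formats the three deserializers above accept
+# (illustrative values; the first three all denote the same instant, 2024-05-01T12:30:00Z):
+#   _deserialize_datetime("2024-05-01T12:30:00Z")                   # RFC 3339 / ISO 8601
+#   _deserialize_datetime_rfc7231("Wed, 01 May 2024 12:30:00 GMT")  # RFC 7231 HTTP-date
+#   _deserialize_datetime_unix_timestamp(1714566600)                # Unix seconds
+# and, in the serializing direction, _timedelta_as_isostr(timedelta(days=4, hours=12,
+# minutes=30, seconds=5)) returns the docstring's own example, 'P4DT12H30M05S'.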
+def _deserialize_date(attr: typing.Union[str, date]) -> date:
+    """Deserialize ISO-8601 formatted string into Date object.
+    :param str attr: response string to be deserialized.
+    :rtype: date
+    :returns: The date object from that input
+    """
+    # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception.
+    if isinstance(attr, date):
+        return attr
+    return isodate.parse_date(attr, defaultmonth=None, defaultday=None)  # type: ignore
+
+
+def _deserialize_time(attr: typing.Union[str, time]) -> time:
+    """Deserialize ISO-8601 formatted string into time object.
+
+    :param str attr: response string to be deserialized.
+    :rtype: datetime.time
+    :returns: The time object from that input
+    """
+    if isinstance(attr, time):
+        return attr
+    return isodate.parse_time(attr)
+
+
+def _deserialize_bytes(attr):
+    if isinstance(attr, (bytes, bytearray)):
+        return attr
+    return bytes(base64.b64decode(attr))
+
+
+def _deserialize_bytes_base64(attr):
+    if isinstance(attr, (bytes, bytearray)):
+        return attr
+    padding = "=" * (3 - (len(attr) + 3) % 4)  # type: ignore
+    attr = attr + padding  # type: ignore
+    encoded = attr.replace("-", "+").replace("_", "/")
+    return bytes(base64.b64decode(encoded))
+
+
+def _deserialize_duration(attr):
+    if isinstance(attr, timedelta):
+        return attr
+    return isodate.parse_duration(attr)
+
+
+def _deserialize_decimal(attr):
+    if isinstance(attr, decimal.Decimal):
+        return attr
+    return decimal.Decimal(str(attr))
+
+
+def _deserialize_int_as_str(attr):
+    if isinstance(attr, int):
+        return attr
+    return int(attr)
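+# NOTE: round-trip example for the two base64 variants handled above (illustrative bytes):
+#   _serialize_bytes(b"\xfb\xef") == '++8='  (standard base64), while with
+#   format="base64url" it becomes '--8' (padding stripped, '+/' mapped to '-_');
+#   _deserialize_bytes_base64("--8") restores b"\xfb\xef" by re-padding first.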
+_DESERIALIZE_MAPPING = {
+    datetime: _deserialize_datetime,
+    date: _deserialize_date,
+    time: _deserialize_time,
+    bytes: _deserialize_bytes,
+    bytearray: _deserialize_bytes,
+    timedelta: _deserialize_duration,
+    typing.Any: lambda x: x,
+    decimal.Decimal: _deserialize_decimal,
+}
+
+_DESERIALIZE_MAPPING_WITHFORMAT = {
+    "rfc3339": _deserialize_datetime,
+    "rfc7231": _deserialize_datetime_rfc7231,
+    "unix-timestamp": _deserialize_datetime_unix_timestamp,
+    "base64": _deserialize_bytes,
+    "base64url": _deserialize_bytes_base64,
+}
+
+
+def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None):
+    if annotation is int and rf and rf._format == "str":
+        return _deserialize_int_as_str
+    if rf and rf._format:
+        return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format)
+    return _DESERIALIZE_MAPPING.get(annotation)  # pyright: ignore
+
+
+def _get_type_alias_type(module_name: str, alias_name: str):
+    types = {
+        k: v
+        for k, v in sys.modules[module_name].__dict__.items()
+        if isinstance(v, typing._GenericAlias)  # type: ignore
+    }
+    if alias_name not in types:
+        return alias_name
+    return types[alias_name]
+
+
+def _get_model(module_name: str, model_name: str):
+    models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)}
+    module_end = module_name.rsplit(".", 1)[0]
+    models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)})
+    if isinstance(model_name, str):
+        model_name = model_name.split(".")[-1]
+    if model_name not in models:
+        return model_name
+    return models[model_name]
+
+
+_UNSET = object()
+
+
+class _MyMutableMapping(MutableMapping[str, typing.Any]):
+    def __init__(self, data: typing.Dict[str, typing.Any]) -> None:
+        self._data = data
+
+    def __contains__(self, key: typing.Any) -> bool:
+        return key in self._data
+
+    def __getitem__(self, key: str) -> typing.Any:
+        return self._data.__getitem__(key)
+
+    def __setitem__(self, key: str, value: typing.Any) -> None:
+        self._data.__setitem__(key, value)
+
+    def __delitem__(self, key: str) -> None:
+        self._data.__delitem__(key)
+
+    def __iter__(self) -> typing.Iterator[typing.Any]:
+        return self._data.__iter__()
+
+    def __len__(self) -> int:
+        return self._data.__len__()
+
+    def __ne__(self, other: typing.Any) -> bool:
+        return not self.__eq__(other)
+
+    def keys(self) -> typing.KeysView[str]:
+        """
+        :returns: a set-like object providing a view on D's keys
+        :rtype: ~typing.KeysView
+        """
+        return self._data.keys()
+
+    def values(self) -> typing.ValuesView[typing.Any]:
+        """
+        :returns: an object providing a view on D's values
+        :rtype: ~typing.ValuesView
+        """
+        return self._data.values()
+
+    def items(self) -> typing.ItemsView[str, typing.Any]:
+        """
+        :returns: set-like object providing a view on D's items
+        :rtype: ~typing.ItemsView
+        """
+        return self._data.items()
+
+    def get(self, key: str, default: typing.Any = None) -> typing.Any:
+        """
+        Get the value for key if key is in the dictionary, else default.
+        :param str key: The key to look up.
+        :param any default: The value to return if key is not in the dictionary. Defaults to None
+        :returns: D[k] if k in D, else d.
+        :rtype: any
+        """
+        try:
+            return self[key]
+        except KeyError:
+            return default
+
+    @typing.overload
+    def pop(self, key: str) -> typing.Any: ...  # pylint: disable=arguments-differ
+
+    @typing.overload
+    def pop(self, key: str, default: _T) -> _T: ...  # pylint: disable=signature-differs
+
+    @typing.overload
+    def pop(self, key: str, default: typing.Any) -> typing.Any: ...  # pylint: disable=signature-differs
+
+    def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
+        """
+        Removes specified key and return the corresponding value.
+        :param str key: The key to pop.
+        :param any default: The value to return if key is not in the dictionary
+        :returns: The value corresponding to the key.
+        :rtype: any
+        :raises KeyError: If key is not found and default is not given.
+        """
+        if default is _UNSET:
+            return self._data.pop(key)
+        return self._data.pop(key, default)
+
+    def popitem(self) -> typing.Tuple[str, typing.Any]:
+        """
+        Removes and returns some (key, value) pair
+        :returns: The (key, value) pair.
+        :rtype: tuple
+        :raises KeyError: if D is empty.
+        """
+        return self._data.popitem()
+
+    def clear(self) -> None:
+        """
+        Remove all items from D.
+        """
+        self._data.clear()
+
+    def update(self, *args: typing.Any, **kwargs: typing.Any) -> None:  # pylint: disable=arguments-differ
+        """
+        Updates D from mapping/iterable E and F.
+        :param any args: Either a mapping object or an iterable of key-value pairs.
+        """
+        self._data.update(*args, **kwargs)
+
+    @typing.overload
+    def setdefault(self, key: str, default: None = None) -> None: ...
+
+    @typing.overload
+    def setdefault(self, key: str, default: typing.Any) -> typing.Any: ...  # pylint: disable=signature-differs
+
+    def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any:
+        """
+        Same as calling D.get(k, d), and setting D[k]=d if k not found
+        :param str key: The key to look up.
+        :param any default: The value to set if key is not in the dictionary
+        :returns: D[k] if k in D, else d.
+        :rtype: any
+        """
+        if default is _UNSET:
+            return self._data.setdefault(key)
+        return self._data.setdefault(key, default)
+
+    def __eq__(self, other: typing.Any) -> bool:
+        try:
+            other_model = self.__class__(other)
+        except Exception:
+            return False
+        return self._data == other_model._data
+
+    def __repr__(self) -> str:
+        return str(self._data)
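+# NOTE: because generated models subclass this mapping (via Model, defined below),
+# instances behave like plain dicts over the raw wire data, e.g.:
+#   m = Model({"tables": []})
+#   m["tables"]                 # [] -- the raw wire value
+#   m.get("render")             # None, like dict.get
+#   m.setdefault("render", {})  # inserts and returns {}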
+def _is_model(obj: typing.Any) -> bool:
+    return getattr(obj, "_is_model", False)
+
+
+def _serialize(o, format: typing.Optional[str] = None):  # pylint: disable=too-many-return-statements
+    if isinstance(o, list):
+        return [_serialize(x, format) for x in o]
+    if isinstance(o, dict):
+        return {k: _serialize(v, format) for k, v in o.items()}
+    if isinstance(o, set):
+        return {_serialize(x, format) for x in o}
+    if isinstance(o, tuple):
+        return tuple(_serialize(x, format) for x in o)
+    if isinstance(o, (bytes, bytearray)):
+        return _serialize_bytes(o, format)
+    if isinstance(o, decimal.Decimal):
+        return float(o)
+    if isinstance(o, enum.Enum):
+        return o.value
+    if isinstance(o, int):
+        if format == "str":
+            return str(o)
+        return o
+    try:
+        # First try datetime.datetime
+        return _serialize_datetime(o, format)
+    except AttributeError:
+        pass
+    # Last, try datetime.timedelta
+    try:
+        return _timedelta_as_isostr(o)
+    except AttributeError:
+        # This will be raised when it hits value.total_seconds in the method above
+        pass
+    return o
+
+
+def _get_rest_field(
+    attr_to_rest_field: typing.Dict[str, "_RestField"], rest_name: str
+) -> typing.Optional["_RestField"]:
+    try:
+        return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name)
+    except StopIteration:
+        return None
+
+
+def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any:
+    if not rf:
+        return _serialize(value, None)
+    if rf._is_multipart_file_input:
+        return value
+    if rf._is_model:
+        return _deserialize(rf._type, value)
+    if isinstance(value, ET.Element):
+        value = _deserialize(rf._type, value)
+    return _serialize(value, rf._format)
+
+
+class Model(_MyMutableMapping):
+    _is_model = True
+    # label whether current class's _attr_to_rest_field has been calculated
+    # could not see _attr_to_rest_field directly because subclass inherits it from parent class
+    _calculated: typing.Set[str] = set()
+
+    def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None:
+        class_name = self.__class__.__name__
+        if len(args) > 1:
+            raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given")
+        dict_to_pass = {
+            rest_field._rest_name: rest_field._default
+            for rest_field in self._attr_to_rest_field.values()
+            if rest_field._default is not _UNSET
+        }
+        if args:  # pylint: disable=too-many-nested-blocks
+            if isinstance(args[0], ET.Element):
+                existed_attr_keys = []
+                model_meta = getattr(self, "_xml", {})
+
+                for rf in self._attr_to_rest_field.values():
+                    prop_meta = getattr(rf, "_xml", {})
+                    xml_name = prop_meta.get("name", rf._rest_name)
+                    xml_ns = prop_meta.get("ns", model_meta.get("ns", None))
+                    if xml_ns:
+                        xml_name = "{" + xml_ns + "}" + xml_name
+
+                    # attribute
+                    if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None:
+                        existed_attr_keys.append(xml_name)
+                        dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name))
+                        continue
+
+                    # unwrapped element is array
+                    if prop_meta.get("unwrapped", False):
+                        # unwrapped array could either use prop items meta/prop meta
+                        if prop_meta.get("itemsName"):
+                            xml_name = prop_meta.get("itemsName")
+                            xml_ns = prop_meta.get("itemNs")
+                            if xml_ns:
+                                xml_name = "{" + xml_ns + "}" + xml_name
+                        items = args[0].findall(xml_name)  # pyright: ignore
+                        if len(items) > 0:
+                            existed_attr_keys.append(xml_name)
+                            dict_to_pass[rf._rest_name] = _deserialize(rf._type, items)
+                        continue
+
+                    # text element is primitive type
+                    if prop_meta.get("text", False):
+                        if args[0].text is not None:
+                            dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text)
+                        continue
+
+                    # wrapped element could be normal property or array, it should only have one element
+                    item = args[0].find(xml_name)
+                    if item is not None:
+                        existed_attr_keys.append(xml_name)
+                        dict_to_pass[rf._rest_name] = _deserialize(rf._type, item)
+
+                # rest thing is additional properties
+                for e in args[0]:
+                    if e.tag not in existed_attr_keys:
+                        dict_to_pass[e.tag] = _convert_element(e)
+            else:
+                dict_to_pass.update(
+                    {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()}
+                )
+        else:
+            non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field]
+            if non_attr_kwargs:
+                # actual type errors only throw the first wrong keyword arg they see, so following that.
+                raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'")
+            dict_to_pass.update(
+                {
+                    self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v)
+                    for k, v in kwargs.items()
+                    if v is not None
+                }
+            )
+        super().__init__(dict_to_pass)
+
+    def copy(self) -> "Model":
+        return Model(self.__dict__)
+
+    def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self:
+        if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated:
+            # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping',
+            # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object'
+            mros = cls.__mro__[:-9][::-1]  # ignore parents, and reverse the mro order
+            attr_to_rest_field: typing.Dict[str, _RestField] = {  # map attribute name to rest_field property
+                k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type")
+            }
+            annotations = {
+                k: v
+                for mro_class in mros
+                if hasattr(mro_class, "__annotations__")
+                for k, v in mro_class.__annotations__.items()
+            }
+            for attr, rf in attr_to_rest_field.items():
+                rf._module = cls.__module__
+                if not rf._type:
+                    rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None))
+                if not rf._rest_name_input:
+                    rf._rest_name_input = attr
+            cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items())
+            cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}")
+
+        return super().__new__(cls)
+
+    def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None:
+        for base in cls.__bases__:
+            if hasattr(base, "__mapping__"):
+                base.__mapping__[discriminator or cls.__name__] = cls  # type: ignore
+
+    @classmethod
+    def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]:
+        for v in cls.__dict__.values():
+            if isinstance(v, _RestField) and v._is_discriminator and v._rest_name not in exist_discriminators:
+                return v
+        return None
+
+    @classmethod
+    def _deserialize(cls, data, exist_discriminators):
+        if not hasattr(cls, "__mapping__"):
+            return cls(data)
+        discriminator = cls._get_discriminator(exist_discriminators)
+        if discriminator is None:
+            return cls(data)
+        exist_discriminators.append(discriminator._rest_name)
+        if isinstance(data, ET.Element):
+            model_meta = getattr(cls, "_xml", {})
+            prop_meta = getattr(discriminator, "_xml", {})
+            xml_name = prop_meta.get("name", discriminator._rest_name)
+            xml_ns = prop_meta.get("ns", model_meta.get("ns", None))
+            if xml_ns:
+                xml_name = "{" + xml_ns + "}" + xml_name
+
+            if data.get(xml_name) is not None:
+                discriminator_value = data.get(xml_name)
+            else:
+                discriminator_value = data.find(xml_name).text  # pyright: ignore
+        else:
+            discriminator_value = data.get(discriminator._rest_name)
+        mapped_cls = cls.__mapping__.get(discriminator_value, cls)  # pyright: ignore # pylint: disable=no-member
+        return mapped_cls._deserialize(data, exist_discriminators)
+
+    def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]:
+        """Return a dict that can be turned into json using json.dump.
+
+        :keyword bool exclude_readonly: Whether to remove the readonly properties.
+        :returns: A dict JSON compatible object
+        :rtype: dict
+        """
+
+        result = {}
+        readonly_props = []
+        if exclude_readonly:
+            readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)]
+        for k, v in self.items():
+            if exclude_readonly and k in readonly_props:  # pyright: ignore
+                continue
+            is_multipart_file_input = False
+            try:
+                is_multipart_file_input = next(
+                    rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k
+                )._is_multipart_file_input
+            except StopIteration:
+                pass
+            result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly)
+        return result
+
+    @staticmethod
+    def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any:
+        if v is None or isinstance(v, _Null):
+            return None
+        if isinstance(v, (list, tuple, set)):
+            return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v)
+        if isinstance(v, dict):
+            return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()}
+        return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v
+
+
+def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj):
+    if _is_model(obj):
+        return obj
+    return _deserialize(model_deserializer, obj)
+
+
+def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj):
+    if obj is None:
+        return obj
+    return _deserialize_with_callable(if_obj_deserializer, obj)
+
+
+def _deserialize_with_union(deserializers, obj):
+    for deserializer in deserializers:
+        try:
+            return _deserialize(deserializer, obj)
+        except DeserializationError:
+            pass
+    raise DeserializationError()
+
+
+def _deserialize_dict(
+    value_deserializer: typing.Optional[typing.Callable],
+    module: typing.Optional[str],
+    obj: typing.Dict[typing.Any, typing.Any],
+):
+    if obj is None:
+        return obj
+    if isinstance(obj, ET.Element):
+        obj = {child.tag: child for child in obj}
+    return {k: _deserialize(value_deserializer, v, module) for k, v in obj.items()}
+
+
+def _deserialize_multiple_sequence(
+    entry_deserializers: typing.List[typing.Optional[typing.Callable]],
+    module: typing.Optional[str],
+    obj,
+):
+    if obj is None:
+        return obj
+    return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers))
+
+
+def _deserialize_sequence(
+    deserializer: typing.Optional[typing.Callable],
+    module: typing.Optional[str],
+    obj,
+):
+    if obj is None:
+        return obj
+    if isinstance(obj, ET.Element):
+        obj = list(obj)
+    return type(obj)(_deserialize(deserializer, entry, module) for entry in obj)
+
+
+def _sorted_annotations(types: typing.List[typing.Any]) -> typing.List[typing.Any]:
+    return sorted(
+        types,
+        key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"),
+    )
+
+
+def _get_deserialize_callable_from_annotation(  # pylint: disable=too-many-return-statements, too-many-branches
+    annotation: typing.Any,
+    module: typing.Optional[str],
+    rf: typing.Optional["_RestField"] = None,
+) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]:
+    if not annotation:
+        return None
+
+    # is it a type alias?
+    if isinstance(annotation, str):
+        if module is not None:
+            annotation = _get_type_alias_type(module, annotation)
+
+    # is it a forward ref / in quotes?
+    if isinstance(annotation, (str, typing.ForwardRef)):
+        try:
+            model_name = annotation.__forward_arg__  # type: ignore
+        except AttributeError:
+            model_name = annotation
+        if module is not None:
+            annotation = _get_model(module, model_name)  # type: ignore
+
+    try:
+        if module and _is_model(annotation):
+            if rf:
+                rf._is_model = True
+
+            return functools.partial(_deserialize_model, annotation)  # pyright: ignore
+    except Exception:
+        pass
+
+    # is it a literal?
+    try:
+        if annotation.__origin__ is typing.Literal:  # pyright: ignore
+            return None
+    except AttributeError:
+        pass
+
+    # is it optional?
+    try:
+        if any(a for a in annotation.__args__ if a == type(None)):  # pyright: ignore
+            if len(annotation.__args__) <= 2:  # pyright: ignore
+                if_obj_deserializer = _get_deserialize_callable_from_annotation(
+                    next(a for a in annotation.__args__ if a != type(None)), module, rf  # pyright: ignore
+                )
+
+                return functools.partial(_deserialize_with_optional, if_obj_deserializer)
+            # the type is Optional[Union[...]], we need to remove the None type from the Union
+            annotation_copy = copy.copy(annotation)
+            annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)]  # pyright: ignore
+            return _get_deserialize_callable_from_annotation(annotation_copy, module, rf)
+    except AttributeError:
+        pass
+
+    # is it union?
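+    # e.g. Union[MyModel, str]: each member's deserializer is tried in order and the
+    # first that does not raise DeserializationError wins; _sorted_annotations pushes
+    # primitives such as str to the end so more specific types are attempted first.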
+    if getattr(annotation, "__origin__", None) is typing.Union:
+        # initial ordering is we make `string` the last deserialization option, because it is often the most generic
+        deserializers = [
+            _get_deserialize_callable_from_annotation(arg, module, rf)
+            for arg in _sorted_annotations(annotation.__args__)  # pyright: ignore
+        ]
+
+        return functools.partial(_deserialize_with_union, deserializers)
+
+    try:
+        if annotation._name == "Dict":  # pyright: ignore
+            value_deserializer = _get_deserialize_callable_from_annotation(
+                annotation.__args__[1], module, rf  # pyright: ignore
+            )
+
+            return functools.partial(
+                _deserialize_dict,
+                value_deserializer,
+                module,
+            )
+    except (AttributeError, IndexError):
+        pass
+    try:
+        if annotation._name in ["List", "Set", "Tuple", "Sequence"]:  # pyright: ignore
+            if len(annotation.__args__) > 1:  # pyright: ignore
+                entry_deserializers = [
+                    _get_deserialize_callable_from_annotation(dt, module, rf)
+                    for dt in annotation.__args__  # pyright: ignore
+                ]
+                return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module)
+            deserializer = _get_deserialize_callable_from_annotation(
+                annotation.__args__[0], module, rf  # pyright: ignore
+            )
+
+            return functools.partial(_deserialize_sequence, deserializer, module)
+    except (TypeError, IndexError, AttributeError, SyntaxError):
+        pass
+
+    def _deserialize_default(
+        deserializer,
+        obj,
+    ):
+        if obj is None:
+            return obj
+        try:
+            return _deserialize_with_callable(deserializer, obj)
+        except Exception:
+            pass
+        return obj
+
+    if get_deserializer(annotation, rf):
+        return functools.partial(_deserialize_default, get_deserializer(annotation, rf))
+
+    return functools.partial(_deserialize_default, annotation)
+
+
+def _deserialize_with_callable(
+    deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]],
+    value: typing.Any,
+):  # pylint: disable=too-many-return-statements
+    try:
+        if value is None or isinstance(value, _Null):
+            return None
+        if isinstance(value, ET.Element):
+            if deserializer is str:
+                return value.text or ""
+            if deserializer is int:
+                return int(value.text) if value.text else None
+            if deserializer is float:
+                return float(value.text) if value.text else None
+            if deserializer is bool:
+                return value.text == "true" if value.text else None
+        if deserializer is None:
+            return value
+        if deserializer in [int, float, bool]:
+            return deserializer(value)
+        if isinstance(deserializer, CaseInsensitiveEnumMeta):
+            try:
+                return deserializer(value)
+            except ValueError:
+                # for unknown value, return raw value
+                return value
+        if isinstance(deserializer, type) and issubclass(deserializer, Model):
+            return deserializer._deserialize(value, [])
+        return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value)
+    except Exception as e:
+        raise DeserializationError() from e
+
+
+def _deserialize(
+    deserializer: typing.Any,
+    value: typing.Any,
+    module: typing.Optional[str] = None,
+    rf: typing.Optional["_RestField"] = None,
+    format: typing.Optional[str] = None,
+) -> typing.Any:
+    if isinstance(value, PipelineResponse):
+        value = value.http_response.json()
+    if rf is None and format:
+        rf = _RestField(format=format)
+    if not isinstance(deserializer, functools.partial):
+        deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf)
+    return _deserialize_with_callable(deserializer, value)
+
+
+def _failsafe_deserialize(
+    deserializer: typing.Any,
+    value: typing.Any,
+    module: typing.Optional[str] = None,
+    rf: typing.Optional["_RestField"] = None,
+    format: typing.Optional[str] = None,
+) -> typing.Any:
+    try:
+        return _deserialize(deserializer, value, module, rf, format)
+    except DeserializationError:
+        _LOGGER.warning(
+            "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
+        )
+        return None
+
+
+def _failsafe_deserialize_xml(
+    deserializer: typing.Any,
+    value: typing.Any,
+) -> typing.Any:
+    try:
+        return _deserialize_xml(deserializer, value)
+    except DeserializationError:
+        _LOGGER.warning(
+            "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
+        )
+        return None
+
+
+class _RestField:
+    def __init__(
+        self,
+        *,
+        name: typing.Optional[str] = None,
+        type: typing.Optional[typing.Callable] = None,  # pylint: disable=redefined-builtin
+        is_discriminator: bool = False,
+        visibility: typing.Optional[typing.List[str]] = None,
+        default: typing.Any = _UNSET,
+        format: typing.Optional[str] = None,
+        is_multipart_file_input: bool = False,
+        xml: typing.Optional[typing.Dict[str, typing.Any]] = None,
+    ):
+        self._type = type
+        self._rest_name_input = name
+        self._module: typing.Optional[str] = None
+        self._is_discriminator = is_discriminator
+        self._visibility = visibility
+        self._is_model = False
+        self._default = default
+        self._format = format
+        self._is_multipart_file_input = is_multipart_file_input
+        self._xml = xml if xml is not None else {}
+
+    @property
+    def _class_type(self) -> typing.Any:
+        return getattr(self._type, "args", [None])[0]
+
+    @property
+    def _rest_name(self) -> str:
+        if self._rest_name_input is None:
+            raise ValueError("Rest name was never set")
+        return self._rest_name_input
+
+    def __get__(self, obj: Model, type=None):  # pylint: disable=redefined-builtin
+        # by this point, type and rest_name will have a value bc we default
+        # them in __new__ of the Model class
+        item = obj.get(self._rest_name)
+        if item is None:
+            return item
+        if self._is_model:
+            return item
+        return _deserialize(self._type, _serialize(item, self._format), rf=self)
+
+    def __set__(self, obj: Model, value) -> None:
+        if value is None:
+            # we want to wipe out entries if users set attr to None
+            try:
+                obj.__delitem__(self._rest_name)
+            except KeyError:
+                pass
+            return
+        if self._is_model:
+            if not _is_model(value):
+                value = _deserialize(self._type, value)
+            obj.__setitem__(self._rest_name, value)
+            return
+        obj.__setitem__(self._rest_name, _serialize(value, self._format))
+
+    def _get_deserialize_callable_from_annotation(
+        self, annotation: typing.Any
+    ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]:
+        return _get_deserialize_callable_from_annotation(annotation, self._module, self)
+
+
+def rest_field(
+    *,
+    name: typing.Optional[str] = None,
+    type: typing.Optional[typing.Callable] = None,  # pylint: disable=redefined-builtin
+    visibility: typing.Optional[typing.List[str]] = None,
+    default: typing.Any = _UNSET,
+    format: typing.Optional[str] = None,
+    is_multipart_file_input: bool = False,
+    xml: typing.Optional[typing.Dict[str, typing.Any]] = None,
+) -> typing.Any:
+    return _RestField(
+        name=name,
+        type=type,
+        visibility=visibility,
+        default=default,
+        format=format,
+        is_multipart_file_input=is_multipart_file_input,
+        xml=xml,
+    )
+
+
+def rest_discriminator(
+    *,
+    name: typing.Optional[str] = None,
+    type: typing.Optional[typing.Callable] = None,  # pylint: disable=redefined-builtin
+    visibility: typing.Optional[typing.List[str]] = None,
+    xml: typing.Optional[typing.Dict[str, typing.Any]] = None,
+) -> typing.Any:
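+    # A discriminator is an ordinary _RestField flagged is_discriminator=True:
+    # Model.__init_subclass__ registers each subclass in __mapping__ under its
+    # discriminator value, and Model._deserialize consults that mapping to pick
+    # the concrete subclass when deserializing a polymorphic payload.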
+    return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml)
+
+
+def serialize_xml(model: Model, exclude_readonly: bool = False) -> str:
+    """Serialize a model to XML.
+
+    :param Model model: The model to serialize.
+    :param bool exclude_readonly: Whether to exclude readonly properties.
+    :returns: The XML representation of the model.
+    :rtype: str
+    """
+    return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode")  # type: ignore
+
+
+def _get_element(
+    o: typing.Any,
+    exclude_readonly: bool = False,
+    parent_meta: typing.Optional[typing.Dict[str, typing.Any]] = None,
+    wrapped_element: typing.Optional[ET.Element] = None,
+) -> typing.Union[ET.Element, typing.List[ET.Element]]:
+    if _is_model(o):
+        model_meta = getattr(o, "_xml", {})
+
+        # if prop is a model, then use the prop element directly, else generate a wrapper of model
+        if wrapped_element is None:
+            wrapped_element = _create_xml_element(
+                model_meta.get("name", o.__class__.__name__),
+                model_meta.get("prefix"),
+                model_meta.get("ns"),
+            )
+
+        readonly_props = []
+        if exclude_readonly:
+            readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)]
+
+        for k, v in o.items():
+            # do not serialize readonly properties
+            if exclude_readonly and k in readonly_props:
+                continue
+
+            prop_rest_field = _get_rest_field(o._attr_to_rest_field, k)
+            if prop_rest_field:
+                prop_meta = getattr(prop_rest_field, "_xml").copy()
+                # use the wire name as xml name if no specific name is set
+                if prop_meta.get("name") is None:
+                    prop_meta["name"] = k
+            else:
+                # additional properties will not have rest field, use the wire name as xml name
+                prop_meta = {"name": k}
+
+            # if no ns for prop, use model's
+            if prop_meta.get("ns") is None and model_meta.get("ns"):
+                prop_meta["ns"] = model_meta.get("ns")
+                prop_meta["prefix"] = model_meta.get("prefix")
+
+            if prop_meta.get("unwrapped", False):
+                # unwrapped could only set on array
+                wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta))
+            elif prop_meta.get("text", False):
+                # text could only set on primitive type
+                wrapped_element.text = _get_primitive_type_value(v)
+            elif prop_meta.get("attribute", False):
+                xml_name = prop_meta.get("name", k)
+                if prop_meta.get("ns"):
+                    ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns"))  # pyright: ignore
+                    xml_name = "{" + prop_meta.get("ns") + "}" + xml_name  # pyright: ignore
+                # attribute should be primitive type
+                wrapped_element.set(xml_name, _get_primitive_type_value(v))
+            else:
+                # other wrapped prop element
+                wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta))
+        return wrapped_element
+    if isinstance(o, list):
+        return [_get_element(x, exclude_readonly, parent_meta) for x in o]  # type: ignore
+    if isinstance(o, dict):
+        result = []
+        for k, v in o.items():
+            result.append(
+                _get_wrapped_element(
+                    v,
+                    exclude_readonly,
+                    {
+                        "name": k,
+                        "ns": parent_meta.get("ns") if parent_meta else None,
+                        "prefix": parent_meta.get("prefix") if parent_meta else None,
+                    },
+                )
+            )
+        return result
+
+    # primitive case need to create element based on parent_meta
+    if parent_meta:
+        return _get_wrapped_element(
+            o,
+            exclude_readonly,
+            {
+                "name": parent_meta.get("itemsName", parent_meta.get("name")),
+                "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")),
+                "ns": parent_meta.get("itemsNs", parent_meta.get("ns")),
+            },
+        )
+
+    raise ValueError("Could not serialize value into xml: " + o)
+
+
+def _get_wrapped_element(
+    v: typing.Any,
+    exclude_readonly: bool,
+    meta: typing.Optional[typing.Dict[str, typing.Any]],
+) -> ET.Element:
+    wrapped_element = _create_xml_element(
+        meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None
+    )
+    if isinstance(v, (dict, list)):
+        wrapped_element.extend(_get_element(v, exclude_readonly, meta))
+    elif _is_model(v):
+        _get_element(v, exclude_readonly, meta, wrapped_element)
+    else:
+        wrapped_element.text = _get_primitive_type_value(v)
+    return wrapped_element
+
+
+def _get_primitive_type_value(v) -> str:
+    if v is True:
+        return "true"
+    if v is False:
+        return "false"
+    if isinstance(v, _Null):
+        return ""
+    return str(v)
+
+
+def _create_xml_element(tag, prefix=None, ns=None):
+    if prefix and ns:
+        ET.register_namespace(prefix, ns)
+    if ns:
+        return ET.Element("{" + ns + "}" + tag)
+    return ET.Element(tag)
+
+
+def _deserialize_xml(
+    deserializer: typing.Any,
+    value: str,
+) -> typing.Any:
+    element = ET.fromstring(value)  # nosec
+    return _deserialize(deserializer, element)
+
+
+def _convert_element(e: ET.Element):
+    # dict case
+    if len(e.attrib) > 0 or len({child.tag for child in e}) > 1:
+        dict_result: typing.Dict[str, typing.Any] = {}
+        for child in e:
+            if dict_result.get(child.tag) is not None:
+                if isinstance(dict_result[child.tag], list):
+                    dict_result[child.tag].append(_convert_element(child))
+                else:
+                    dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)]
+            else:
+                dict_result[child.tag] = _convert_element(child)
+        dict_result.update(e.attrib)
+        return dict_result
+    # array case
+    if len(e) > 0:
+        array_result: typing.List[typing.Any] = []
+        for child in e:
+            array_result.append(_convert_element(child))
+        return array_result
+    # primitive case
+    return e.text
diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_serialization.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/serialization.py
similarity index 93%
rename from sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_serialization.py
rename to sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/serialization.py
index e2ad51869908..eb86ea23c965 100644
--- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/_serialization.py
+++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/serialization.py
@@ -1,28 +1,10 @@
-# pylint: disable=too-many-lines
+# pylint: disable=line-too-long,useless-suppression,too-many-lines
+# coding=utf-8
 # --------------------------------------------------------------------------
-#
 # Copyright (c) Microsoft Corporation. All rights reserved.
-#
-# The MIT License (MIT)
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the ""Software""), to
-# deal in the Software without restriction, including without limitation the
-# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-# sell copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-# IN THE SOFTWARE.
-#
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) Python Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
 
 # pyright: reportUnnecessaryTypeIgnoreComment=false
@@ -48,9 +30,7 @@
     IO,
     Mapping,
     Callable,
-    TypeVar,
     MutableMapping,
-    Type,
     List,
 )
 
@@ -61,13 +41,13 @@
 import xml.etree.ElementTree as ET
 
 import isodate  # type: ignore
+from typing_extensions import Self
 
 from azure.core.exceptions import DeserializationError, SerializationError
 from azure.core.serialization import NULL as CoreNull
 
 _BOM = codecs.BOM_UTF8.decode(encoding="utf-8")
 
-ModelType = TypeVar("ModelType", bound="Model")
 JSON = MutableMapping[str, Any]
 
@@ -185,73 +165,7 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]],
 except NameError:
     _long_type = int
 
-
-class UTC(datetime.tzinfo):
-    """Time Zone info for handling UTC"""
-
-    def utcoffset(self, dt):
-        """UTF offset for UTC is 0.
-
-        :param datetime.datetime dt: The datetime
-        :returns: The offset
-        :rtype: datetime.timedelta
-        """
-        return datetime.timedelta(0)
-
-    def tzname(self, dt):
-        """Timestamp representation.
-
-        :param datetime.datetime dt: The datetime
-        :returns: The timestamp representation
-        :rtype: str
-        """
-        return "Z"
-
-    def dst(self, dt):
-        """No daylight saving for UTC.
-
-        :param datetime.datetime dt: The datetime
-        :returns: The daylight saving time
-        :rtype: datetime.timedelta
-        """
-        return datetime.timedelta(hours=1)
-
-
-try:
-    from datetime import timezone as _FixedOffset  # type: ignore
-except ImportError:  # Python 2.7
-
-    class _FixedOffset(datetime.tzinfo):  # type: ignore
-        """Fixed offset in minutes east from UTC.
-        Copy/pasted from Python doc
-        :param datetime.timedelta offset: offset in timedelta format
-        """
-
-        def __init__(self, offset) -> None:
-            self.__offset = offset
-
-        def utcoffset(self, dt):
-            return self.__offset
-
-        def tzname(self, dt):
-            return str(self.__offset.total_seconds() / 3600)
-
-        def __repr__(self):
-            return "<FixedOffset {}>".format(self.tzname(None))
-
-        def dst(self, dt):
-            return datetime.timedelta(0)
-
-        def __getinitargs__(self):
-            return (self.__offset,)
-
-
-try:
-    from datetime import timezone
-
-    TZ_UTC = timezone.utc
-except ImportError:
-    TZ_UTC = UTC()  # type: ignore
+TZ_UTC = datetime.timezone.utc
 
 _FLATTEN = re.compile(r"(?<!\\)\.")
 
     @classmethod
-    def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType:
+    def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self:
         """Parse a str using the RestAPI syntax and return a model.
 
         :param str data: A str using RestAPI structure. JSON by default.
         :param str content_type: JSON by default, set application/xml if XML.
         :returns: An instance of this model
-        :raises: DeserializationError if something went wrong
-        :rtype: ModelType
+        :raises DeserializationError: if something went wrong
+        :rtype: Self
         """
         deserializer = Deserializer(cls._infer_class_models())
         return deserializer(cls.__name__, data, content_type=content_type)  # type: ignore
 
     @classmethod
     def from_dict(
-        cls: Type[ModelType],
+        cls,
         data: Any,
         key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None,
         content_type: Optional[str] = None,
-    ) -> ModelType:
+    ) -> Self:
         """Parse a dict using given key extractor and return a model.
 
         By default consider key
@@ -479,8 +393,8 @@ def from_dict(
         :param function key_extractors: A key extractor function.
         :param str content_type: JSON by default, set application/xml if XML.
         :returns: An instance of this model
-        :raises: DeserializationError if something went wrong
-        :rtype: ModelType
+        :raises DeserializationError: if something went wrong
+        :rtype: Self
         """
         deserializer = Deserializer(cls._infer_class_models())
         deserializer.key_extractors = (  # type: ignore
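The move from the `ModelType` TypeVar to `Self` means subclass calls now type-check as the subclass with no extra annotation machinery. A hedged sketch with a hypothetical msrest-style model (not part of this patch; the import path is the one this patch introduces):

```python
from azure.monitor.query._utils.serialization import Model

class StorageInfo(Model):  # hypothetical example model
    _attribute_map = {"name": {"key": "name", "type": "str"}}

info = StorageInfo.deserialize({"name": "logs"})  # typed as StorageInfo, not Model
```
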
@@ -563,7 +477,7 @@ def _decode_attribute_map_key(key):
     return key.replace("\\.", ".")
 
 
-class Serializer(object):  # pylint: disable=too-many-public-methods
+class Serializer:  # pylint: disable=too-many-public-methods
     """Request object model serializer."""
 
     basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
@@ -626,7 +540,7 @@ def _serialize(  # pylint: disable=too-many-nested-blocks, too-many-branches, to
         :param object target_obj: The data to be serialized.
         :param str data_type: The type to be serialized from.
         :rtype: str, dict
-        :raises: SerializationError if serialization fails.
+        :raises SerializationError: if serialization fails.
         :returns: The serialized data.
         """
         key_transformer = kwargs.get("key_transformer", self.key_transformer)
@@ -736,8 +650,8 @@ def body(self, data, data_type, **kwargs):
         :param object data: The data to be serialized.
         :param str data_type: The type to be serialized from.
         :rtype: dict
-        :raises: SerializationError if serialization fails.
-        :raises: ValueError if data is None
+        :raises SerializationError: if serialization fails.
+        :raises ValueError: if data is None
         :returns: The serialized request body
         """
 
@@ -781,8 +695,8 @@ def url(self, name, data, data_type, **kwargs):
         :param str data_type: The type to be serialized from.
         :rtype: str
         :returns: The serialized URL path
-        :raises: TypeError if serialization fails.
-        :raises: ValueError if data is None
+        :raises TypeError: if serialization fails.
+        :raises ValueError: if data is None
         """
         try:
             output = self.serialize_data(data, data_type, **kwargs)
@@ -805,8 +719,8 @@ def query(self, name, data, data_type, **kwargs):
         :param object data: The data to be serialized.
         :param str data_type: The type to be serialized from.
         :rtype: str, list
-        :raises: TypeError if serialization fails.
-        :raises: ValueError if data is None
+        :raises TypeError: if serialization fails.
+        :raises ValueError: if data is None
         :returns: The serialized query parameter
         """
         try:
@@ -835,8 +749,8 @@ def header(self, name, data, data_type, **kwargs):
         :param object data: The data to be serialized.
         :param str data_type: The type to be serialized from.
         :rtype: str
-        :raises: TypeError if serialization fails.
-        :raises: ValueError if data is None
+        :raises TypeError: if serialization fails.
+        :raises ValueError: if data is None
         :returns: The serialized header
         """
         try:
@@ -855,9 +769,9 @@ def serialize_data(self, data, data_type, **kwargs):
 
         :param object data: The data to be serialized.
         :param str data_type: The type to be serialized from.
-        :raises: AttributeError if required data is None.
-        :raises: ValueError if data is None
-        :raises: SerializationError if serialization fails.
+        :raises AttributeError: if required data is None.
+        :raises ValueError: if data is None
+        :raises SerializationError: if serialization fails.
         :returns: The serialized data.
         :rtype: str, int, float, bool, dict, list
         """
@@ -1192,7 +1106,7 @@ def serialize_rfc(attr, **kwargs):  # pylint: disable=unused-argument
 
         :param Datetime attr: Object to be serialized.
         :rtype: str
-        :raises: TypeError if format invalid.
+        :raises TypeError: if format invalid.
         :return: serialized rfc
         """
         try:
@@ -1218,7 +1132,7 @@ def serialize_iso(attr, **kwargs):  # pylint: disable=unused-argument
 
         :param Datetime attr: Object to be serialized.
         :rtype: str
-        :raises: SerializationError if format invalid.
+        :raises SerializationError: if format invalid.
         :return: serialized iso
         """
         if isinstance(attr, str):
@@ -1251,7 +1165,7 @@ def serialize_unix(attr, **kwargs):  # pylint: disable=unused-argument
 
         :param Datetime attr: Object to be serialized.
         :rtype: int
-        :raises: SerializationError if format invalid
+        :raises SerializationError: if format invalid
         :return: serialized unix
         """
         if isinstance(attr, int):
@@ -1429,7 +1343,7 @@ def xml_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
             # Iter and wrapped, should have found one node only (the wrap one)
             if len(children) != 1:
                 raise DeserializationError(
-                    "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(  # pylint: disable=line-too-long
+                    "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format(
                         xml_name
                     )
                 )
@@ -1441,7 +1355,7 @@ def xml_key_extractor(attr, attr_desc, data):  # pylint: disable=unused-argument
     return children[0]
 
 
-class Deserializer(object):
+class Deserializer:
     """Response object model deserializer.
 
     :param dict classes: Class type dictionary for deserializing complex types.
@@ -1488,7 +1402,7 @@ def __call__(self, target_obj, response_data, content_type=None):
         :param str target_obj: Target data type to deserialize to.
         :param requests.Response response_data: REST response object.
         :param str content_type: Swagger "produces" if available.
-        :raises: DeserializationError if deserialization fails.
+        :raises DeserializationError: if deserialization fails.
         :return: Deserialized object.
         :rtype: object
         """
@@ -1502,7 +1416,7 @@ def _deserialize(self, target_obj, data):  # pylint: disable=inconsistent-return
 
         :param str target_obj: Target data type to deserialize to.
         :param object data: Object to deserialize.
-        :raises: DeserializationError if deserialization fails.
+        :raises DeserializationError: if deserialization fails.
         :return: Deserialized object.
         :rtype: object
         """
@@ -1717,7 +1631,7 @@ def deserialize_data(self, data, data_type):  # pylint: disable=too-many-return-
 
         :param str data: The response string to be deserialized.
         :param str data_type: The type to deserialize to.
-        :raises: DeserializationError if deserialization fails.
+        :raises DeserializationError: if deserialization fails.
         :return: Deserialized object.
         :rtype: object
         """
@@ -1799,7 +1713,7 @@ def deserialize_object(self, attr, **kwargs):  # pylint: disable=too-many-return
         :param dict attr: Dictionary to be deserialized.
         :return: Deserialized object.
         :rtype: dict
-        :raises: TypeError if non-builtin datatype encountered.
+        :raises TypeError: if non-builtin datatype encountered.
         """
         if attr is None:
             return None
@@ -1845,7 +1759,7 @@ def deserialize_basic(self, attr, data_type):  # pylint: disable=too-many-return
         :param str data_type: deserialization data type.
         :return: Deserialized basic type.
         :rtype: str, int, float or bool
-        :raises: TypeError if string format is not valid.
+        :raises TypeError: if string format is not valid.
         """
         # If we're here, data is supposed to be a basic type.
        # If it's still an XML node, take the text
@@ -1936,7 +1850,7 @@ def deserialize_bytearray(attr):
         :param str attr: response string to be deserialized.
         :return: Deserialized bytearray
         :rtype: bytearray
-        :raises: TypeError if string format invalid.
+        :raises TypeError: if string format invalid.
         """
         if isinstance(attr, ET.Element):
             attr = attr.text
@@ -1949,7 +1863,7 @@ def deserialize_base64(attr):
         :param str attr: response string to be deserialized.
         :return: Deserialized base64 string
         :rtype: bytearray
-        :raises: TypeError if string format invalid.
+        :raises TypeError: if string format invalid.
         """
         if isinstance(attr, ET.Element):
             attr = attr.text
@@ -1964,7 +1878,7 @@ def deserialize_decimal(attr):
 
         :param str attr: response string to be deserialized.
         :return: Deserialized decimal
-        :raises: DeserializationError if string format invalid.
+        :raises DeserializationError: if string format invalid.
         :rtype: decimal
         """
         if isinstance(attr, ET.Element):
@@ -1982,7 +1896,7 @@ def deserialize_long(attr):
         :param str attr: response string to be deserialized.
         :return: Deserialized int
         :rtype: long or int
-        :raises: ValueError if string format invalid.
+        :raises ValueError: if string format invalid.
         """
         if isinstance(attr, ET.Element):
             attr = attr.text
@@ -1995,7 +1909,7 @@ def deserialize_duration(attr):
         :param str attr: response string to be deserialized.
         :return: Deserialized duration
         :rtype: TimeDelta
-        :raises: DeserializationError if string format invalid.
+        :raises DeserializationError: if string format invalid.
         """
         if isinstance(attr, ET.Element):
             attr = attr.text
@@ -2013,7 +1927,7 @@ def deserialize_date(attr):
         :param str attr: response string to be deserialized.
         :return: Deserialized date
         :rtype: Date
-        :raises: DeserializationError if string format invalid.
+        :raises DeserializationError: if string format invalid.
         """
         if isinstance(attr, ET.Element):
             attr = attr.text
@@ -2029,7 +1943,7 @@ def deserialize_time(attr):
         :param str attr: response string to be deserialized.
         :return: Deserialized time
         :rtype: datetime.time
-        :raises: DeserializationError if string format invalid.
+        :raises DeserializationError: if string format invalid.
         """
         if isinstance(attr, ET.Element):
             attr = attr.text
@@ -2044,14 +1958,14 @@ def deserialize_rfc(attr):
         :param str attr: response string to be deserialized.
         :return: Deserialized RFC datetime
         :rtype: Datetime
-        :raises: DeserializationError if string format invalid.
+        :raises DeserializationError: if string format invalid.
         """
         if isinstance(attr, ET.Element):
             attr = attr.text
         try:
             parsed_date = email.utils.parsedate_tz(attr)  # type: ignore
             date_obj = datetime.datetime(
-                *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
+                *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
             )
             if not date_obj.tzinfo:
                 date_obj = date_obj.astimezone(tz=TZ_UTC)
@@ -2067,7 +1981,7 @@ def deserialize_iso(attr):
 
         :param str attr: response string to be deserialized.
:return: Deserialized ISO datetime :rtype: Datetime - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -2105,7 +2019,7 @@ def deserialize_unix(attr): :param int attr: Object to be serialized. :return: Deserialized datetime :rtype: Datetime - :raises: DeserializationError if format invalid + :raises DeserializationError: if format invalid """ if isinstance(attr, ET.Element): attr = int(attr.text) # type: ignore diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/utils.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/utils.py new file mode 100644 index 000000000000..35c9c836f85f --- /dev/null +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_utils/utils.py @@ -0,0 +1,25 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from abc import ABC +from typing import Generic, TYPE_CHECKING, TypeVar + +if TYPE_CHECKING: + from .serialization import Deserializer, Serializer + + +TClient = TypeVar("TClient") +TConfig = TypeVar("TConfig") + + +class ClientMixinABC(ABC, Generic[TClient, TConfig]): + """DO NOT use this class. It is for internal typing use only.""" + + _client: TClient + _config: TConfig + _serialize: "Serializer" + _deserialize: "Deserializer" diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_version.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/_version.py index db64f53613bf..8f2350dd3b0c 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_version.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/_version.py @@ -1,9 +1,9 @@ # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "1.4.2" -SDK_MONIKER = f"monitor-query/{VERSION}" +VERSION = "2.0.0" diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/__init__.py index 0f035c83ddd7..d984b517c76e 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/__init__.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/__init__.py @@ -2,10 +2,28 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
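# Note: ClientMixinABC (added in _utils/utils.py above) is a typing-only shim.
# Operation mixins inherit from it so type checkers see `_client`, `_config`,
# `_serialize`, and `_deserialize` on `self` without the mixin contributing any
# runtime behavior. A minimal illustration of the pattern (the class and
# configuration names below are hypothetical, not part of this patch):
#
#     from azure.core import PipelineClient
#     from azure.core.rest import HttpRequest, HttpResponse
#
#     class _ExampleClientOperationsMixin(
#         ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], "ExampleClientConfiguration"]
#     ):
#         def _send(self, request: HttpRequest) -> HttpResponse:
#             # `self._client` is typed as PipelineClient[...] via the Generic parameters.
#             return self._client.send_request(request)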
# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._logs_query_client_async import LogsQueryClient -from ._metrics_query_client_async import MetricsQueryClient -from ._metrics_client_async import MetricsClient +from typing import TYPE_CHECKING -__all__ = ["LogsQueryClient", "MetricsQueryClient", "MetricsClient"] +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import MonitorQueryLogsClient # type: ignore + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "MonitorQueryLogsClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/_client.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_client.py similarity index 69% rename from sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/_client.py rename to sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_client.py index 90b421c008ce..e3975d45c2fc 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/_client.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_client.py @@ -2,42 +2,45 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from copy import deepcopy -from typing import Any, Awaitable +from typing import Any, Awaitable, TYPE_CHECKING from typing_extensions import Self from azure.core import AsyncPipelineClient from azure.core.pipeline import policies from azure.core.rest import AsyncHttpResponse, HttpRequest -from .._serialization import Deserializer, Serializer -from ._configuration import MonitorBatchMetricsClientConfiguration -from .operations import MetricsBatchOperations +from .._utils.serialization import Deserializer, Serializer +from ._configuration import MonitorQueryLogsClientConfiguration +from ._operations._operations import _MonitorQueryLogsClientOperationsMixin +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential -class MonitorBatchMetricsClient: - """Azure Monitor Batch Metrics Python Client. - :ivar metrics_batch: MetricsBatchOperations operations - :vartype metrics_batch: monitor_batch_metrics_client.aio.operations.MetricsBatchOperations - :param endpoint: The regional endpoint to use, for example - https://eastus.metrics.monitor.azure.com. The region should match the region of the requested - resources. For global resources, the region should be 'global'. Required. - :type endpoint: str - :keyword api_version: Api Version. Default value is "2024-02-01". Note that overriding this - default value may result in unsupported behavior. - :paramtype api_version: str +class MonitorQueryLogsClient(_MonitorQueryLogsClientOperationsMixin): + """MonitorQueryLogsClient. + + :param credential: Credential used to authenticate requests to the service. Required. 
+ :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword endpoint: The Log Analytics service endpoint. Default value is + "https://api.loganalytics.io". + :paramtype endpoint: str + :keyword api_version: The service API version. Known values are "v1" and None. Default value is + "v1". Note that overriding this default value may result in unsupported behavior. + :paramtype api_version: str or ~azure.monitor.query.models.Versions """ - def __init__( # pylint: disable=missing-client-constructor-parameter-credential - self, endpoint: str, **kwargs: Any + def __init__( + self, credential: "AsyncTokenCredential", *, endpoint: str = "https://api.loganalytics.io", **kwargs: Any ) -> None: - _endpoint = "{endpoint}" - self._config = MonitorBatchMetricsClientConfiguration(endpoint=endpoint, **kwargs) + _endpoint = "{endpoint}/{apiVersion}" + self._config = MonitorQueryLogsClientConfiguration(credential=credential, endpoint=endpoint, **kwargs) + _policies = kwargs.pop("policies", None) if _policies is None: _policies = [ @@ -60,7 +63,6 @@ def __init__( # pylint: disable=missing-client-constructor-parameter-credential self._serialize = Serializer() self._deserialize = Deserializer() self._serialize.client_side_validation = False - self.metrics_batch = MetricsBatchOperations(self._client, self._config, self._serialize, self._deserialize) def send_request( self, request: HttpRequest, *, stream: bool = False, **kwargs: Any @@ -85,6 +87,7 @@ def send_request( request_copy = deepcopy(request) path_format_arguments = { "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "apiVersion": self._serialize.url("self._config.api_version", self._config.api_version, "str"), } request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/_configuration.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_configuration.py similarity index 53% rename from sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/_configuration.py rename to sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_configuration.py index edd48bcec8b1..d9eadf4816e5 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/aio/_configuration.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_configuration.py @@ -2,40 +2,48 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any +from typing import Any, TYPE_CHECKING from azure.core.pipeline import policies -VERSION = "unknown" +from .._version import VERSION +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential -class MonitorBatchMetricsClientConfiguration: # pylint: disable=too-many-instance-attributes - """Configuration for MonitorBatchMetricsClient. + +class MonitorQueryLogsClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for MonitorQueryLogsClient. 
Note that all parameters used to create this instance are saved as instance attributes. - :param endpoint: The regional endpoint to use, for example - https://eastus.metrics.monitor.azure.com. The region should match the region of the requested - resources. For global resources, the region should be 'global'. Required. + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param endpoint: The Log Analytics service endpoint. Default value is + "https://api.loganalytics.io". :type endpoint: str - :keyword api_version: Api Version. Default value is "2024-02-01". Note that overriding this - default value may result in unsupported behavior. - :paramtype api_version: str + :keyword api_version: The service API version. Known values are "v1" and None. Default value is + "v1". Note that overriding this default value may result in unsupported behavior. + :paramtype api_version: str or ~azure.monitor.query.models.Versions """ - def __init__(self, endpoint: str, **kwargs: Any) -> None: - api_version: str = kwargs.pop("api_version", "2024-02-01") + def __init__( + self, credential: "AsyncTokenCredential", endpoint: str = "https://api.loganalytics.io", **kwargs: Any + ) -> None: + api_version: str = kwargs.pop("api_version", "v1") - if endpoint is None: - raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + self.credential = credential self.endpoint = endpoint self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://api.loganalytics.io/.default"]) kwargs.setdefault("sdk_moniker", "monitor-query/{}".format(VERSION)) self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) @@ -50,3 +58,7 @@ def _configure(self, **kwargs: Any) -> None: self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_helpers_async.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_helpers_async.py deleted file mode 100644 index a32542728d54..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_helpers_async.py +++ /dev/null @@ -1,26 +0,0 @@ -# -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -from azure.core.credentials_async import AsyncTokenCredential -from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy - - -def get_authentication_policy(credential: AsyncTokenCredential, audience: str) -> AsyncBearerTokenCredentialPolicy: - """Returns the correct authentication policy. - - :param credential: The credential to use for authentication with the service. - :type credential: ~azure.core.credentials.AsyncTokenCredential - :param str audience: The audience for the token. 
- :returns: The correct authentication policy. - :rtype: ~azure.core.pipeline.policies.AsyncBearerTokenCredentialPolicy - """ - if credential is None: - raise ValueError("Parameter 'credential' must not be None.") - scope = audience.rstrip("/") + "/.default" - if hasattr(credential, "get_token"): - return AsyncBearerTokenCredentialPolicy(credential, scope) - - raise TypeError("Unsupported credential") diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_metrics_client_async.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_metrics_client_async.py deleted file mode 100644 index f3d5e57d814f..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_metrics_client_async.py +++ /dev/null @@ -1,201 +0,0 @@ -# -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -from datetime import timedelta, datetime -from json import loads -from typing import Any, List, MutableMapping, Sequence, Optional, Union, Tuple - -from azure.core.credentials_async import AsyncTokenCredential -from azure.core.tracing.decorator_async import distributed_trace_async - -from .._generated.metrics.batch.aio._client import MonitorBatchMetricsClient -from .._models import MetricsQueryResult -from .._enums import MetricAggregationType -from ._helpers_async import get_authentication_policy -from .._helpers import get_timespan_iso8601_endpoints, get_subscription_id_from_resource -from .._version import SDK_MONIKER - -JSON = MutableMapping[str, Any] - - -class MetricsClient: # pylint: disable=client-accepts-api-version-keyword - """MetricsClient should be used for performing metrics queries on multiple monitored resources in the - same region. A credential with authorization at the subscription level is required when using this client. - - :param str endpoint: The regional endpoint to use, for example - https://eastus.metrics.monitor.azure.com. The region should match the region of the requested - resources. For global resources, the region should be 'global'. Required. - :param credential: The credential to authenticate the client. - :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :keyword str audience: The audience to use when requesting tokens for Microsoft Entra ID. Defaults to the public - cloud audience (https://metrics.monitor.azure.com). - - .. admonition:: Example: - - .. literalinclude:: ../samples/async_samples/sample_authentication_async.py - :start-after: [START create_metrics_client_async] - :end-before: [END create_metrics_client_async] - :language: python - :dedent: 4 - :caption: Creating the asynchronous MetricsClient with a TokenCredential. - - .. admonition:: Example: - - .. literalinclude:: ../samples/async_samples/sample_authentication_async.py - :start-after: [START create_metrics_client_sovereign_cloud_async] - :end-before: [END create_metrics_client_sovereign_cloud_async] - :language: python - :dedent: 4 - :caption: Creating the MetricsClient for use with a sovereign cloud (i.e. non-public cloud). 
- """ - - def __init__(self, endpoint: str, credential: AsyncTokenCredential, **kwargs: Any) -> None: - self._endpoint = endpoint - if not self._endpoint.startswith("https://") and not self._endpoint.startswith("http://"): - self._endpoint = "https://" + self._endpoint - audience = kwargs.pop("audience", "https://metrics.monitor.azure.com") - - authentication_policy = kwargs.pop("authentication_policy", None) or get_authentication_policy( - credential, audience - ) - kwargs.setdefault("sdk_moniker", SDK_MONIKER) - self._client = MonitorBatchMetricsClient( - credential=credential, endpoint=self._endpoint, authentication_policy=authentication_policy, **kwargs - ) - self._batch_metrics_op = self._client.metrics_batch - - @distributed_trace_async - async def query_resources( - self, - *, - resource_ids: Sequence[str], - metric_namespace: str, - metric_names: Sequence[str], - timespan: Optional[Union[timedelta, Tuple[datetime, timedelta], Tuple[datetime, datetime]]] = None, - granularity: Optional[timedelta] = None, - aggregations: Optional[Sequence[Union[MetricAggregationType, str]]] = None, - max_results: Optional[int] = None, - order_by: Optional[str] = None, - filter: Optional[str] = None, - roll_up_by: Optional[str] = None, - **kwargs: Any, - ) -> List[MetricsQueryResult]: - """Lists the metric values for multiple resources. - - :keyword resource_ids: A list of resource IDs to query metrics for. Required. - :paramtype resource_ids: list[str] - :keyword metric_namespace: Metric namespace that contains the requested metric names. Required. - :paramtype metric_namespace: str - :keyword metric_names: The names of the metrics to retrieve. Required. - :paramtype metric_names: list[str] - :keyword timespan: The timespan for which to query the data. This can be a timedelta, - a tuple of a start datetime with timedelta, or a tuple with start and end datetimes. - :paramtype timespan: Optional[Union[~datetime.timedelta, tuple[~datetime.datetime, ~datetime.timedelta], - tuple[~datetime.datetime, ~datetime.datetime]]] - :keyword granularity: The granularity (i.e. timegrain) of the query. - :paramtype granularity: Optional[~datetime.timedelta] - :keyword aggregations: The list of aggregation types to retrieve. Use - `azure.monitor.query.MetricAggregationType` enum to get each aggregation type. - :paramtype aggregations: Optional[list[Union[~azure.monitor.query.MetricAggregationType, str]]] - :keyword max_results: The maximum number of records to retrieve. - Valid only if 'filter' is specified. Defaults to 10. - :paramtype max_results: Optional[int] - :keyword order_by: The aggregation to use for sorting results and the direction of the sort. - Only one order can be specified. Examples: 'sum asc', 'maximum desc'. - :paramtype order_by: Optional[str] - :keyword filter: The **filter** is used to reduce the set of metric data returned. Default value is None. - - Example: Metric contains metadata A, B and C. - - - Return all time series of C where A = a1 and B = b1 or b2: - - **filter="A eq 'a1' and B eq 'b1' or B eq 'b2' and C eq '*'"** - - - Invalid variant: - - **filter="A eq 'a1' and B eq 'b1' and C eq '*' or B = 'b2'"**. This is invalid because the - logical 'or' operator cannot separate two different metadata names. 
- - - Return all time series where A = a1, B = b1 and C = c1: - - **filter="A eq 'a1' and B eq 'b1' and C eq 'c1'"** - - - Return all time series where A = a1: - - **filter="A eq 'a1' and B eq '*' and C eq '*'"** - - - Special case: When dimension name or dimension value uses round brackets. Example: When dimension name - is **dim (test) 1**, instead of using **filter="dim (test) 1 eq '*'"** use - **filter="dim %2528test%2529 1 eq '*'"**. - - When dimension name is **dim (test) 3** and dimension value is - **dim3 (test) val**, instead of using **filter="dim (test) 3 eq 'dim3 (test) val'"**, use **filter="dim - %2528test%2529 3 eq 'dim3 %2528test%2529 val'"**. - :paramtype filter: str - :keyword roll_up_by: Dimension name(s) to rollup results by. For example if you only want to see - metric values with a filter like 'City eq Seattle or City eq Tacoma' but don't want to see - separate values for each city, you can specify 'City' to see the results for Seattle - and Tacoma rolled up into one timeseries. - :paramtype roll_up_by: str - :return: A list of MetricsQueryResult objects. - :rtype: list[~azure.monitor.query.MetricsQueryResult] - :raises ~azure.core.exceptions.HttpResponseError: - - .. admonition:: Example: - - .. literalinclude:: ../samples/async_samples/sample_metrics_query_multiple_async.py - :start-after: [START send_metrics_batch_query_async] - :end-before: [END send_metrics_batch_query_async] - :language: python - :dedent: 0 - :caption: Get a response for a batch metrics query. - """ - if not resource_ids: - raise ValueError("resource_ids must be provided and must not be empty.") - - # Metric names with commas need to be encoded. - metric_names = [x.replace(",", "%2") for x in metric_names] - - start_time, end_time = get_timespan_iso8601_endpoints(timespan) - resource_id_json: JSON = {"resourceids": list(resource_ids)} - subscription_id = get_subscription_id_from_resource(resource_ids[0]) - - generated = await self._batch_metrics_op.batch( - subscription_id, - resource_id_json, - metricnamespace=metric_namespace, - metricnames=metric_names, - starttime=start_time, - endtime=end_time, - interval=granularity, - aggregation=",".join(aggregations) if aggregations else None, - top=max_results, - orderby=order_by, - filter=filter, - rollupby=roll_up_by, # cspell:ignore rollupby - **kwargs, - ) - - # In rare cases, the generated value is a JSON string instead of a dict. This potentially stems from a bug in - # the service. This check handles that case. - if isinstance(generated, str): - generated = loads(generated) - - return [ - MetricsQueryResult._from_generated(value) # pylint: disable=protected-access - for value in generated["values"] - ] - - async def __aenter__(self) -> "MetricsClient": - await self._client.__aenter__() - return self - - async def __aexit__(self, *args: Any) -> None: - await self._client.__aexit__(*args) - - async def close(self) -> None: - """Close the client session.""" - await self._client.__aexit__() diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_metrics_query_client_async.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_metrics_query_client_async.py deleted file mode 100644 index 6a2cd57c73d0..000000000000 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_metrics_query_client_async.py +++ /dev/null @@ -1,248 +0,0 @@ -# -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- -from datetime import timedelta, datetime -from typing import Any, cast, Optional, Tuple, Union, Sequence - -from azure.core.async_paging import AsyncItemPaged -from azure.core.credentials_async import AsyncTokenCredential -from azure.core.tracing.decorator import distributed_trace -from azure.core.tracing.decorator_async import distributed_trace_async - -from .._generated._serialization import Serializer -from .._generated.metrics.aio._client import MonitorMetricsClient - -from .._models import MetricsQueryResult, MetricDefinition, MetricNamespace -from ._helpers_async import get_authentication_policy -from .._helpers import construct_iso8601 -from .._version import SDK_MONIKER - - -class MetricsQueryClient(object): # pylint: disable=client-accepts-api-version-keyword - """MetricsQueryClient - - :param credential: The credential to authenticate the client - :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :keyword endpoint: The endpoint to connect to. Defaults to 'https://management.azure.com'. - :paramtype endpoint: Optional[str] - - .. admonition:: Example: - - .. literalinclude:: ../samples/async_samples/sample_authentication_async.py - :start-after: [START create_metrics_query_client_async] - :end-before: [END create_metrics_query_client_async] - :language: python - :dedent: 4 - :caption: Creating the asynchronous MetricsQueryClient with a TokenCredential. - - .. admonition:: Example: - - .. literalinclude:: ../samples/async_samples/sample_authentication_async.py - :start-after: [START create_metrics_query_client_sovereign_cloud_async] - :end-before: [END create_metrics_query_client_sovereign_cloud_async] - :language: python - :dedent: 4 - :caption: Creating the MetricsQueryClient for use with a sovereign cloud (i.e. non-public cloud). - """ - - def __init__(self, credential: AsyncTokenCredential, **kwargs: Any) -> None: - self._endpoint = kwargs.pop("endpoint", "https://management.azure.com") - if not self._endpoint.startswith("https://") and not self._endpoint.startswith("http://"): - self._endpoint = "https://" + self._endpoint - audience = kwargs.pop("audience", self._endpoint) - authentication_policy = kwargs.pop("authentication_policy", None) or get_authentication_policy( - credential, audience - ) - kwargs.setdefault("sdk_moniker", SDK_MONIKER) - self._client = MonitorMetricsClient( - credential=credential, endpoint=self._endpoint, authentication_policy=authentication_policy, **kwargs - ) - - self._metrics_op = self._client.metrics - self._namespace_op = self._client.metric_namespaces - self._definitions_op = self._client.metric_definitions - - @distributed_trace_async - async def query_resource( - self, - resource_uri: str, - metric_names: Sequence[str], - *, - timespan: Optional[Union[timedelta, Tuple[datetime, timedelta], Tuple[datetime, datetime]]] = None, - granularity: Optional[timedelta] = None, - aggregations: Optional[Sequence[str]] = None, - max_results: Optional[int] = None, - order_by: Optional[str] = None, - filter: Optional[str] = None, - metric_namespace: Optional[str] = None, - **kwargs: Any - ) -> MetricsQueryResult: - """Lists the metric values for a resource. - - **Note**: Although the start_time, end_time, duration are optional parameters, it is highly - recommended to specify the timespan. If not, the entire dataset is queried. - - :param resource_uri: The identifier of the resource. 
- :type resource_uri: str - :param metric_names: The names of the metrics to retrieve. - :type metric_names: list - :keyword timespan: The timespan for which to query the data. This can be a timedelta, - a timedelta and a start datetime, or a start datetime/end datetime. - :paramtype timespan: Optional[Union[~datetime.timedelta, tuple[~datetime.datetime, ~datetime.timedelta], - tuple[~datetime.datetime, ~datetime.datetime]]] - :keyword granularity: The interval (i.e. timegrain) of the query. - :paramtype granularity: Optional[~datetime.timedelta] - :keyword aggregations: The list of aggregation types to retrieve. - Use `azure.monitor.query.MetricAggregationType` enum to get each aggregation type. - :paramtype aggregations: Optional[list[str]] - :keyword max_results: The maximum number of records to retrieve. - Valid only if 'filter' is specified. - Defaults to 10. - :paramtype max_results: Optional[int] - :keyword order_by: The aggregation to use for sorting results and the direction of the sort. - Only one order can be specified. - Examples: sum asc. - :paramtype order_by: Optional[str] - :keyword filter: The **filter** is used to reduce the set of metric data returned. Default value is None. - - Example: Metric contains metadata A, B and C. - - - Return all time series of C where A = a1 and B = b1 or b2: - - **filter="A eq 'a1' and B eq 'b1' or B eq 'b2' and C eq '*'"** - - - Invalid variant: - - **filter="A eq 'a1' and B eq 'b1' and C eq '*' or B = 'b2'"**. This is invalid because the - logical 'or' operator cannot separate two different metadata names. - - - Return all time series where A = a1, B = b1 and C = c1: - - **filter="A eq 'a1' and B eq 'b1' and C eq 'c1'"** - - - Return all time series where A = a1: - - **filter="A eq 'a1' and B eq '*' and C eq '*'"** - - - Special case: When dimension name or dimension value uses round brackets. Example: When dimension name - is **dim (test) 1**, instead of using **filter="dim (test) 1 eq '*'"** use - **filter="dim %2528test%2529 1 eq '*'"**. - - When dimension name is **dim (test) 3** and dimension value is - **dim3 (test) val**, instead of using **filter="dim (test) 3 eq 'dim3 (test) val'"**, use **filter="dim - %2528test%2529 3 eq 'dim3 %2528test%2529 val'"**. - :paramtype filter: Optional[str] - :keyword metric_namespace: Metric namespace to query metric definitions for. - :paramtype metric_namespace: Optional[str] - :return: A MetricsQueryResult object. - :rtype: ~azure.monitor.query.MetricsQueryResult - :raises ~azure.core.exceptions.HttpResponseError: - - .. admonition:: Example: - - .. literalinclude:: ../samples/async_samples/sample_metrics_query_async.py - :start-after: [START send_metrics_query_async] - :end-before: [END send_metrics_query_async] - :language: python - :dedent: 0 - :caption: Get a response for a single metrics query. - """ - - # Metric names with commas need to be encoded. 
- metric_names = [x.replace(",", "%2") for x in metric_names] - generated = await self._metrics_op.list( - resource_uri, - timespan=construct_iso8601(timespan), - interval=granularity, - metricnames=",".join(metric_names), - aggregation=",".join(aggregations) if aggregations else None, - top=max_results, - orderby=order_by, - filter=filter, - metricnamespace=metric_namespace, - **kwargs - ) - return MetricsQueryResult._from_generated(generated) # pylint: disable=protected-access - - @distributed_trace - def list_metric_namespaces( - self, resource_uri: str, *, start_time: Optional[datetime] = None, **kwargs: Any - ) -> AsyncItemPaged[MetricNamespace]: - """Lists the metric namespaces for the resource. - - :param resource_uri: The identifier of the resource. - :type resource_uri: str - :keyword start_time: The start time from which to query for metric - namespaces. This should be provided as a datetime object. - :paramtype start_time: Optional[~datetime.datetime] - :return: An iterator like instance of either MetricNamespace or the result of cls(response) - :rtype: ~azure.core.paging.AsyncItemPaged[~azure.monitor.query.MetricNamespace] - :raises ~azure.core.exceptions.HttpResponseError: - - .. admonition:: Example: - - .. literalinclude:: ../samples/async_samples/sample_metric_namespaces_async.py - :start-after: [START send_metric_namespaces_query_async] - :end-before: [END send_metric_namespaces_query_async] - :language: python - :dedent: 0 - :caption: Get a response for a single metric namespaces query. - """ - res = self._namespace_op.list( - resource_uri, - start_time=Serializer.serialize_iso(start_time) if start_time else None, - cls=kwargs.pop( - "cls", - lambda objs: [MetricNamespace._from_generated(x) for x in objs], # pylint: disable=protected-access - ), - **kwargs - ) - return cast(AsyncItemPaged[MetricNamespace], res) - - @distributed_trace - def list_metric_definitions( - self, resource_uri: str, *, namespace: Optional[str] = None, **kwargs: Any - ) -> AsyncItemPaged[MetricDefinition]: - """Lists the metric definitions for the resource. - - :param resource_uri: The identifier of the resource. - :type resource_uri: str - :keyword namespace: Metric namespace to query metric definitions for. - :paramtype namespace: Optional[str] - :return: An iterator like instance of either MetricDefinition or the result of cls(response) - :rtype: ~azure.core.paging.AsyncItemPaged[~azure.monitor.query.MetricDefinition] - :raises ~azure.core.exceptions.HttpResponseError: - - .. admonition:: Example: - - .. literalinclude:: ../samples/async_samples/sample_metric_definitions_async.py - :start-after: [START send_metric_definitions_query_async] - :end-before: [END send_metric_definitions_query_async] - :language: python - :dedent: 0 - :caption: Get a response for a single metric definitions query. 
- """ - res = self._definitions_op.list( - resource_uri, - metricnamespace=namespace, - cls=kwargs.pop( - "cls", - lambda objs: [MetricDefinition._from_generated(x) for x in objs], # pylint: disable=protected-access - ), - **kwargs - ) - return cast(AsyncItemPaged[MetricDefinition], res) - - async def __aenter__(self) -> "MetricsQueryClient": - await self._client.__aenter__() - return self - - async def __aexit__(self, *args: Any) -> None: - await self._client.__aexit__(*args) - - async def close(self) -> None: - """Close the :class:`~azure.monitor.query.aio.MetricsQueryClient` session.""" - await self._client.__aexit__() diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_operations/__init__.py similarity index 76% rename from sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/__init__.py rename to sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_operations/__init__.py index e4231c981f36..933fcd7d1b55 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/operations/__init__.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_operations/__init__.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- # pylint: disable=wrong-import-position @@ -12,16 +12,11 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._operations import QueryOperations # type: ignore -from ._operations import MetadataOperations # type: ignore from ._patch import __all__ as _patch_all from ._patch import * from ._patch import patch_sdk as _patch_sdk -__all__ = [ - "QueryOperations", - "MetadataOperations", -] +__all__ = [] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_operations/_operations.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_operations/_operations.py new file mode 100644 index 000000000000..420c5456135a --- /dev/null +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_operations/_operations.py @@ -0,0 +1,390 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=protected-access +from collections.abc import MutableMapping +from io import IOBase +import json +from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload + +from azure.core import AsyncPipelineClient +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict + +from ... import models as _models +from ..._operations._operations import ( + build_monitor_query_logs_batch_request, + build_monitor_query_logs_execute_request, + build_monitor_query_logs_execute_with_resource_id_request, +) +from ..._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize +from ..._utils.utils import ClientMixinABC +from .._configuration import MonitorQueryLogsClientConfiguration + +JSON = MutableMapping[str, Any] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class _MonitorQueryLogsClientOperationsMixin( + ClientMixinABC[AsyncPipelineClient[HttpRequest, AsyncHttpResponse], MonitorQueryLogsClientConfiguration] +): + + @overload + async def _execute( + self, + workspace_id: str, + body: _models._models.QueryBody, + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models._models.QueryResults: ... + @overload + async def _execute( + self, + workspace_id: str, + body: JSON, + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models._models.QueryResults: ... + @overload + async def _execute( + self, + workspace_id: str, + body: IO[bytes], + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models._models.QueryResults: ... + + @distributed_trace_async + async def _execute( + self, + workspace_id: str, + body: Union[_models._models.QueryBody, JSON, IO[bytes]], + *, + prefer: Optional[str] = None, + **kwargs: Any + ) -> _models._models.QueryResults: + """Execute an Analytics query. + + Executes an Analytics query for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param workspace_id: Primary Workspace ID of the query. This is the Workspace ID from the + Properties + blade in the Azure portal. Required. + :type workspace_id: str + :param body: The Analytics query. Learn more about the + `Analytics query syntax + `_. Is + one of the following types: QueryBody, JSON, IO[bytes] Required. + :type body: ~azure.monitor.query.models._models.QueryBody or JSON or IO[bytes] + :keyword prefer: Optional. The prefer header to set server timeout, query statistics and + visualization information. Default value is None. + :paramtype prefer: str + :return: QueryResults. 
The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.query.models._models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_models._models.QueryResults] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_monitor_query_logs_execute_request( + workspace_id=workspace_id, + prefer=prefer, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "apiVersion": self._serialize.url("self._config.api_version", self._config.api_version, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models._models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize( + _models._models.QueryResults, response.json() # pylint: disable=protected-access + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def _execute_with_resource_id( + self, + resource_id: str, + body: _models._models.QueryBody, + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models._models.QueryResults: ... + @overload + async def _execute_with_resource_id( + self, + resource_id: str, + body: JSON, + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models._models.QueryResults: ... + @overload + async def _execute_with_resource_id( + self, + resource_id: str, + body: IO[bytes], + *, + prefer: Optional[str] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> _models._models.QueryResults: ... + + @distributed_trace_async + async def _execute_with_resource_id( + self, + resource_id: str, + body: Union[_models._models.QueryBody, JSON, IO[bytes]], + *, + prefer: Optional[str] = None, + **kwargs: Any + ) -> _models._models.QueryResults: + """Execute an Analytics query using resource ID. + + Executes an Analytics query for data in the context of a resource. + `Here `_ + is an example for using POST with an Analytics query. 
+ + :param resource_id: The identifier of the resource. Required. + :type resource_id: str + :param body: The Analytics query. Learn more about the + `Analytics query syntax + `_. Is + one of the following types: QueryBody, JSON, IO[bytes] Required. + :type body: ~azure.monitor.query.models._models.QueryBody or JSON or IO[bytes] + :keyword prefer: Optional. The prefer header to set server timeout, query statistics and + visualization information. Default value is None. + :paramtype prefer: str + :return: QueryResults. The QueryResults is compatible with MutableMapping + :rtype: ~azure.monitor.query.models._models.QueryResults + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_models._models.QueryResults] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_monitor_query_logs_execute_with_resource_id_request( + resource_id=resource_id, + prefer=prefer, + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "apiVersion": self._serialize.url("self._config.api_version", self._config.api_version, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models._models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize( + _models._models.QueryResults, response.json() # pylint: disable=protected-access + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def _batch( + self, body: _models._models.BatchRequest, *, content_type: str = "application/json", **kwargs: Any + ) -> _models._models.BatchResponse: ... + @overload + async def _batch( + self, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models._models.BatchResponse: ... + @overload + async def _batch( + self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models._models.BatchResponse: ... 
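# The overloads above let `_batch` accept the request body as a typed model, a
# JSON-style mapping, or raw bytes. For reference, a hedged sketch of the
# mapping form that the public `query_batch` wrapper assembles (the ID, the
# workspace value, and the query are illustrative; the authoritative shape is
# the Log Analytics batch API linked in the docstring):
#
#     batch_body = {
#         "requests": [
#             {
#                 "id": "1",
#                 "workspace": "<workspace-id>",
#                 "headers": {"Content-Type": "application/json"},
#                 "body": {"query": "AppRequests | take 5", "timespan": "PT1H"},
#             }
#         ]
#     }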
+ + @distributed_trace_async + async def _batch( + self, body: Union[_models._models.BatchRequest, JSON, IO[bytes]], **kwargs: Any + ) -> _models._models.BatchResponse: + """Execute a batch of Analytics queries. + + Executes a batch of Analytics queries for data. + `Here `_ + is an example for using POST with an Analytics query. + + :param body: The batch request body. Is one of the following types: BatchRequest, JSON, + IO[bytes] Required. + :type body: ~azure.monitor.query.models._models.BatchRequest or JSON or IO[bytes] + :return: BatchResponse. The BatchResponse is compatible with MutableMapping + :rtype: ~azure.monitor.query.models._models.BatchResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_models._models.BatchResponse] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_monitor_query_logs_batch_request( + content_type=content_type, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + "apiVersion": self._serialize.url("self._config.api_version", self._config.api_version, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_models._models.ErrorResponse, response.json()) + raise HttpResponseError(response=response, model=error) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize( + _models._models.BatchResponse, response.json() # pylint: disable=protected-access + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_patch.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_operations/_patch.py similarity index 61% rename from sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_patch.py rename to sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_operations/_patch.py index f7dd32510333..8bcb627aa475 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/_patch.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_operations/_patch.py @@ -1,7 +1,8 @@ -# ------------------------------------ -# Copyright (c) Microsoft 
Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_logs_query_client_async.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_patch.py similarity index 84% rename from sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_logs_query_client_async.py rename to sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_patch.py index d997b0fbdb80..053ae7f2dee3 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_logs_query_client_async.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/aio/_patch.py @@ -1,28 +1,35 @@ -# -# ------------------------------------------------------------------------- +# coding=utf-8 +# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. +# Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" from datetime import datetime, timedelta -from typing import Any, cast, Tuple, Union, Sequence, Dict, List, Optional, MutableMapping +from typing import Any, cast, Tuple, Union, Sequence, Dict, List, Optional, MutableMapping, TYPE_CHECKING from urllib.parse import urlparse -from azure.core.credentials_async import AsyncTokenCredential from azure.core.exceptions import HttpResponseError from azure.core.tracing.decorator_async import distributed_trace_async -from .._generated.aio._client import MonitorQueryClient +from ._client import MonitorQueryLogsClient as GeneratedClient from .._helpers import construct_iso8601, order_results, process_error, process_prefer from .._models import LogsQueryResult, LogsBatchQuery, LogsQueryPartialResult -from ._helpers_async import get_authentication_policy from .._exceptions import LogsQueryError -from .._version import SDK_MONIKER +from .._sdk_moniker import SDK_MONIKER + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential # pylint: disable=unused-import + + +__all__: List[str] = ["LogsQueryClient"] JSON = MutableMapping[str, Any] -class LogsQueryClient(object): # pylint: disable=client-accepts-api-version-keyword +class LogsQueryClient(GeneratedClient): """LogsQueryClient. Use this client to collect and organize log and performance data from monitored resources. Data from different sources such as platform logs from Azure services, log and performance data from virtual machines agents, and usage and performance data from @@ -33,8 +40,13 @@ class LogsQueryClient(object): # pylint: disable=client-accepts-api-version-key :param credential: The credential to authenticate the client :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :keyword endpoint: The endpoint to connect to. 
Defaults to 'https://api.loganalytics.io/v1'. - :paramtype endpoint: Optional[str] + :keyword endpoint: The endpoint to connect to. Defaults to 'https://api.loganalytics.io'. + :paramtype endpoint: str + :keyword audience: The audience to use when requesting tokens for Microsoft Entra ID. Defaults to the endpoint. + :paramtype audience: str + :keyword api_version: The service API version. Default value is "v1". Note that overriding this default value may + result in unsupported behavior. + :paramtype api_version: str .. admonition:: Example: @@ -55,23 +67,35 @@ class LogsQueryClient(object): # pylint: disable=client-accepts-api-version-key :caption: Creating the LogsQueryClient for use with a sovereign cloud (i.e. non-public cloud). """ - def __init__(self, credential: AsyncTokenCredential, **kwargs: Any) -> None: - endpoint = kwargs.pop("endpoint", "https://api.loganalytics.io/v1") + def __init__(self, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + endpoint = kwargs.pop("endpoint", "https://api.loganalytics.io") + api_version = kwargs.pop("api_version", None) + if not endpoint.startswith("https://") and not endpoint.startswith("http://"): endpoint = "https://" + endpoint - parsed_endpoint = urlparse(endpoint) - # Assume audience is the base URL of the endpoint, unless a value is explicitly passed in. - audience = kwargs.pop("audience", f"{parsed_endpoint.scheme}://{parsed_endpoint.netloc}") + + # Handle backward compatibility: if endpoint includes api version path, extract it + if endpoint.endswith("/v1"): + if api_version is None: + api_version = "v1" + parsed_endpoint = urlparse(endpoint) + endpoint = f"{parsed_endpoint.scheme}://{parsed_endpoint.netloc}" + elif api_version is None: + api_version = "v1" # Default api_version + + audience = kwargs.pop("audience", endpoint) + scope = audience.rstrip("/") + "/.default" + credential_scopes = kwargs.pop("credential_scopes", [scope]) self._endpoint = endpoint - auth_policy = kwargs.pop("authentication_policy", None) + kwargs.setdefault("sdk_moniker", SDK_MONIKER) - self._client = MonitorQueryClient( + super().__init__( credential=credential, - authentication_policy=auth_policy or get_authentication_policy(credential, audience), endpoint=self._endpoint, + api_version=api_version, + credential_scopes=credential_scopes, **kwargs, ) - self._query_op = self._client.query @distributed_trace_async async def query_workspace( @@ -130,9 +154,7 @@ async def query_workspace( generated_response: JSON = {} try: - generated_response = await self._query_op.execute( - workspace_id=workspace_id, body=body, prefer=prefer, **kwargs - ) + generated_response = await self._execute(workspace_id=workspace_id, body=body, prefer=prefer, **kwargs) except HttpResponseError as err: process_error(err, LogsQueryError) response: Union[LogsQueryResult, LogsQueryPartialResult] @@ -179,7 +201,7 @@ async def query_batch( queries = [cast(LogsBatchQuery, q)._to_generated() for q in queries] # pylint: disable=protected-access request_order = [req["id"] for req in queries] batch = {"requests": queries} - generated = await self._query_op.batch(batch, **kwargs) + generated = await self._batch(batch, **kwargs) mapping = {item["id"]: item for item in generated["responses"]} return order_results( request_order, @@ -251,7 +273,7 @@ async def query_resource( generated_response: JSON = {} try: - generated_response = await self._query_op.resource_execute( + generated_response = await self._execute_with_resource_id( resource_id=resource_id, body=body, prefer=prefer, **kwargs ) 
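+            # Any HTTP error raised by the generated operation is converted into
+            # a LogsQueryError by process_error in the except block below.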
except HttpResponseError as err: @@ -266,13 +288,11 @@ async def query_resource( ) return response - async def __aenter__(self) -> "LogsQueryClient": - await self._client.__aenter__() - return self - async def __aexit__(self, *args: Any) -> None: - await self._client.__aexit__(*args) +def patch_sdk(): + """Do not remove from this file. - async def close(self) -> None: - """Close the :class:`~azure.monitor.query.aio.LogsQueryClient` session.""" - await self._client.__aexit__() + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/__init__.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/models/__init__.py similarity index 83% rename from sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/__init__.py rename to sdk/monitor/azure-monitor-query/azure/monitor/query/models/__init__.py index 3a8ca7f7491d..76cc9379a831 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/_generated/metrics/batch/operations/__init__.py +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/models/__init__.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- # pylint: disable=wrong-import-position @@ -12,14 +12,16 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._operations import MetricsBatchOperations # type: ignore +from ._enums import ( # type: ignore + _ColumnType, +) from ._patch import __all__ as _patch_all from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ - "MetricsBatchOperations", + "_ColumnType", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/models/_enums.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/models/_enums.py new file mode 100644 index 000000000000..d42e92a47163 --- /dev/null +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/models/_enums.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta + + +class _ColumnType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The data type of a column.""" + + BOOL = "bool" + """Boolean data type""" + DATETIME = "datetime" + """DateTime data type""" + DYNAMIC = "dynamic" + """Dynamic data type""" + INT = "int" + """Integer data type""" + LONG = "long" + """Long integer data type""" + REAL = "real" + """Real/floating point data type""" + STRING = "string" + """String data type""" + GUID = "guid" + """GUID data type""" + DECIMAL = "decimal" + """Decimal data type""" + TIMESPAN = "timespan" + """Timespan data type""" + + +class Versions(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Service API versions.""" + + V1 = "v1" + """The V1 API version.""" diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/models/_models.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/models/_models.py new file mode 100644 index 000000000000..99472104c0c5 --- /dev/null +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/models/_models.py @@ -0,0 +1,532 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=useless-super-delegation + +from typing import Any, Dict, List, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload + +from .._utils.model_base import Model as _Model, rest_field + +if TYPE_CHECKING: + from .. import models as _models + + +class BatchQueryRequest(_Model): + """A single request in a batch. + + :ivar id: Unique ID corresponding to each request in the batch. Required. + :vartype id: str + :ivar headers: Headers of the request. Can use prefer header to set server timeout and to + query statistics and visualization information. + :vartype headers: dict[str, str] + :ivar body: The Analytics query. Learn more about the + `Analytics query syntax + `_. + Required. + :vartype body: ~azure.monitor.query.models._models.QueryBody + :ivar path: The path for the batch query request. Required. Default value is "/query". + :vartype path: str + :ivar method: The method of a single request in a batch. Required. Default value is "POST". + :vartype method: str + :ivar workspace: Primary Workspace ID of the query. This is the Workspace ID from the + Properties + blade in the Azure portal. Required. + :vartype workspace: str + """ + + id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Unique ID corresponding to each request in the batch. Required.""" + headers: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Headers of the request. Can use prefer header to set server timeout and to + query statistics and visualization information.""" + body: "_models._models.QueryBody" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The Analytics query. Learn more about the + `Analytics query syntax + `_. 
+    Required."""
+    path: Literal["/query"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The path for the batch query request. Required. Default value is \"/query\"."""
+    method: Literal["POST"] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The method of a single request in a batch. Required. Default value is \"POST\"."""
+    workspace: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Primary Workspace ID of the query. This is the Workspace ID from the Properties
+    blade in the Azure portal. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: str,  # pylint: disable=redefined-builtin
+        body: "_models._models.QueryBody",
+        workspace: str,
+        headers: Optional[Dict[str, str]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+        self.path: Literal["/query"] = "/query"
+        self.method: Literal["POST"] = "POST"
+
+
+class BatchQueryResponse(_Model):
+    """Contains the batch query response and the headers, id, and status of the request.
+
+    :ivar id: Unique ID corresponding to each request in the batch.
+    :vartype id: str
+    :ivar status: The HTTP status code of the response.
+    :vartype status: int
+    :ivar body: Contains the tables, columns & rows resulting from a query.
+    :vartype body: ~azure.monitor.query.models._models.BatchQueryResults
+    :ivar headers: Dictionary of response headers.
+    :vartype headers: dict[str, str]
+    """
+
+    id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Unique ID corresponding to each request in the batch."""
+    status: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The HTTP status code of the response."""
+    body: Optional["_models._models.BatchQueryResults"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """Contains the tables, columns & rows resulting from a query."""
+    headers: Optional[Dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Dictionary of response headers."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        id: Optional[str] = None,  # pylint: disable=redefined-builtin
+        status: Optional[int] = None,
+        body: Optional["_models._models.BatchQueryResults"] = None,
+        headers: Optional[Dict[str, str]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
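+# Each BatchQueryResponse pairs the per-query HTTP status and headers with that
+# query's tabular results, carried by the BatchQueryResults model below.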
+class BatchQueryResults(_Model):
+    """Contains the tables, columns & rows resulting from a query.
+
+    :ivar tables: The results of the query in tabular format.
+    :vartype tables: list[~azure.monitor.query.models._models.Table]
+    :ivar statistics: Statistics represented in JSON format.
+    :vartype statistics: dict[str, any]
+    :ivar render: Visualization data in JSON format.
+    :vartype render: dict[str, any]
+    :ivar error: The code and message for an error.
+    :vartype error: ~azure.monitor.query.models._models.ErrorInfo
+    """
+
+    tables: Optional[List["_models._models.Table"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The results of the query in tabular format."""
+    statistics: Optional[Dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Statistics represented in JSON format."""
+    render: Optional[Dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """Visualization data in JSON format."""
+    error: Optional["_models._models.ErrorInfo"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The code and message for an error."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        tables: Optional[List["_models._models.Table"]] = None,
+        statistics: Optional[Dict[str, Any]] = None,
+        render: Optional[Dict[str, Any]] = None,
+        error: Optional["_models._models.ErrorInfo"] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class BatchRequest(_Model):
+    """An array of requests.
+
+    :ivar requests: The list of requests in the batch. Required.
+    :vartype requests: list[~azure.monitor.query.models._models.BatchQueryRequest]
+    """
+
+    requests: List["_models._models.BatchQueryRequest"] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """The list of requests in the batch. Required."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        requests: List["_models._models.BatchQueryRequest"],
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class BatchResponse(_Model):
+    """Response to a batch query.
+
+    :ivar responses: An array of responses corresponding to each individual request in a batch.
+    :vartype responses: list[~azure.monitor.query.models._models.BatchQueryResponse]
+    """
+
+    responses: Optional[List["_models._models.BatchQueryResponse"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
+    """An array of responses corresponding to each individual request in a batch."""
+
+    @overload
+    def __init__(
+        self,
+        *,
+        responses: Optional[List["_models._models.BatchQueryResponse"]] = None,
+    ) -> None: ...
+
+    @overload
+    def __init__(self, mapping: Mapping[str, Any]) -> None:
+        """
+        :param mapping: raw JSON to initialize the model.
+        :type mapping: Mapping[str, Any]
+        """
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:
+        super().__init__(*args, **kwargs)
+
+
+class Column(_Model):
+    """A column in a table.
+
+    :ivar name: The name of this column. Required.
+    :vartype name: str
+    :ivar type: The data type of this column. Required. Known values are: "bool", "datetime",
+     "dynamic", "int", "long", "real", "string", "guid", "decimal", and "timespan".
+    :vartype type: str or ~azure.monitor.query.models._ColumnType
+    """
+
+    name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    """The name of this column.
Required.""" + type: Union[str, "_models._ColumnType"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The data type of this column. Required. Known values are: \"bool\", \"datetime\", \"dynamic\", + \"int\", \"long\", \"real\", \"string\", \"guid\", \"decimal\", and \"timespan\".""" + + @overload + def __init__( + self, + *, + name: str, + type: Union[str, "_models._ColumnType"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ErrorDetail(_Model): + """Error details. + + :ivar code: The error's code. Required. + :vartype code: str + :ivar message: A human readable error message. Required. + :vartype message: str + :ivar target: Indicates which property in the request is responsible for the error. + :vartype target: str + :ivar value: Indicates which value in 'target' is responsible for the error. + :vartype value: str + :ivar resources: Indicates resources which were responsible for the error. + :vartype resources: list[str] + :ivar additional_properties: Additional properties that can be provided on the error details + object. + :vartype additional_properties: dict[str, any] + """ + + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error's code. Required.""" + message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A human readable error message. Required.""" + target: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Indicates which property in the request is responsible for the error.""" + value: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Indicates which value in 'target' is responsible for the error.""" + resources: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Indicates resources which were responsible for the error.""" + additional_properties: Optional[Dict[str, Any]] = rest_field( + name="additionalProperties", visibility=["read", "create", "update", "delete", "query"] + ) + """Additional properties that can be provided on the error details object.""" + + @overload + def __init__( + self, + *, + code: str, + message: str, + target: Optional[str] = None, + value: Optional[str] = None, + resources: Optional[List[str]] = None, + additional_properties: Optional[Dict[str, Any]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ErrorInfo(_Model): + """The code and message for an error. + + :ivar code: A machine readable error code. Required. + :vartype code: str + :ivar message: A human readable error message. Required. + :vartype message: str + :ivar details: error details. + :vartype details: list[~azure.monitor.query.models._models.ErrorDetail] + :ivar innererror: Inner error details if they exist. + :vartype innererror: ~azure.monitor.query.models._models.ErrorInfo + :ivar additional_properties: Additional properties that can be provided on the error info + object. 
+ :vartype additional_properties: dict[str, any] + """ + + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A machine readable error code. Required.""" + message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A human readable error message. Required.""" + details: Optional[List["_models._models.ErrorDetail"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """error details.""" + innererror: Optional["_models._models.ErrorInfo"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Inner error details if they exist.""" + additional_properties: Optional[Dict[str, Any]] = rest_field( + name="additionalProperties", visibility=["read", "create", "update", "delete", "query"] + ) + """Additional properties that can be provided on the error info object.""" + + @overload + def __init__( + self, + *, + code: str, + message: str, + details: Optional[List["_models._models.ErrorDetail"]] = None, + innererror: Optional["_models._models.ErrorInfo"] = None, + additional_properties: Optional[Dict[str, Any]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ErrorResponse(_Model): + """Contains details when the response code indicates an error. + + :ivar error: The error details. Required. + :vartype error: ~azure.monitor.query.models._models.ErrorInfo + """ + + error: "_models._models.ErrorInfo" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error details. Required.""" + + @overload + def __init__( + self, + *, + error: "_models._models.ErrorInfo", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class QueryBody(_Model): + """The Analytics query. Learn more about the + `Analytics query syntax + `_. + + :ivar query: The query to execute. Required. + :vartype query: str + :ivar timespan: Optional. The timespan over which to query data. This is an ISO8601 time period + value. This timespan is applied in addition to any that are specified in the + query expression. + :vartype timespan: str + :ivar workspaces: A list of workspaces to query in addition to the primary workspace. + :vartype workspaces: list[str] + """ + + query: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The query to execute. Required.""" + timespan: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Optional. The timespan over which to query data. This is an ISO8601 time period + value. This timespan is applied in addition to any that are specified in the + query expression.""" + workspaces: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A list of workspaces to query in addition to the primary workspace.""" + + @overload + def __init__( + self, + *, + query: str, + timespan: Optional[str] = None, + workspaces: Optional[List[str]] = None, + ) -> None: ... 
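+    # A QueryBody can be constructed from keyword arguments (overload above) or a
+    # raw JSON mapping (overload below); illustrative examples:
+    #   QueryBody(query="AppRequests | take 5", timespan="PT1H")
+    #   QueryBody({"query": "AppRequests | take 5", "timespan": "PT1H"})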
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class QueryResults(_Model): + """Contains the tables, columns & rows resulting from a query. + + :ivar tables: The results of the query in tabular format. Required. + :vartype tables: list[~azure.monitor.query.models._models.Table] + :ivar statistics: Statistics represented in JSON format. + :vartype statistics: dict[str, any] + :ivar render: Visualization data in JSON format. + :vartype render: dict[str, any] + :ivar error: The code and message for an error. + :vartype error: ~azure.monitor.query.models._models.ErrorInfo + """ + + tables: List["_models._models.Table"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The results of the query in tabular format. Required.""" + statistics: Optional[Dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Statistics represented in JSON format.""" + render: Optional[Dict[str, Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Visualization data in JSON format.""" + error: Optional["_models._models.ErrorInfo"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The code and message for an error.""" + + @overload + def __init__( + self, + *, + tables: List["_models._models.Table"], + statistics: Optional[Dict[str, Any]] = None, + render: Optional[Dict[str, Any]] = None, + error: Optional["_models._models.ErrorInfo"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Table(_Model): + """Contains the columns and rows for one table in a query response. + + :ivar name: The name of the table. Required. + :vartype name: str + :ivar columns: The list of columns in this table. Required. + :vartype columns: list[~azure.monitor.query.models._models.Column] + :ivar rows: The resulting rows from this query. Required. + :vartype rows: list[list[any]] + """ + + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the table. Required.""" + columns: List["_models._models.Column"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The list of columns in this table. Required.""" + rows: List[List[Any]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The resulting rows from this query. Required.""" + + @overload + def __init__( + self, + *, + name: str, + columns: List["_models._models.Column"], + rows: List[List[Any]], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/models/_patch.py b/sdk/monitor/azure-monitor-query/azure/monitor/query/models/_patch.py new file mode 100644 index 000000000000..0413c691f7f6 --- /dev/null +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/models/_patch.py @@ -0,0 +1,23 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +from . import _models # pylint: disable=unused-import + +__all__: List[str] = ["_models"] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/monitor/azure-monitor-query/azure/monitor/query/py.typed b/sdk/monitor/azure-monitor-query/azure/monitor/query/py.typed index e69de29bb2d1..e5aff4f83af8 100644 --- a/sdk/monitor/azure-monitor-query/azure/monitor/query/py.typed +++ b/sdk/monitor/azure-monitor-query/azure/monitor/query/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/monitor/azure-monitor-query/dev_requirements.txt b/sdk/monitor/azure-monitor-query/dev_requirements.txt index f72755a0d6ec..1e2deccd63fa 100644 --- a/sdk/monitor/azure-monitor-query/dev_requirements.txt +++ b/sdk/monitor/azure-monitor-query/dev_requirements.txt @@ -1,5 +1,5 @@ -e ../../../tools/azure-sdk-tools --e ../../core/azure-core --e ../../identity/azure-identity +../../core/azure-core +../../identity/azure-identity azure-mgmt-loganalytics aiohttp>=3.0 diff --git a/sdk/monitor/azure-monitor-query/samples/README.md b/sdk/monitor/azure-monitor-query/samples/README.md index 2063ca37031e..af17bd0e0b71 100644 --- a/sdk/monitor/azure-monitor-query/samples/README.md +++ b/sdk/monitor/azure-monitor-query/samples/README.md @@ -31,20 +31,11 @@ For examples on authenticating with the Azure Monitor service, see [sample_authe - [Split a large query into multiple smaller queries to avoid hitting service limits](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/notebooks/sample_large_query.ipynb) - [Detect anomalies in Azure Monitor log data using machine learning techniques](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/notebooks/sample_machine_learning_sklearn.ipynb) - -### Metrics query samples - -- [Send a query using MetricsQueryClient](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/sample_metrics_query.py) ([async sample](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metrics_query_async.py)) -- [Send a query to multiple resources at once using 
MetricsClient](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/sample_metrics_query_multiple.py) ([async sample](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metrics_query_multiple_async.py)) -- [Get a list of metric namespaces](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/sample_metric_namespaces.py) ([async sample](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metric_namespaces_async.py)) -- [Get a list of metric definitions](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/sample_metric_definitions.py) ([async sample](https://github.com/Azure/azure-sdk-for-python/blob/main/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metric_definitions_async.py)) - ## Prerequisites -- Python 3.8 or later +- Python 3.9 or later - An [Azure subscription][azure_subscription] - To query Logs, you need an [Azure Log Analytics workspace][azure_monitor_create_using_portal]. -- To query Metrics, you need an Azure resource of any kind (Storage Account, Key Vault, Cosmos DB, etc.). ## Setup diff --git a/sdk/monitor/azure-monitor-query/samples/async_samples/sample_authentication_async.py b/sdk/monitor/azure-monitor-query/samples/async_samples/sample_authentication_async.py index df2c540179e3..bcd7d74eb0b8 100644 --- a/sdk/monitor/azure-monitor-query/samples/async_samples/sample_authentication_async.py +++ b/sdk/monitor/azure-monitor-query/samples/async_samples/sample_authentication_async.py @@ -1,10 +1,10 @@ +# pylint: disable=line-too-long,useless-suppression # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. """ FILE: sample_authentication_async.py DESCRIPTION: - This sample demonstrates how to authenticate to the Azure Monitor service using both - LogQueryClient and MetricsQueryClient. + This sample demonstrates how to authenticate to the Azure Monitor service using LogsQueryClient asynchronously. 
USAGE: python sample_authentication_async.py @@ -35,59 +35,9 @@ async def create_logs_query_client_sovereign_cloud_async(): # [END create_logs_query_client_sovereign_cloud_async] -async def create_metrics_query_client_async(): - # [START create_metrics_query_client_async] - from azure.identity.aio import DefaultAzureCredential - from azure.monitor.query.aio import MetricsQueryClient - - credential = DefaultAzureCredential() - client = MetricsQueryClient(credential) - # [END create_metrics_query_client_async] - - -async def create_metrics_query_client_sovereign_cloud_async(): - # [START create_metrics_query_client_sovereign_cloud_async] - from azure.identity import AzureAuthorityHosts - from azure.identity.aio import DefaultAzureCredential - from azure.monitor.query.aio import MetricsQueryClient - - credential = DefaultAzureCredential(authority=AzureAuthorityHosts.AZURE_GOVERNMENT) - client = MetricsQueryClient(credential, endpoint="https://management.usgovcloudapi.net") - # [END create_metrics_query_client_sovereign_cloud_async] - - -async def create_metrics_client_async(): - # [START create_metrics_client_async] - from azure.identity.aio import DefaultAzureCredential - from azure.monitor.query.aio import MetricsClient - - credential = DefaultAzureCredential() - client = MetricsClient("https://eastus.metrics.monitor.azure.com", credential) - # [END create_metrics_client_async] - - -async def create_metrics_client_sovereign_cloud_async(): - # [START create_metrics_client_sovereign_cloud_async] - from azure.identity import AzureAuthorityHosts - from azure.identity.aio import DefaultAzureCredential - from azure.monitor.query.aio import MetricsClient - - credential = DefaultAzureCredential(authority=AzureAuthorityHosts.AZURE_GOVERNMENT) - client = MetricsClient( - "https://usgovvirginia.metrics.monitor.azure.us", - credential, - audience="https://metrics.monitor.azure.us", - ) - # [END create_metrics_client_sovereign_cloud_async] - - async def main(): await create_logs_query_client_async() await create_logs_query_client_sovereign_cloud_async() - await create_metrics_query_client_async() - await create_metrics_query_client_sovereign_cloud_async() - await create_metrics_client_async() - await create_metrics_client_sovereign_cloud_async() if __name__ == "__main__": diff --git a/sdk/monitor/azure-monitor-query/samples/async_samples/sample_batch_query_async.py b/sdk/monitor/azure-monitor-query/samples/async_samples/sample_batch_query_async.py index 31319dccd8b4..1be3588aed55 100644 --- a/sdk/monitor/azure-monitor-query/samples/async_samples/sample_batch_query_async.py +++ b/sdk/monitor/azure-monitor-query/samples/async_samples/sample_batch_query_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. """ diff --git a/sdk/monitor/azure-monitor-query/samples/async_samples/sample_log_query_async.py b/sdk/monitor/azure-monitor-query/samples/async_samples/sample_log_query_async.py index c9892b4dcca3..019eaa85e4e9 100644 --- a/sdk/monitor/azure-monitor-query/samples/async_samples/sample_log_query_async.py +++ b/sdk/monitor/azure-monitor-query/samples/async_samples/sample_log_query_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
""" diff --git a/sdk/monitor/azure-monitor-query/samples/async_samples/sample_logs_query_visualization_async.py b/sdk/monitor/azure-monitor-query/samples/async_samples/sample_logs_query_visualization_async.py index e93551f57ec4..e3b24396d697 100644 --- a/sdk/monitor/azure-monitor-query/samples/async_samples/sample_logs_query_visualization_async.py +++ b/sdk/monitor/azure-monitor-query/samples/async_samples/sample_logs_query_visualization_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. # cspell:ignore xtitle, ytitle, ymax diff --git a/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metric_definitions_async.py b/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metric_definitions_async.py deleted file mode 100644 index 690a9fcbc9c4..000000000000 --- a/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metric_definitions_async.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -""" -FILE: sample_metric_definitions_async.py -DESCRIPTION: - This sample demonstrates listing all the metric definitions of a resource. -USAGE: - python sample_metric_definitions_async.py - Set the environment variables with your own values before running the sample: - 1) METRICS_RESOURCE_URI - The resource URI of the resource for which the metrics are being queried. - - This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. - For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. - - In this example, an Event Grid account resource URI is taken. -""" -import asyncio - -# [START send_metric_definitions_query_async] -import os - -from azure.core.exceptions import HttpResponseError -from azure.identity.aio import DefaultAzureCredential -from azure.monitor.query.aio import MetricsQueryClient - - -async def list_definitions(): - credential = DefaultAzureCredential() - client = MetricsQueryClient(credential) - - metrics_uri = os.environ["METRICS_RESOURCE_URI"] - async with client: - try: - response = client.list_metric_definitions(metrics_uri) - async for item in response: - print(item.name) - if item.metric_availabilities: - for availability in item.metric_availabilities: - print(availability.granularity) - except HttpResponseError as err: - print("something fatal happened") - print(err) - await credential.close() - - -# [END send_metric_definitions_query_async] - -if __name__ == "__main__": - asyncio.run(list_definitions()) diff --git a/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metric_namespaces_async.py b/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metric_namespaces_async.py deleted file mode 100644 index 0dcd0ae8215b..000000000000 --- a/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metric_namespaces_async.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -""" -FILE: sample_metric_namespaces_async.py -DESCRIPTION: - This sample demonstrates listing all the metric namespaces of a resource. 
-USAGE: - python sample_metric_namespaces_async.py - Set the environment variables with your own values before running the sample: - 1) METRICS_RESOURCE_URI - The resource URI of the resource for which the metrics are being queried. - - This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. - For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. - - In this example, a Storage account resource URI is taken. -""" -import asyncio - -# [START send_metric_namespaces_query_async] -import os - -from azure.core.exceptions import HttpResponseError -from azure.identity.aio import DefaultAzureCredential -from azure.monitor.query.aio import MetricsQueryClient - - -async def list_namespaces(): - credential = DefaultAzureCredential() - client = MetricsQueryClient(credential) - - metrics_uri = os.environ["METRICS_RESOURCE_URI"] - async with client: - try: - response = client.list_metric_namespaces(metrics_uri) - async for item in response: - print(item.fully_qualified_namespace) - print(item.type) - except HttpResponseError as err: - print("something fatal happened") - print(err) - await credential.close() - - -# [END send_metric_namespaces_query_async] - -if __name__ == "__main__": - asyncio.run(list_namespaces()) diff --git a/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metrics_query_async.py b/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metrics_query_async.py deleted file mode 100644 index 8961f920bde3..000000000000 --- a/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metrics_query_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -""" -FILE: sample_metrics_query_async.py -DESCRIPTION: - This sample demonstrates authenticating the MetricsQueryClient and retrieving the "Ingress" - metric along with the "Average" aggregation type. The query will execute over a timespan - of 2 hours with a granularity of 15 minutes. -USAGE: - python sample_metrics_query_async.py - Set the environment variables with your own values before running the sample: - 1) METRICS_RESOURCE_URI - The resource URI of the resource for which the metrics are being queried. - - This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. - For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. - - In this example, a Storage account resource URI is taken. 
-""" -import asyncio - -# [START send_metrics_query_async] -from datetime import timedelta -import os - -from azure.core.exceptions import HttpResponseError -from azure.identity.aio import DefaultAzureCredential -from azure.monitor.query.aio import MetricsQueryClient -from azure.monitor.query import MetricAggregationType - - -async def query_metrics(): - credential = DefaultAzureCredential() - client = MetricsQueryClient(credential) - - metrics_uri = os.environ["METRICS_RESOURCE_URI"] - async with client: - try: - response = await client.query_resource( - metrics_uri, - metric_names=["Ingress"], - timespan=timedelta(hours=2), - granularity=timedelta(minutes=15), - aggregations=[MetricAggregationType.AVERAGE], - ) - - for metric in response.metrics: - print(metric.name) - for time_series_element in metric.timeseries: - for metric_value in time_series_element.data: - print(metric_value.timestamp) - except HttpResponseError as err: - print("something fatal happened") - print(err) - await credential.close() - - -# [END send_metrics_query_async] - -if __name__ == "__main__": - asyncio.run(query_metrics()) diff --git a/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metrics_query_multiple_async.py b/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metrics_query_multiple_async.py deleted file mode 100644 index 57e1f6114a6d..000000000000 --- a/sdk/monitor/azure-monitor-query/samples/async_samples/sample_metrics_query_multiple_async.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -""" -FILE: sample_metrics_query_multiple_async.py -DESCRIPTION: - This sample demonstrates authenticating the MetricsClient and retrieving the "Ingress" - metric along with the "Average" aggregation type for multiple resources. - The query will execute over a timespan of 2 hours with a granularity of 5 minutes. -USAGE: - python sample_metrics_query_multiple_async.py - 1) AZURE_METRICS_ENDPOINT - The regional metrics endpoint to use (i.e. https://westus3.metrics.monitor.azure.com) - - This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. - For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. - - In this example, storage account resources are queried for metrics. 
-""" -import asyncio - -# [START send_metrics_batch_query_async] -from datetime import timedelta -import os - -from azure.core.exceptions import HttpResponseError -from azure.identity.aio import DefaultAzureCredential -from azure.monitor.query import MetricAggregationType -from azure.monitor.query.aio import MetricsClient - - -async def query_metrics_batch(): - endpoint = os.environ["AZURE_METRICS_ENDPOINT"] - - credential = DefaultAzureCredential() - client = MetricsClient(endpoint, credential) - - resource_ids = [ - "/subscriptions//resourceGroups//providers/Microsoft.Storage/storageAccounts/", - "/subscriptions//resourceGroups//providers/Microsoft.Storage/storageAccounts/", - ] - async with client: - try: - response = await client.query_resources( - resource_ids=resource_ids, - metric_namespace="Microsoft.Storage/storageAccounts", - metric_names=["Ingress"], - timespan=timedelta(hours=2), - granularity=timedelta(minutes=5), - aggregations=[MetricAggregationType.AVERAGE], - ) - - for metrics_query_result in response: - for metric in metrics_query_result.metrics: - print(metric.name + " -- " + metric.display_description) - for time_series_element in metric.timeseries: - for metric_value in time_series_element.data: - print("The ingress at {} is {}".format(metric_value.timestamp, metric_value.average)) - except HttpResponseError as err: - print("something fatal happened") - print(err) - await credential.close() - - -# [END send_metrics_batch_query_async] - -if __name__ == "__main__": - asyncio.run(query_metrics_batch()) diff --git a/sdk/monitor/azure-monitor-query/samples/async_samples/sample_resource_logs_query_async.py b/sdk/monitor/azure-monitor-query/samples/async_samples/sample_resource_logs_query_async.py index 6873c54d6ebd..06953f34f943 100644 --- a/sdk/monitor/azure-monitor-query/samples/async_samples/sample_resource_logs_query_async.py +++ b/sdk/monitor/azure-monitor-query/samples/async_samples/sample_resource_logs_query_async.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. """ diff --git a/sdk/monitor/azure-monitor-query/samples/sample_authentication.py b/sdk/monitor/azure-monitor-query/samples/sample_authentication.py index aa56af872331..698b5954fbe2 100644 --- a/sdk/monitor/azure-monitor-query/samples/sample_authentication.py +++ b/sdk/monitor/azure-monitor-query/samples/sample_authentication.py @@ -1,10 +1,10 @@ +# pylint: disable=line-too-long,useless-suppression # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. """ FILE: sample_authentication.py DESCRIPTION: - This sample demonstrates how to authenticate to the Azure Monitor service using both - LogQueryClient and MetricsQueryClient. + This sample demonstrates how to authenticate to the Azure Monitor service using LogsQueryClient. 
USAGE: python sample_authentication.py @@ -33,54 +33,6 @@ def create_logs_query_client_sovereign_cloud(): # [END create_logs_query_client_sovereign_cloud] -def create_metrics_query_client(): - # [START create_metrics_query_client] - from azure.identity import DefaultAzureCredential - from azure.monitor.query import MetricsQueryClient - - credential = DefaultAzureCredential() - client = MetricsQueryClient(credential) - # [END create_metrics_query_client] - - -def create_metrics_query_client_sovereign_cloud(): - # [START create_metrics_query_client_sovereign_cloud] - from azure.identity import AzureAuthorityHosts, DefaultAzureCredential - from azure.monitor.query import MetricsQueryClient - - credential = DefaultAzureCredential(authority=AzureAuthorityHosts.AZURE_GOVERNMENT) - client = MetricsQueryClient(credential, endpoint="https://management.usgovcloudapi.net") - # [END create_metrics_query_client_sovereign_cloud] - - -def create_metrics_client(): - # [START create_metrics_client] - from azure.identity import DefaultAzureCredential - from azure.monitor.query import MetricsClient - - credential = DefaultAzureCredential() - client = MetricsClient("https://eastus.metrics.monitor.azure.com", credential) - # [END create_metrics_client] - - -def create_metrics_client_sovereign_cloud(): - # [START create_metrics_client_sovereign_cloud] - from azure.identity import AzureAuthorityHosts, DefaultAzureCredential - from azure.monitor.query import MetricsClient - - credential = DefaultAzureCredential(authority=AzureAuthorityHosts.AZURE_GOVERNMENT) - client = MetricsClient( - "https://usgovvirginia.metrics.monitor.azure.us", - credential, - audience="https://metrics.monitor.azure.us", - ) - # [END create_metrics_client_sovereign_cloud] - - if __name__ == "__main__": create_logs_query_client() create_logs_query_client_sovereign_cloud() - create_metrics_query_client() - create_metrics_query_client_sovereign_cloud() - create_metrics_client() - create_metrics_client_sovereign_cloud() diff --git a/sdk/monitor/azure-monitor-query/samples/sample_batch_query.py b/sdk/monitor/azure-monitor-query/samples/sample_batch_query.py index cfe870ae9144..f59347f04280 100644 --- a/sdk/monitor/azure-monitor-query/samples/sample_batch_query.py +++ b/sdk/monitor/azure-monitor-query/samples/sample_batch_query.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. """ diff --git a/sdk/monitor/azure-monitor-query/samples/sample_log_query_multiple_workspaces.py b/sdk/monitor/azure-monitor-query/samples/sample_log_query_multiple_workspaces.py index 9c643e6222be..7460288ae8eb 100644 --- a/sdk/monitor/azure-monitor-query/samples/sample_log_query_multiple_workspaces.py +++ b/sdk/monitor/azure-monitor-query/samples/sample_log_query_multiple_workspaces.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. """ diff --git a/sdk/monitor/azure-monitor-query/samples/sample_logs_query_key_value_form.py b/sdk/monitor/azure-monitor-query/samples/sample_logs_query_key_value_form.py index 6b77788f5492..d39f9c3fbaf5 100644 --- a/sdk/monitor/azure-monitor-query/samples/sample_logs_query_key_value_form.py +++ b/sdk/monitor/azure-monitor-query/samples/sample_logs_query_key_value_form.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # Copyright (c) Microsoft Corporation. All rights reserved. 
# Licensed under the MIT License. """ diff --git a/sdk/monitor/azure-monitor-query/samples/sample_logs_query_visualization.py b/sdk/monitor/azure-monitor-query/samples/sample_logs_query_visualization.py index ff9e82016b6d..cdedf8dcfae1 100644 --- a/sdk/monitor/azure-monitor-query/samples/sample_logs_query_visualization.py +++ b/sdk/monitor/azure-monitor-query/samples/sample_logs_query_visualization.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. # cspell:ignore xtitle, ytitle, ymax diff --git a/sdk/monitor/azure-monitor-query/samples/sample_logs_single_query.py b/sdk/monitor/azure-monitor-query/samples/sample_logs_single_query.py index b1979690d7e0..25fdd5e70049 100644 --- a/sdk/monitor/azure-monitor-query/samples/sample_logs_single_query.py +++ b/sdk/monitor/azure-monitor-query/samples/sample_logs_single_query.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. """ diff --git a/sdk/monitor/azure-monitor-query/samples/sample_logs_single_query_partial_result.py b/sdk/monitor/azure-monitor-query/samples/sample_logs_single_query_partial_result.py index 6381db8f973c..39d48af1ccbf 100644 --- a/sdk/monitor/azure-monitor-query/samples/sample_logs_single_query_partial_result.py +++ b/sdk/monitor/azure-monitor-query/samples/sample_logs_single_query_partial_result.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. """ diff --git a/sdk/monitor/azure-monitor-query/samples/sample_metric_definitions.py b/sdk/monitor/azure-monitor-query/samples/sample_metric_definitions.py deleted file mode 100644 index ab4a1460857d..000000000000 --- a/sdk/monitor/azure-monitor-query/samples/sample_metric_definitions.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -""" -FILE: sample_metric_definitions.py -DESCRIPTION: - This sample demonstrates listing all the metric definitions of a resource. -USAGE: - python sample_metric_definitions.py - Set the environment variables with your own values before running the sample: - 1) METRICS_RESOURCE_URI - The resource URI of the resource for which the metrics are being queried. - - This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. - For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. - - In this example, an Event Grid account resource URI is taken. 
-""" -# [START send_metric_definitions_query] -import os - -from azure.identity import DefaultAzureCredential -from azure.monitor.query import MetricsQueryClient - - -credential = DefaultAzureCredential() -client = MetricsQueryClient(credential) - -metrics_uri = os.environ["METRICS_RESOURCE_URI"] -response = client.list_metric_definitions(metrics_uri, namespace="microsoft.eventgrid/topics") - -for item in response: - print(item.name) - if item.metric_availabilities: - for availability in item.metric_availabilities: - print(availability.granularity) - -# [END send_metric_definitions_query] diff --git a/sdk/monitor/azure-monitor-query/samples/sample_metric_namespaces.py b/sdk/monitor/azure-monitor-query/samples/sample_metric_namespaces.py deleted file mode 100644 index 73fcf0f977e9..000000000000 --- a/sdk/monitor/azure-monitor-query/samples/sample_metric_namespaces.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -""" -FILE: sample_metric_namespaces.py -DESCRIPTION: - This sample demonstrates listing all the metric namespaces of a resource. -USAGE: - python sample_metric_namespaces.py - Set the environment variables with your own values before running the sample: - 1) METRICS_RESOURCE_URI - The resource URI of the resource for which the metrics are being queried. - - This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. - For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. - - In this example, a Storage account resource URI is taken. -""" -# [START send_metric_namespaces_query] -import os - -from azure.identity import DefaultAzureCredential -from azure.monitor.query import MetricsQueryClient - - -credential = DefaultAzureCredential() -client = MetricsQueryClient(credential) - -metrics_uri = os.environ["METRICS_RESOURCE_URI"] -response = client.list_metric_namespaces(metrics_uri) - -for item in response: - print(item.fully_qualified_namespace) - print(item.type) - -# [END send_metric_namespaces_query] diff --git a/sdk/monitor/azure-monitor-query/samples/sample_metrics_query.py b/sdk/monitor/azure-monitor-query/samples/sample_metrics_query.py deleted file mode 100644 index 73a7aaf2ce0f..000000000000 --- a/sdk/monitor/azure-monitor-query/samples/sample_metrics_query.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -""" -FILE: sample_metrics_query.py -DESCRIPTION: - This sample demonstrates authenticating the MetricsQueryClient and retrieving the "Ingress" - metric along with the "Average" aggregation type. The query will execute over a timespan - of 2 hours with a granularity of 5 minutes. -USAGE: - python sample_metrics_query.py - Set the environment variables with your own values before running the sample: - 1) METRICS_RESOURCE_URI - The resource uri of the resource for which the metrics are being queried. - - This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. - For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. - - In this example, a Storage account resource URI is taken. 
-""" - -# [START send_metrics_query] -from datetime import timedelta -import os - -from azure.identity import DefaultAzureCredential -from azure.monitor.query import MetricsQueryClient, MetricAggregationType - - -credential = DefaultAzureCredential() -client = MetricsQueryClient(credential) - -metrics_uri = os.environ["METRICS_RESOURCE_URI"] -response = client.query_resource( - metrics_uri, - metric_names=["Ingress"], - timespan=timedelta(hours=2), - granularity=timedelta(minutes=5), - aggregations=[MetricAggregationType.AVERAGE], -) - -for metric in response.metrics: - print(metric.name + " -- " + metric.display_description) - for time_series_element in metric.timeseries: - for metric_value in time_series_element.data: - print("The ingress at {} is {}".format(metric_value.timestamp, metric_value.average)) -# [END send_metrics_query] diff --git a/sdk/monitor/azure-monitor-query/samples/sample_metrics_query_multiple.py b/sdk/monitor/azure-monitor-query/samples/sample_metrics_query_multiple.py deleted file mode 100644 index 620756a9822a..000000000000 --- a/sdk/monitor/azure-monitor-query/samples/sample_metrics_query_multiple.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -""" -FILE: sample_metrics_query_multiple.py -DESCRIPTION: - This sample demonstrates authenticating the MetricsClient and retrieving the "Ingress" - metric along with the "Average" aggregation type for multiple resources. - The query will execute over a timespan of 2 hours with a granularity of 5 minutes. -USAGE: - python sample_metrics_query_multiple.py - 1) AZURE_METRICS_ENDPOINT - The regional metrics endpoint to use (i.e. https://westus3.metrics.monitor.azure.com) - - This example uses DefaultAzureCredential, which requests a token from Azure Active Directory. - For more information on DefaultAzureCredential, see https://learn.microsoft.com/python/api/overview/azure/identity-readme?view=azure-python#defaultazurecredential. - - In this example, storage account resources are queried for metrics. 
-""" - -# [START send_metrics_batch_query] -from datetime import timedelta -import os - -from azure.core.exceptions import HttpResponseError -from azure.identity import DefaultAzureCredential -from azure.monitor.query import MetricsClient, MetricAggregationType - - -endpoint = os.environ["AZURE_METRICS_ENDPOINT"] - -credential = DefaultAzureCredential() -client = MetricsClient(endpoint, credential) - -resource_ids = [ - "/subscriptions//resourceGroups//providers/Microsoft.Storage/storageAccounts/", - "/subscriptions//resourceGroups//providers/Microsoft.Storage/storageAccounts/", -] - -try: - response = client.query_resources( - resource_ids=resource_ids, - metric_namespace="Microsoft.Storage/storageAccounts", - metric_names=["Ingress"], - timespan=timedelta(hours=2), - granularity=timedelta(minutes=5), - aggregations=[MetricAggregationType.AVERAGE], - ) - - for metrics_query_result in response: - for metric in metrics_query_result.metrics: - print(metric.name + " -- " + metric.display_description) - for time_series_element in metric.timeseries: - for metric_value in time_series_element.data: - print("The ingress at {} is {}".format(metric_value.timestamp, metric_value.average)) -except HttpResponseError as err: - print("something fatal happened") - print(err) -# [END send_metrics_batch_query] diff --git a/sdk/monitor/azure-monitor-query/samples/sample_resource_logs_query.py b/sdk/monitor/azure-monitor-query/samples/sample_resource_logs_query.py index 8f313b81a7eb..6cade1882f3f 100644 --- a/sdk/monitor/azure-monitor-query/samples/sample_resource_logs_query.py +++ b/sdk/monitor/azure-monitor-query/samples/sample_resource_logs_query.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. """ diff --git a/sdk/monitor/azure-monitor-query/samples/sample_server_timeout.py b/sdk/monitor/azure-monitor-query/samples/sample_server_timeout.py index 75726eafef03..2a8f8ee1ecad 100644 --- a/sdk/monitor/azure-monitor-query/samples/sample_server_timeout.py +++ b/sdk/monitor/azure-monitor-query/samples/sample_server_timeout.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. """ diff --git a/sdk/monitor/azure-monitor-query/samples/sample_single_log_query_without_pandas.py b/sdk/monitor/azure-monitor-query/samples/sample_single_log_query_without_pandas.py index 60f7ca9d8000..8210d4c9b144 100644 --- a/sdk/monitor/azure-monitor-query/samples/sample_single_log_query_without_pandas.py +++ b/sdk/monitor/azure-monitor-query/samples/sample_single_log_query_without_pandas.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. """ diff --git a/sdk/monitor/azure-monitor-query/setup.py b/sdk/monitor/azure-monitor-query/setup.py index 58cf6b19d014..7b75d47ded79 100644 --- a/sdk/monitor/azure-monitor-query/setup.py +++ b/sdk/monitor/azure-monitor-query/setup.py @@ -1,76 +1,48 @@ -#!/usr/bin/env python - -# ------------------------------------------------------------------------- +# coding=utf-8 +# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- + +import os import re -import os.path -from io import open -from setuptools import find_packages, setup +from setuptools import setup, find_packages + -# Change the PACKAGE_NAME only to change folder and different name PACKAGE_NAME = "azure-monitor-query" PACKAGE_PPRINT_NAME = "Azure Monitor Query" +PACKAGE_NAMESPACE = "azure.monitor.query" -# a-b-c => a/b/c -package_folder_path = PACKAGE_NAME.replace("-", "/") -# a-b-c => a.b.c -namespace_name = PACKAGE_NAME.replace("-", ".") - -# azure v0.x is not compatible with this package -# azure v0.x used to have a __version__ attribute (newer versions don't) -try: - import azure - - try: - ver = azure.__version__ - raise Exception( - "This package is incompatible with azure=={}. ".format(ver) + 'Uninstall it with "pip uninstall azure".' - ) - except AttributeError: - pass -except ImportError: - pass +# a.b.c => a/b/c +package_folder_path = PACKAGE_NAMESPACE.replace(".", "/") # Version extraction inspired from 'requests' -with open( - ( - os.path.join(package_folder_path, "version.py") - if os.path.exists(os.path.join(package_folder_path, "version.py")) - else os.path.join(package_folder_path, "_version.py") - ), - "r", -) as fd: +with open(os.path.join(package_folder_path, "_version.py"), "r") as fd: version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE).group(1) if not version: raise RuntimeError("Cannot find version information") -with open("README.md", encoding="utf-8") as f: - readme = f.read() -with open("CHANGELOG.md", encoding="utf-8") as f: - changelog = f.read() setup( name=PACKAGE_NAME, version=version, - description="Microsoft {} Client Library for Python".format(PACKAGE_PPRINT_NAME), - long_description=readme + "\n\n" + changelog, + description="Microsoft Corporation {} Client Library for Python".format(PACKAGE_PPRINT_NAME), + long_description=open("README.md", "r").read(), long_description_content_type="text/markdown", license="MIT License", author="Microsoft Corporation", author_email="azpysdkhelp@microsoft.com", - url="https://github.com/Azure/azure-sdk-for-python", + url="https://github.com/Azure/azure-sdk-for-python/tree/main/sdk", keywords="azure, azure sdk", classifiers=[ "Development Status :: 5 - Production/Stable", "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", @@ -78,12 +50,10 @@ "Programming Language :: Python :: 3.13", "License :: OSI Approved :: MIT License", ], - python_requires=">=3.8", zip_safe=False, packages=find_packages( exclude=[ "tests", - "samples", # Exclude packages that will be covered by PEP420 or nspkg "azure", "azure.monitor", @@ -91,7 +61,12 @@ ), include_package_data=True, package_data={ - "pytyped": ["py.typed"], + "azure.monitor.query": ["py.typed"], }, - install_requires=["azure-core>=1.28.0", "isodate>=0.6.0", "typing-extensions>=4.0.1"], + install_requires=[ + "isodate>=0.6.1", + "azure-core>=1.30.0", + "typing-extensions>=4.6.0", + ], + python_requires=">=3.9", ) diff --git a/sdk/monitor/azure-monitor-query/swagger/README.md 
b/sdk/monitor/azure-monitor-query/swagger/README.md
deleted file mode 100644
index 4ee5183ca20f..000000000000
--- a/sdk/monitor/azure-monitor-query/swagger/README.md
+++ /dev/null
@@ -1,111 +0,0 @@
-# Azure Monitor Query Client for Python
-
-> see https://aka.ms/autorest
-
-### Configuration
-
-```yaml
-title: MonitorQueryClient
-description: Azure Monitor Query Python Client
-generated-metadata: false
-license-header: MICROSOFT_MIT_NO_VERSION
-package-name: azure-monitor-query
-no-namespace-folders: true
-python: true
-version-tolerant: true
-python3-only: true
-black: true
-clear-output-folder: true
-modelerfour:
-  lenient-model-deduplication: true
-```
-
-## Batch execution
-
-```yaml
-batch:
-  - tag: release_query
-  - tag: release_metrics
-  - tag: release_metrics_batch
-```
-
-## Query
-
-These settings apply only when `--tag=release_query` is specified on the command line.
-
-```yaml $(tag) == 'release_query'
-input-file:
-  - https://github.com/Azure/azure-rest-api-specs/blob/0b64ca7cbe3af8cd13228dfb783a16b8272b8be2/specification/operationalinsights/data-plane/Microsoft.OperationalInsights/stable/2022-10-27/OperationalInsights.json
-output-folder: ../azure/monitor/query/_generated
-title: MonitorQueryClient
-description: Azure Monitor Query Python Client
-```
-
-## Metrics
-
-These settings apply only when `--tag=release_metrics` is specified on the command line.
-
-```yaml $(tag) == 'release_metrics'
-input-file:
-  - https://github.com/Azure/azure-rest-api-specs/blob/0b64ca7cbe3af8cd13228dfb783a16b8272b8be2/specification/monitor/resource-manager/Microsoft.Insights/stable/2024-02-01/metricDefinitions_API.json
-  - https://github.com/Azure/azure-rest-api-specs/blob/0b64ca7cbe3af8cd13228dfb783a16b8272b8be2/specification/monitor/resource-manager/Microsoft.Insights/stable/2024-02-01/metrics_API.json
-  - https://github.com/Azure/azure-rest-api-specs/blob/0b64ca7cbe3af8cd13228dfb783a16b8272b8be2/specification/monitor/resource-manager/Microsoft.Insights/stable/2024-02-01/metricNamespaces_API.json
-output-folder: ../azure/monitor/query/_generated/metrics
-title: MonitorMetricsClient
-description: Azure Monitor Metrics Python Client
-```
-
-### Metrics Batch
-
-These settings apply only when `--tag=release_metrics_batch` is specified on the command line.
-
-```yaml $(tag) == 'release_metrics_batch'
-input-file:
-  - https://github.com/Azure/azure-rest-api-specs/blob/0b64ca7cbe3af8cd13228dfb783a16b8272b8be2/specification/monitor/data-plane/Microsoft.Insights/stable/2024-02-01/metricBatch.json
-output-folder: ../azure/monitor/query/_generated/metrics/batch
-title: MonitorBatchMetricsClient
-description: Azure Monitor Batch Metrics Python Client
-```
-
-### Remove metadata operations
-
-``` yaml
-directive:
-- from: swagger-document
-  where: $
-  transform: >
-    delete $.securityDefinitions
-```
-
-### Make properties required
-
-``` yaml
-directive:
-- from: swagger-document
-  where: $.definitions.column
-  transform: >
-    $.required = ["name", "type"]
-```
-
-### Remove subscription scoped operations
-
-``` yaml
-directive:
-  - remove-operation: MetricDefinitions_ListAtSubscriptionScope
-  - remove-operation: Metrics_ListAtSubscriptionScope
-  - remove-operation: Metrics_ListAtSubscriptionScopePost
-```
-
-### Interval adjustments
-
-Currently, the value for `default` is erroneously being set to the parameter default in the generated method: https://github.com/Azure/autorest.python/issues/2062
-Also, the interval parameter in the spec does not use the "duration" format due to the need to support the "FULL" keyword which is not a valid ISO 8601 duration. In the Python SDK, we want the interval parameter to be `timedelta` only, so we add the "duration" format.
-
-``` yaml
-directive:
-- from: swagger-document
-  where: $.parameters[IntervalParameter]
-  transform: >
-    delete $.default;
-    $.format = "duration";
-```
diff --git a/sdk/monitor/azure-monitor-query/tests/base_testcase.py b/sdk/monitor/azure-monitor-query/tests/base_testcase.py
index 7c67f9dca21a..1ab2c346e469 100644
--- a/sdk/monitor/azure-monitor-query/tests/base_testcase.py
+++ b/sdk/monitor/azure-monitor-query/tests/base_testcase.py
@@ -18,14 +18,6 @@
     "AzureUSGovernment": "https://api.loganalytics.us/v1",
 }
 
-METRICS_CLIENT_ENVIRONMENT_AUDIENCE_MAP = {
-    "AzureCloud": "https://metrics.monitor.azure.com",
-    "AzureChinaCloud": "https://metrics.monitor.azure.cn",
-    "AzureUSGovernment": "https://metrics.monitor.azure.us",
-}
-
-TLD_MAP = {"AzureCloud": "com", "AzureChinaCloud": "cn", "AzureUSGovernment": "us"}
-
 
 class AzureMonitorQueryLogsTestCase(AzureRecordedTestCase):
@@ -37,33 +29,3 @@ def get_client(self, client_class, credential):
             kwargs["endpoint"] = LOGS_ENVIRONMENT_ENDPOINT_MAP[environment]
 
         return self.create_client_from_credential(client_class, credential, **kwargs)
-
-
-class MetricsQueryClientTestCase(AzureRecordedTestCase):
-
-    def get_client(self, client_class, credential):
-
-        kwargs = {}
-        arm_url = os.getenv(ENV_MONITOR_RESOURCE_MANAGER_URL)
-        if arm_url:
-            kwargs["endpoint"] = arm_url
-
-        return self.create_client_from_credential(client_class, credential, **kwargs)
-
-
-class MetricsClientTestCase(AzureRecordedTestCase):
-
-    def get_client(self, client_class, credential, endpoint=None):
-
-        environment = os.getenv(ENV_MONITOR_ENVIRONMENT)
-        kwargs = {}
-        tld = "com"
-        if environment:
-            kwargs["audience"] = METRICS_CLIENT_ENVIRONMENT_AUDIENCE_MAP.get(environment)
-            tld = TLD_MAP.get(environment, "com")
-
-        if not endpoint:
-            region = os.getenv(ENV_MONITOR_LOCATION) or "westus2"
-            kwargs["endpoint"] = f"https://{region}.metrics.monitor.azure.{tld}"
-
-        return self.create_client_from_credential(client_class, credential, **kwargs)
diff --git a/sdk/monitor/azure-monitor-query/tests/conftest.py b/sdk/monitor/azure-monitor-query/tests/conftest.py
index 592e643fc94f..2100a6dc10c9
100644 --- a/sdk/monitor/azure-monitor-query/tests/conftest.py +++ b/sdk/monitor/azure-monitor-query/tests/conftest.py @@ -10,14 +10,11 @@ # Environment variable keys ENV_METRICS_RESOURCE_ID = "METRICS_RESOURCE_ID" -ENV_SUBSCRIPTION_ID = "AZURE_SUBSCRIPTION_ID" +ENV_SUBSCRIPTION_ID = "MONITOR_SUBSCRIPTION_ID" ENV_WORKSPACE_ID = "AZURE_MONITOR_WORKSPACE_ID" ENV_SECONDARY_WORKSPACE_ID = "AZURE_MONITOR_SECONDARY_WORKSPACE_ID" ENV_DCR_ID = "AZURE_MONITOR_DCR_ID" ENV_TABLE_NAME = "AZURE_MONITOR_TABLE_NAME" -ENV_TENANT_ID = "AZURE_TENANT_ID" -ENV_CLIENT_ID = "AZURE_CLIENT_ID" -ENV_CLIENT_SECRET = "AZURE_CLIENT_SECRET" # Fake values TEST_ID = "00000000-0000-0000-0000-000000000000" @@ -32,9 +29,6 @@ def add_sanitizers(test_proxy, environment_variables): ENV_SUBSCRIPTION_ID: TEST_ID, ENV_WORKSPACE_ID: TEST_ID, ENV_SECONDARY_WORKSPACE_ID: TEST_ID, - ENV_TENANT_ID: TEST_ID, - ENV_CLIENT_ID: TEST_ID, - ENV_CLIENT_SECRET: TEST_ID, ENV_TABLE_NAME: TEST_TABLE_NAME, ENV_DCR_ID: TEST_ID, } diff --git a/sdk/monitor/azure-monitor-query/tests/perfstress_tests/README.md b/sdk/monitor/azure-monitor-query/tests/perfstress_tests/README.md index 1a277d80bc53..790101a6db2e 100644 --- a/sdk/monitor/azure-monitor-query/tests/perfstress_tests/README.md +++ b/sdk/monitor/azure-monitor-query/tests/perfstress_tests/README.md @@ -38,7 +38,6 @@ These options are available for all perf tests: The tests currently written for the T2 SDK: - `LogsPerfTest` queries a single query. - `LogsBatchPerfTest` queries multiple queries using the batch operation. -- `MetricsPerfTest` to test a metrics query on eventgrid resource ## Example command ```cmd diff --git a/sdk/monitor/azure-monitor-query/tests/perfstress_tests/metric_query.py b/sdk/monitor/azure-monitor-query/tests/perfstress_tests/metric_query.py deleted file mode 100644 index 1538774dd6b0..000000000000 --- a/sdk/monitor/azure-monitor-query/tests/perfstress_tests/metric_query.py +++ /dev/null @@ -1,55 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -------------------------------------------------------------------------- - -import asyncio -from datetime import datetime, timezone -from devtools_testutils.perfstress_tests import PerfStressTest - -from azure.monitor.query import MetricsQueryClient as SyncMetricsQueryClient, MetricAggregationType -from azure.monitor.query.aio import MetricsQueryClient as AsyncMetricsQueryClient - -from azure.identity import DefaultAzureCredential as SyncDefaultAzureCredential -from azure.identity.aio import DefaultAzureCredential as AsyncDefaultAzureCredential - - -class MetricsPerfTest(PerfStressTest): - def __init__(self, arguments): - super().__init__(arguments) - - # auth configuration - self.metrics_uri = self.get_from_env("METRICS_RESOURCE_URI") - self.names = ["MatchedEventCount"] - self.aggregations = [MetricAggregationType.COUNT] - - # Create clients - self.metrics_client = SyncMetricsQueryClient(credential=SyncDefaultAzureCredential()) - self.async_metrics_client = AsyncMetricsQueryClient(credential=AsyncDefaultAzureCredential()) - - async def close(self): - """This is run after cleanup. - - Use this to close any open handles or clients. - """ - await self.async_metrics_client.close() - await super().close() - - def run_sync(self): - """The synchronous perf test. - - Try to keep this minimal and focused. Using only a single client API. 
- Avoid putting any ancillary logic (e.g. generating UUIDs), and put this in the setup/init instead - so that we're only measuring the client API call. - """ - self.metrics_client.query_resource(self.metrics_uri, self.names, aggregations=self.aggregations) - - async def run_async(self): - """The asynchronous perf test. - - Try to keep this minimal and focused. Using only a single client API. - Avoid putting any ancillary logic (e.g. generating UUIDs), and put this in the setup/init instead - so that we're only measuring the client API call. - """ - await self.async_metrics_client.query_resource(self.metrics_uri, self.names, aggregations=self.aggregations) diff --git a/sdk/monitor/azure-monitor-query/tests/test_helpers.py b/sdk/monitor/azure-monitor-query/tests/test_helpers.py index 9930163d9b34..6da15c1eaeb6 100644 --- a/sdk/monitor/azure-monitor-query/tests/test_helpers.py +++ b/sdk/monitor/azure-monitor-query/tests/test_helpers.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See LICENSE.txt in the project root for @@ -6,7 +7,7 @@ from datetime import datetime, timedelta import pytest -from azure.monitor.query._helpers import get_subscription_id_from_resource, get_timespan_iso8601_endpoints +from azure.monitor.query._helpers import get_subscription_id_from_resource def test_get_subscription_id_from_resource(): @@ -30,20 +31,3 @@ def test_get_subscription_id_from_resource(): with pytest.raises(ValueError): get_subscription_id_from_resource("") - - -def test_get_timespan_iso6801_endpoints(): - start, end = datetime(2020, 1, 1), datetime(2020, 1, 2) - iso_start, iso_end = get_timespan_iso8601_endpoints((start, end)) - assert iso_start == "2020-01-01T00:00:00.000Z" - assert iso_end == "2020-01-02T00:00:00.000Z" - - start, delta = datetime(2020, 1, 1), timedelta(days=3) - iso_start, iso_end = get_timespan_iso8601_endpoints((start, delta)) - assert iso_start == "2020-01-01T00:00:00.000Z" - assert iso_end == "2020-01-04T00:00:00.000Z" - - start, delta = datetime(2020, 1, 4), timedelta(days=-3) - iso_start, iso_end = get_timespan_iso8601_endpoints((start, delta)) - assert iso_start == "2020-01-01T00:00:00.000Z" - assert iso_end == "2020-01-04T00:00:00.000Z" diff --git a/sdk/monitor/azure-monitor-query/tests/test_logs_client.py b/sdk/monitor/azure-monitor-query/tests/test_logs_client.py index 9e44601866c2..4bac77713ecd 100644 --- a/sdk/monitor/azure-monitor-query/tests/test_logs_client.py +++ b/sdk/monitor/azure-monitor-query/tests/test_logs_client.py @@ -196,7 +196,6 @@ def test_logs_single_query_additional_workspaces(self, recorded_test, monitor_in assert response is not None assert len(response.tables[0].rows) == 2 - @pytest.mark.skip("Flaky deserialization issues with msrest. 
Re-enable after removing msrest dependency.") @pytest.mark.live_test_only("Issues recording dynamic 'id' values in requests/responses") def test_logs_query_batch_additional_workspaces(self, monitor_info): client = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) @@ -298,12 +297,13 @@ def test_logs_resource_query_additional_options(self, recorded_test, monitor_inf def test_client_different_endpoint(self): credential = self.get_credential(LogsQueryClient) - endpoint = "https://api.loganalytics.azure.cn/v1" + endpoint = "https://api.loganalytics.azure.cn" client = LogsQueryClient(credential, endpoint=endpoint) assert client._endpoint == endpoint - assert "https://api.loganalytics.azure.cn/.default" in client._client._config.authentication_policy._scopes + assert client._config.authentication_policy + assert "https://api.loganalytics.azure.cn/.default" in client._config.authentication_policy._scopes def test_client_user_agent(self): client: LogsQueryClient = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient)) - assert f"monitor-query/{VERSION}" in client._client._config.user_agent_policy.user_agent + assert f"monitor-query/{VERSION}" in client._config.user_agent_policy.user_agent diff --git a/sdk/monitor/azure-monitor-query/tests/test_logs_client_async.py b/sdk/monitor/azure-monitor-query/tests/test_logs_client_async.py index 43a37859b2d4..0dd18d5fdaa7 100644 --- a/sdk/monitor/azure-monitor-query/tests/test_logs_client_async.py +++ b/sdk/monitor/azure-monitor-query/tests/test_logs_client_async.py @@ -74,7 +74,7 @@ async def test_logs_server_timeout(self, recorded_test, monitor_info): @pytest.mark.asyncio async def test_logs_query_batch_raises_on_no_timespan(self, monitor_info): with pytest.raises(TypeError): - LogsBatchQuery( + LogsBatchQuery( # type: ignore workspace_id=monitor_info["workspace_id"], query="AzureActivity | summarize count()", ) @@ -128,7 +128,6 @@ async def test_logs_single_query_additional_workspaces_async(self, recorded_test assert response assert len(response.tables[0].rows) == 2 - @pytest.mark.skip("Flaky deserialization issues with msrest. 
Re-enable after removing msrest dependency.") @pytest.mark.live_test_only("Issues recording dynamic 'id' values in requests/responses") @pytest.mark.asyncio async def test_logs_query_batch_additional_workspaces(self, monitor_info): @@ -258,14 +257,15 @@ async def test_logs_resource_query_additional_options(self, recorded_test, monit @pytest.mark.asyncio async def test_client_different_endpoint(self): credential = self.get_credential(LogsQueryClient, is_async=True) - endpoint = "https://api.loganalytics.azure.cn/v1" + endpoint = "https://api.loganalytics.azure.cn/" client = LogsQueryClient(credential, endpoint=endpoint) assert client._endpoint == endpoint - assert "https://api.loganalytics.azure.cn/.default" in client._client._config.authentication_policy._scopes + assert client._config.authentication_policy + assert "https://api.loganalytics.azure.cn/.default" in client._config.authentication_policy._scopes @pytest.mark.asyncio async def test_client_user_agent(self): client: LogsQueryClient = self.get_client(LogsQueryClient, self.get_credential(LogsQueryClient, is_async=True)) async with client: - assert f"monitor-query/{VERSION}" in client._client._config.user_agent_policy.user_agent + assert f"monitor-query/{VERSION}" in client._config.user_agent_policy.user_agent diff --git a/sdk/monitor/azure-monitor-query/tests/test_logs_timespans.py b/sdk/monitor/azure-monitor-query/tests/test_logs_timespans.py index 4a00fbc9d07e..d2ea1a8dd2d3 100644 --- a/sdk/monitor/azure-monitor-query/tests/test_logs_timespans.py +++ b/sdk/monitor/azure-monitor-query/tests/test_logs_timespans.py @@ -3,10 +3,9 @@ # Licensed under the MIT License. See LICENSE.txt in the project root for # license information. # ------------------------------------------------------------------------- -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone import json -from msrest.serialization import UTC import pytest from azure.monitor.query import LogsQueryClient @@ -91,4 +90,13 @@ def test_duration_to_iso8601(self): assert construct_iso8601(timespan=d7) == "PT172800.0S" with pytest.raises(ValueError, match="timespan must be a timedelta or a tuple."): - construct_iso8601(timespan=(datetime.now(UTC()))) + construct_iso8601(timespan=(datetime.now(timezone.utc))) + + def test_iso8601_start_end(self): + start = datetime(2022, 11, 7, 1, 3, 7, 584426, tzinfo=timezone.utc) + end = datetime(2022, 11, 8, 1, 3, 7, 584426, tzinfo=timezone.utc) + duration = timedelta(days=1) + + assert construct_iso8601(timespan=(start, end)) == "2022-11-07T01:03:07.584426Z/2022-11-08T01:03:07.584426Z" + assert construct_iso8601(timespan=(start, duration)) == "2022-11-07T01:03:07.584426Z/PT86400.0S" + assert construct_iso8601(timespan=duration) == "PT86400.0S" diff --git a/sdk/monitor/azure-monitor-query/tests/test_metrics_client.py b/sdk/monitor/azure-monitor-query/tests/test_metrics_client.py deleted file mode 100644 index 6c3af77d71e7..000000000000 --- a/sdk/monitor/azure-monitor-query/tests/test_metrics_client.py +++ /dev/null @@ -1,60 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See LICENSE.txt in the project root for -# license information. 
-# ------------------------------------------------------------------------- -from datetime import timedelta - -from azure.monitor.query import MetricsClient, MetricAggregationType -from azure.monitor.query._version import VERSION - -from base_testcase import MetricsClientTestCase - - -METRIC_NAME = "requests/count" -METRIC_RESOURCE_PROVIDER = "Microsoft.Insights/components" - - -class TestMetricsClient(MetricsClientTestCase): - - def test_batch_metrics_auth(self, recorded_test, monitor_info): - client: MetricsClient = self.get_client(MetricsClient, self.get_credential(MetricsClient)) - responses = client.query_resources( - resource_ids=[monitor_info["metrics_resource_id"]], - metric_namespace=METRIC_RESOURCE_PROVIDER, - metric_names=[METRIC_NAME], - aggregations=[MetricAggregationType.COUNT], - ) - assert responses - assert len(responses) == 1 - - def test_batch_metrics_granularity(self, recorded_test, monitor_info): - client: MetricsClient = self.get_client(MetricsClient, self.get_credential(MetricsClient)) - responses = client.query_resources( - resource_ids=[monitor_info["metrics_resource_id"]], - metric_namespace=METRIC_RESOURCE_PROVIDER, - metric_names=[METRIC_NAME], - granularity=timedelta(minutes=5), - aggregations=[MetricAggregationType.COUNT], - ) - assert responses - for response in responses: - assert response.granularity == timedelta(minutes=5) - response.metrics - metric = response.metrics[METRIC_NAME] - assert metric.timeseries - for t in metric.timeseries: - assert t.metadata_values is not None - - def test_client_different_endpoint(self): - credential = self.get_credential(MetricsClient) - endpoint = "https://usgovvirginia.metrics.monitor.azure.us" - audience = "https://metrics.monitor.azure.us" - client = MetricsClient(endpoint, credential, audience=audience) - - assert client._endpoint == endpoint - assert f"{audience}/.default" in client._client._config.authentication_policy._scopes - - def test_client_user_agent(self): - client: MetricsClient = self.get_client(MetricsClient, self.get_credential(MetricsClient)) - assert f"monitor-query/{VERSION}" in client._client._config.user_agent_policy.user_agent diff --git a/sdk/monitor/azure-monitor-query/tests/test_metrics_client_async.py b/sdk/monitor/azure-monitor-query/tests/test_metrics_client_async.py deleted file mode 100644 index d2acff056766..000000000000 --- a/sdk/monitor/azure-monitor-query/tests/test_metrics_client_async.py +++ /dev/null @@ -1,70 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See LICENSE.txt in the project root for -# license information. 
-# ------------------------------------------------------------------------- -from datetime import timedelta - -import pytest - -from azure.monitor.query import MetricAggregationType -from azure.monitor.query._version import VERSION -from azure.monitor.query.aio import MetricsClient - - -from base_testcase import MetricsClientTestCase - - -METRIC_NAME = "requests/count" -METRIC_RESOURCE_PROVIDER = "Microsoft.Insights/components" - - -class TestMetricsClientAsync(MetricsClientTestCase): - - @pytest.mark.asyncio - async def test_batch_metrics_auth(self, recorded_test, monitor_info): - client: MetricsClient = self.get_client(MetricsClient, self.get_credential(MetricsClient, is_async=True)) - async with client: - responses = await client.query_resources( - resource_ids=[monitor_info["metrics_resource_id"]], - metric_namespace=METRIC_RESOURCE_PROVIDER, - metric_names=[METRIC_NAME], - aggregations=[MetricAggregationType.COUNT], - ) - assert responses - assert len(responses) == 1 - - @pytest.mark.asyncio - async def test_batch_metrics_granularity(self, recorded_test, monitor_info): - client: MetricsClient = self.get_client(MetricsClient, self.get_credential(MetricsClient, is_async=True)) - async with client: - responses = await client.query_resources( - resource_ids=[monitor_info["metrics_resource_id"]], - metric_namespace=METRIC_RESOURCE_PROVIDER, - metric_names=[METRIC_NAME], - granularity=timedelta(minutes=5), - aggregations=[MetricAggregationType.COUNT], - ) - assert responses - for response in responses: - assert response.granularity == timedelta(minutes=5) - metric = response.metrics[METRIC_NAME] - assert metric.timeseries - for t in metric.timeseries: - assert t.metadata_values is not None - - @pytest.mark.asyncio - async def test_client_different_endpoint(self): - credential = self.get_credential(MetricsClient, is_async=True) - endpoint = "https://usgovvirginia.metrics.monitor.azure.us" - audience = "https://metrics.monitor.azure.us" - client = MetricsClient(endpoint, credential, audience=audience) - - assert client._endpoint == endpoint - assert f"{audience}/.default" in client._client._config.authentication_policy._scopes - - @pytest.mark.asyncio - async def test_client_user_agent(self): - client: MetricsClient = self.get_client(MetricsClient, self.get_credential(MetricsClient, is_async=True)) - async with client: - assert f"monitor-query/{VERSION}" in client._client._config.user_agent_policy.user_agent diff --git a/sdk/monitor/azure-monitor-query/tests/test_metrics_query_client.py b/sdk/monitor/azure-monitor-query/tests/test_metrics_query_client.py deleted file mode 100644 index 725ebd01652e..000000000000 --- a/sdk/monitor/azure-monitor-query/tests/test_metrics_query_client.py +++ /dev/null @@ -1,125 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See LICENSE.txt in the project root for -# license information. 
-# ------------------------------------------------------------------------- -from datetime import timedelta -from unittest import mock - -from azure.monitor.query import MetricsQueryClient, MetricAggregationType, Metric -from azure.monitor.query._version import VERSION - -from base_testcase import MetricsQueryClientTestCase - - -METRIC_NAME = "requests/count" -METRIC_RESOURCE_PROVIDER = "Microsoft.Insights/components" - - -class TestMetricsQueryClient(MetricsQueryClientTestCase): - - def test_metrics_auth(self, recorded_test, monitor_info): - client = self.get_client(MetricsQueryClient, self.get_credential(MetricsQueryClient)) - response = client.query_resource( - monitor_info["metrics_resource_id"], - metric_names=[METRIC_NAME], - timespan=timedelta(days=1), - aggregations=[MetricAggregationType.COUNT], - ) - assert response - assert response.metrics - - def test_metrics_granularity(self, recorded_test, monitor_info): - client = self.get_client(MetricsQueryClient, self.get_credential(MetricsQueryClient)) - response = client.query_resource( - monitor_info["metrics_resource_id"], - metric_names=[METRIC_NAME], - timespan=timedelta(days=1), - granularity=timedelta(minutes=5), - aggregations=[MetricAggregationType.COUNT], - ) - assert response - assert response.granularity == timedelta(minutes=5) - metric = response.metrics[METRIC_NAME] - assert metric.timeseries - for t in metric.timeseries: - assert t.metadata_values is not None - - def test_metrics_filter(self, recorded_test, monitor_info): - client = self.get_client(MetricsQueryClient, self.get_credential(MetricsQueryClient)) - response = client.query_resource( - monitor_info["metrics_resource_id"], - metric_names=[METRIC_NAME], - timespan=timedelta(days=1), - granularity=timedelta(minutes=5), - filter="request/success eq '0'", - aggregations=[MetricAggregationType.COUNT], - ) - assert response - metric = response.metrics[METRIC_NAME] - for t in metric.timeseries: - assert t.metadata_values is not None - - def test_metrics_list(self, recorded_test, monitor_info): - client = self.get_client(MetricsQueryClient, self.get_credential(MetricsQueryClient)) - response = client.query_resource( - monitor_info["metrics_resource_id"], - metric_names=[METRIC_NAME], - timespan=timedelta(days=1), - granularity=timedelta(minutes=5), - aggregations=[MetricAggregationType.COUNT], - ) - assert response - metrics = response.metrics - assert len(metrics) == 1 - assert metrics[0].__class__ == Metric - assert metrics[METRIC_NAME].__class__ == Metric - assert metrics[METRIC_NAME] == metrics[0] - - def test_metrics_list_with_commas(self): - """Commas in metric names should be encoded as %2.""" - - with mock.patch("azure.monitor.query._generated.metrics.operations.MetricsOperations.list") as mock_list: - mock_list.return_value = {"foo": "bar"} - client = self.get_client(MetricsQueryClient, self.get_credential(MetricsQueryClient)) - client.query_resource( - "resource", - metric_names=["metric1,metric2", "foo,test,test"], - timespan=timedelta(days=1), - granularity=timedelta(minutes=5), - aggregations=[MetricAggregationType.COUNT], - ) - - assert "metricnames" in mock_list.call_args[1] - assert mock_list.call_args[1]["metricnames"] == "metric1%2metric2,foo%2test%2test" - - def test_metrics_namespaces(self, recorded_test, monitor_info): - client = self.get_client(MetricsQueryClient, self.get_credential(MetricsQueryClient)) - - response = client.list_metric_namespaces(monitor_info["metrics_resource_id"]) - - assert response is not None - for item in response: - assert 
item - - def test_metrics_definitions(self, recorded_test, monitor_info): - client = self.get_client(MetricsQueryClient, self.get_credential(MetricsQueryClient)) - response = client.list_metric_definitions( - monitor_info["metrics_resource_id"], namespace=METRIC_RESOURCE_PROVIDER - ) - - assert response is not None - for item in response: - assert item - - def test_client_different_endpoint(self): - credential = self.get_credential(MetricsQueryClient) - endpoint = "https://management.chinacloudapi.cn" - client = MetricsQueryClient(credential, endpoint=endpoint) - - assert client._endpoint == endpoint - assert f"{endpoint}/.default" in client._client._config.authentication_policy._scopes - - def test_client_user_agent(self): - client: MetricsQueryClient = self.get_client(MetricsQueryClient, self.get_credential(MetricsQueryClient)) - assert f"monitor-query/{VERSION}" in client._client._config.user_agent_policy.user_agent diff --git a/sdk/monitor/azure-monitor-query/tests/test_metrics_query_client_async.py b/sdk/monitor/azure-monitor-query/tests/test_metrics_query_client_async.py deleted file mode 100644 index cd5ebf5fb3e6..000000000000 --- a/sdk/monitor/azure-monitor-query/tests/test_metrics_query_client_async.py +++ /dev/null @@ -1,149 +0,0 @@ -# ------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See LICENSE.txt in the project root for -# license information. -# ------------------------------------------------------------------------- -from datetime import timedelta -import sys -from unittest import mock - -import pytest - -from azure.monitor.query import MetricAggregationType, Metric -from azure.monitor.query._version import VERSION -from azure.monitor.query.aio import MetricsQueryClient - -from base_testcase import MetricsQueryClientTestCase - - -METRIC_NAME = "requests/count" -METRIC_RESOURCE_PROVIDER = "Microsoft.Insights/components" - - -class TestMetricsQueryClientAsync(MetricsQueryClientTestCase): - - @pytest.mark.asyncio - async def test_metrics_auth(self, recorded_test, monitor_info): - client = self.get_client(MetricsQueryClient, self.get_credential(MetricsQueryClient, is_async=True)) - async with client: - response = await client.query_resource( - monitor_info["metrics_resource_id"], - metric_names=[METRIC_NAME], - timespan=timedelta(days=1), - aggregations=[MetricAggregationType.COUNT], - ) - assert response - assert response.metrics - - @pytest.mark.asyncio - async def test_metrics_granularity(self, recorded_test, monitor_info): - client = self.get_client(MetricsQueryClient, self.get_credential(MetricsQueryClient, is_async=True)) - async with client: - response = await client.query_resource( - monitor_info["metrics_resource_id"], - metric_names=[METRIC_NAME], - timespan=timedelta(days=1), - granularity=timedelta(minutes=5), - aggregations=[MetricAggregationType.COUNT], - ) - assert response - assert response.granularity == timedelta(minutes=5) - metric = response.metrics[METRIC_NAME] - assert metric.timeseries - for t in metric.timeseries: - assert t.metadata_values is not None - - @pytest.mark.asyncio - async def test_metrics_filter(self, recorded_test, monitor_info): - client = self.get_client(MetricsQueryClient, self.get_credential(MetricsQueryClient, is_async=True)) - async with client: - response = await client.query_resource( - monitor_info["metrics_resource_id"], - metric_names=[METRIC_NAME], - timespan=timedelta(days=1), - 
granularity=timedelta(minutes=5), - filter="request/success eq '0'", - aggregations=[MetricAggregationType.COUNT], - ) - assert response - metric = response.metrics[METRIC_NAME] - for t in metric.timeseries: - assert t.metadata_values is not None - - @pytest.mark.asyncio - async def test_metrics_list(self, recorded_test, monitor_info): - client = self.get_client(MetricsQueryClient, self.get_credential(MetricsQueryClient, is_async=True)) - async with client: - response = await client.query_resource( - monitor_info["metrics_resource_id"], - metric_names=[METRIC_NAME], - timespan=timedelta(days=1), - granularity=timedelta(minutes=5), - aggregations=[MetricAggregationType.COUNT], - ) - assert response - metrics = response.metrics - assert len(metrics) == 1 - assert metrics[0].__class__ == Metric - assert metrics[METRIC_NAME].__class__ == Metric - assert metrics[METRIC_NAME] == metrics[0] - - @pytest.mark.asyncio - @pytest.mark.skipif(sys.version_info < (3, 8), reason="async mocks work differently in Python <= 3.7") - async def test_metrics_list_with_commas(self): - """Commas in metric names should be encoded as %2.""" - - with mock.patch("azure.monitor.query._generated.metrics.aio.operations.MetricsOperations.list") as mock_list: - mock_list.return_value = {"foo": "bar"} - client = self.get_client(MetricsQueryClient, self.get_credential(MetricsQueryClient, is_async=True)) - async with client: - await client.query_resource( - "resource", - metric_names=["metric1,metric2", "foo,test,test"], - timespan=timedelta(days=1), - granularity=timedelta(minutes=5), - aggregations=[MetricAggregationType.COUNT], - ) - - assert "metricnames" in mock_list.call_args[1] - assert mock_list.call_args[1]["metricnames"] == "metric1%2metric2,foo%2test%2test" - - @pytest.mark.asyncio - async def test_metrics_namespaces(self, recorded_test, monitor_info): - client = self.get_client(MetricsQueryClient, self.get_credential(MetricsQueryClient, is_async=True)) - - async with client: - response = client.list_metric_namespaces(monitor_info["metrics_resource_id"]) - - assert response is not None - async for item in response: - assert item - - @pytest.mark.asyncio - async def test_metrics_definitions(self, recorded_test, monitor_info): - client = self.get_client(MetricsQueryClient, self.get_credential(MetricsQueryClient, is_async=True)) - - async with client: - response = client.list_metric_definitions( - monitor_info["metrics_resource_id"], namespace=METRIC_RESOURCE_PROVIDER - ) - - assert response is not None - async for item in response: - assert item - - @pytest.mark.asyncio - async def test_client_different_endpoint(self): - credential = self.get_credential(MetricsQueryClient) - endpoint = "https://management.chinacloudapi.cn" - client = MetricsQueryClient(credential, endpoint=endpoint) - - assert client._endpoint == endpoint - assert f"{endpoint}/.default" in client._client._config.authentication_policy._scopes - - @pytest.mark.asyncio - async def test_client_user_agent(self): - credential = self.get_credential(MetricsQueryClient, is_async=True) - client: MetricsQueryClient = self.get_client(MetricsQueryClient, credential) - async with client: - assert f"monitor-query/{VERSION}" in client._client._config.user_agent_policy.user_agent diff --git a/sdk/monitor/azure-monitor-query/tsp-location.yaml b/sdk/monitor/azure-monitor-query/tsp-location.yaml new file mode 100644 index 000000000000..2d4f3eb22554 --- /dev/null +++ b/sdk/monitor/azure-monitor-query/tsp-location.yaml @@ -0,0 +1,4 @@ +directory: 
specification/monitor/Monitor.Query.Logs
+commit: fa3a001758bd80c44ec4aaf7b387f5e8e24f8287
+repo: Azure/azure-rest-api-specs
+additionalDirectories:

From f2524e0c7f14dd65a17da82e0ac96a358596ecc7 Mon Sep 17 00:00:00 2001
From: Paul Van Eck
Date: Tue, 29 Jul 2025 18:11:07 +0000
Subject: [PATCH 2/3] Update changelog

Signed-off-by: Paul Van Eck
---
 sdk/monitor/azure-monitor-query/CHANGELOG.md | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/sdk/monitor/azure-monitor-query/CHANGELOG.md b/sdk/monitor/azure-monitor-query/CHANGELOG.md
index 050498cec586..af42cc25d77c 100644
--- a/sdk/monitor/azure-monitor-query/CHANGELOG.md
+++ b/sdk/monitor/azure-monitor-query/CHANGELOG.md
@@ -7,8 +7,9 @@
 ### Breaking Changes
 
 - `MetricsClient` and `MetricsQueryClient` have been removed from the `azure-monitor-query` package. This is part of the migration to split logs and metrics functionality into separate packages. ([#42205](https://github.com/Azure/azure-sdk-for-python/pull/42205))
-  - The `MetricsClient` has been moved to the new `azure-monitor-querymetrics`
+  - The `MetricsClient` has been moved to the new `azure-monitor-querymetrics` package.
   - Functionality provided by `MetricsQueryClient` can be accessed through the `azure-mgmt-monitor` package.
+  - For more details, see the [migration guide](https://aka.ms/azsdk/python/monitor/query/migration).
 
 ### Bugs Fixed

From 7764de7b5e9feae5c88506f18b9d539343bc5c58 Mon Sep 17 00:00:00 2001
From: Paul Van Eck
Date: Tue, 29 Jul 2025 21:39:22 +0000
Subject: [PATCH 3/3] Remove unnecessary test

Signed-off-by: Paul Van Eck
---
 .../azure-monitor-query/tests/test_helpers.py | 33 -------------------
 1 file changed, 33 deletions(-)
 delete mode 100644 sdk/monitor/azure-monitor-query/tests/test_helpers.py

diff --git a/sdk/monitor/azure-monitor-query/tests/test_helpers.py b/sdk/monitor/azure-monitor-query/tests/test_helpers.py
deleted file mode 100644
index 6da15c1eaeb6..000000000000
--- a/sdk/monitor/azure-monitor-query/tests/test_helpers.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# pylint: disable=line-too-long,useless-suppression
-# -------------------------------------------------------------------------
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License. See LICENSE.txt in the project root for
-# license information.
-# -------------------------------------------------------------------------
-from datetime import datetime, timedelta
-import pytest
-
-from azure.monitor.query._helpers import get_subscription_id_from_resource
-
-
-def test_get_subscription_id_from_resource():
-    assert (
-        get_subscription_id_from_resource(
-            "/subscriptions/00000000-1111-2222-3333-000000000000/resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/vm"
-        )
-        == "00000000-1111-2222-3333-000000000000"
-    )
-
-    # Test without preceding slash
-    assert (
-        get_subscription_id_from_resource(
-            "subscriptions/00000000-1111-2222-3333-000000000000/resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/vm"
-        )
-        == "00000000-1111-2222-3333-000000000000"
-    )
-
-    with pytest.raises(ValueError):
-        get_subscription_id_from_resource("/resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/vm")
-
-    with pytest.raises(ValueError):
-        get_subscription_id_from_resource("")
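For consumers upgrading past this change, the changelog entry in PATCH 2/3 implies an import-level migration along the following lines. This is a minimal sketch and not part of the patch itself: it assumes the new `azure-monitor-querymetrics` package exposes `MetricsClient` under the matching `azure.monitor.querymetrics` namespace, and the regional metrics endpoint shown is illustrative (the same form used in the removed samples).

```python
from azure.identity import DefaultAzureCredential

# Logs querying stays in azure-monitor-query.
from azure.monitor.query import LogsQueryClient

# Metrics querying moves out of this package. Previously:
#   from azure.monitor.query import MetricsClient
# Assumed new import (namespace mirroring the package name in the changelog):
#   from azure.monitor.querymetrics import MetricsClient

credential = DefaultAzureCredential()
logs_client = LogsQueryClient(credential)

# Hypothetical post-migration construction, per the old MetricsClient signature:
# metrics_client = MetricsClient("https://westus3.metrics.monitor.azure.com", credential)
```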