diff --git a/src/quantum/HISTORY.rst b/src/quantum/HISTORY.rst index b050e5dd1db..b0e6da8c957 100644 --- a/src/quantum/HISTORY.rst +++ b/src/quantum/HISTORY.rst @@ -3,6 +3,11 @@ Release History =============== +1.0.0b12 ++++++++++++++++ +* Added support for Data Plane v2, including specifying a priority parameter as part of job params when submitting a job +* Removed container creation logic when retrieving the linked storage account from the service + 1.0.0b11 +++++++++++++++ * Remove `__import__('pkg_resources').declare_namespace(__name__)` to fix the namespace package issue. diff --git a/src/quantum/azext_quantum/_breaking_change.py b/src/quantum/azext_quantum/_breaking_change.py new file mode 100644 index 00000000000..d100bbd5a8c --- /dev/null +++ b/src/quantum/azext_quantum/_breaking_change.py @@ -0,0 +1,17 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------------------------- +from azure.cli.core.breaking_change import register_argument_deprecate + +register_argument_deprecate('quantum execute', '--location', target_version="May 2026") +register_argument_deprecate('quantum run', '--location', target_version="May 2026") +register_argument_deprecate('quantum job submit', '--location', target_version="May 2026") +register_argument_deprecate('quantum job cancel', '--location', target_version="May 2026") +register_argument_deprecate('quantum job list', '--location', target_version="May 2026") +register_argument_deprecate('quantum job output', '--location', target_version="May 2026") +register_argument_deprecate('quantum job show', '--location', target_version="May 2026") +register_argument_deprecate('quantum job wait', '--location', target_version="May 2026") +register_argument_deprecate('quantum target list', '--location', target_version="May 2026") +register_argument_deprecate('quantum workspace set', '--location', target_version="May 2026") +register_argument_deprecate('quantum workspace quotas', '--location', target_version="May 2026") diff --git a/src/quantum/azext_quantum/_client_factory.py b/src/quantum/azext_quantum/_client_factory.py index b5b8897afa5..db219bc6803 100644 --- a/src/quantum/azext_quantum/_client_factory.py +++ b/src/quantum/azext_quantum/_client_factory.py @@ -8,6 +8,8 @@ import os from ._location_helper import normalize_location from .__init__ import CLI_REPORTED_VERSION +from .vendored_sdks.azure_quantum_python._client import WorkspaceClient +from .vendored_sdks.azure_mgmt_quantum import AzureQuantumManagementClient def is_env(name): @@ -38,9 +40,8 @@ def get_appid(): # Control Plane clients -def cf_quantum_mgmt(cli_ctx, *_): +def cf_quantum_mgmt(cli_ctx, *_) -> AzureQuantumManagementClient: from azure.cli.core.commands.client_factory import get_mgmt_service_client - from .vendored_sdks.azure_mgmt_quantum import 
AzureQuantumManagementClient client = get_mgmt_service_client(cli_ctx, AzureQuantumManagementClient, base_url_bound=False) # Add user agent on the management client to include extension information client._config.user_agent_policy.add_user_agent(get_appid()) @@ -61,22 +62,26 @@ def cf_offerings(cli_ctx, *_): # Data Plane clients -def cf_quantum(cli_ctx, subscription_id=None, location=None): - from .vendored_sdks.azure_quantum import ServicesClient - creds = _get_data_credentials(cli_ctx, subscription_id) - return ServicesClient(location, creds) +def cf_quantum(cli_ctx, subscription: str, resource_group: str, ws_name: str, endpoint: str | None) -> WorkspaceClient: + creds = _get_data_credentials(cli_ctx, subscription) + if not endpoint: + client = cf_workspaces(cli_ctx) + ws = client.get(resource_group, ws_name) + endpoint = ws.properties.endpoint_uri + ws_cl = WorkspaceClient(endpoint, creds) + return ws_cl -def cf_providers(cli_ctx, subscription_id=None, location=None): - return cf_quantum(cli_ctx, subscription_id, location).providers +def cf_providers(cli_ctx, subscription: str, resource_group: str, ws_name: str, endpoint: str | None): + return cf_quantum(cli_ctx, subscription, resource_group, ws_name, endpoint).services.providers -def cf_jobs(cli_ctx, subscription_id=None, location=None): - return cf_quantum(cli_ctx, subscription_id, location).jobs +def cf_jobs(cli_ctx, subscription: str, resource_group: str, ws_name: str, endpoint: str | None): + return cf_quantum(cli_ctx, subscription, resource_group, ws_name, endpoint).services.jobs -def cf_quotas(cli_ctx, subscription_id=None, location=None): - return cf_quantum(cli_ctx, subscription_id, location).quotas +def cf_quotas(cli_ctx, subscription: str, resource_group: str, ws_name: str, endpoint: str | None): + return cf_quantum(cli_ctx, subscription, resource_group, ws_name, endpoint).services.quotas # Helper clients diff --git a/src/quantum/azext_quantum/_validators.py b/src/quantum/azext_quantum/_validators.py 
index edf8859fdb7..34e642d2c84 100644 --- a/src/quantum/azext_quantum/_validators.py +++ b/src/quantum/azext_quantum/_validators.py @@ -9,14 +9,13 @@ from .operations.target import TargetInfo -def validate_workspace_internal(cmd, namespace, require_location): +def validate_workspace_info(cmd, namespace): """ - Internal implementation to validate workspace info parameters with an optional location + Makes sure all parameters for a workspace are available. """ group = getattr(namespace, 'resource_group_name', None) name = getattr(namespace, 'workspace_name', None) - location = getattr(namespace, 'location', None) - ws = WorkspaceInfo(cmd, group, name, location) + ws = WorkspaceInfo(cmd, group, name) if not ws.subscription: raise ValueError("Missing subscription argument") @@ -24,22 +23,6 @@ def validate_workspace_internal(cmd, namespace, require_location): raise ValueError("Missing resource-group argument") if not ws.name: raise ValueError("Missing workspace-name argument") - if require_location and not ws.location: - raise ValueError("Missing location argument") - - -def validate_workspace_info(cmd, namespace): - """ - Makes sure all parameters for a workspace are available including location. - """ - validate_workspace_internal(cmd, namespace, True) - - -def validate_workspace_info_no_location(cmd, namespace): - """ - Makes sure all parameters for a workspace are available, not including location. 
- """ - validate_workspace_internal(cmd, namespace, False) def validate_target_info(cmd, namespace): diff --git a/src/quantum/azext_quantum/commands.py b/src/quantum/azext_quantum/commands.py index 2932ee4c95b..7aa79493759 100644 --- a/src/quantum/azext_quantum/commands.py +++ b/src/quantum/azext_quantum/commands.py @@ -9,7 +9,7 @@ from collections import OrderedDict from azure.cli.core.commands import CliCommandType -from ._validators import validate_workspace_info, validate_target_info, validate_workspace_and_target_info, validate_workspace_info_no_location, validate_provider_and_sku_info +from ._validators import validate_workspace_info, validate_target_info, validate_workspace_and_target_info, validate_provider_and_sku_info logger = logging.getLogger(__name__) @@ -126,9 +126,9 @@ def load_command_table(self, _): with self.command_group('quantum workspace', workspace_ops) as w: w.command('create', 'create') - w.command('delete', 'delete', validator=validate_workspace_info_no_location) + w.command('delete', 'delete', validator=validate_workspace_info) w.command('list', 'list') - w.show_command('show', validator=validate_workspace_info_no_location) + w.show_command('show', validator=validate_workspace_info) w.command('set', 'set', validator=validate_workspace_info) w.command('clear', 'clear') w.command('quotas', 'quotas', validator=validate_workspace_info) diff --git a/src/quantum/azext_quantum/operations/job.py b/src/quantum/azext_quantum/operations/job.py index adf5dcf8493..a2bed9a417a 100644 --- a/src/quantum/azext_quantum/operations/job.py +++ b/src/quantum/azext_quantum/operations/job.py @@ -3,7 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for license information. 
# -------------------------------------------------------------------------------------------- -# pylint: disable=line-too-long,redefined-builtin,bare-except,inconsistent-return-statements,too-many-locals,too-many-branches,too-many-statements +# pylint: disable=line-too-long,redefined-builtin,bare-except,inconsistent-return-statements,too-many-locals,too-many-branches,too-many-statements,unused-argument import json import logging @@ -11,12 +11,14 @@ import uuid import knack.log +from azure.core.exceptions import HttpResponseError from azure.cli.core.azclierror import (FileOperationError, AzureInternalError, InvalidArgumentValueError, AzureResponseError, RequiredArgumentMissingError) from ..vendored_sdks.azure_quantum_python.workspace import Workspace from ..vendored_sdks.azure_quantum_python.storage import upload_blob +from ..vendored_sdks.azure_quantum_python._client.models import Priority from ..vendored_sdks.azure_storage_blob import BlobClient, ContainerClient from .._client_factory import cf_jobs, _get_data_credentials from .._list_helper import repack_response_json @@ -33,6 +35,7 @@ JOB_SUBMIT_DOC_LINK_MSG = "See https://learn.microsoft.com/cli/azure/quantum/job?view=azure-cli-latest#az-quantum-job-submit" ERROR_MSG_INVALID_ORDER_ARGUMENT = "The --order argument is not valid: Specify either asc or desc" ERROR_MSG_MISSING_ORDERBY_ARGUMENT = "The --order argument is not valid without an --orderby argument" +ERROR_MSG_INVALID_PRIORITY_ARGUMENT = f'The "priority" parameter is not valid. Known values are: {", ".join(p.value for p in Priority)}.' 
JOB_LIST_DOC_LINK_MSG = "See https://learn.microsoft.com/cli/azure/quantum/job?view=azure-cli-latest#az-quantum-job-list" # Job types @@ -43,14 +46,14 @@ knack_logger = knack.log.get_logger(__name__) -def list(cmd, resource_group_name, workspace_name, location, job_type=None, item_type=None, provider_id=None, +def list(cmd, resource_group_name, workspace_name, location=None, job_type=None, item_type=None, provider_id=None, target_id=None, job_status=None, created_after=None, created_before=None, job_name=None, skip=None, top=None, orderby=None, order=None): """ Get the list of jobs in a Quantum Workspace. """ - info = WorkspaceInfo(cmd, resource_group_name, workspace_name, location) - client = cf_jobs(cmd.cli_ctx, info.subscription, info.location) + info = WorkspaceInfo(cmd, resource_group_name, workspace_name) + client = cf_jobs(cmd.cli_ctx, info.subscription, info.resource_group, info.name, info.endpoint) query = _construct_filter_query(job_type, item_type, provider_id, target_id, job_status, created_after, created_before, job_name) orderby_expression = _construct_orderby_expression(orderby, order) @@ -142,13 +145,17 @@ def get(cmd, job_id, resource_group_name=None, workspace_name=None, location=Non """ Get the job's status and details. 
""" - info = WorkspaceInfo(cmd, resource_group_name, workspace_name, location) - client = cf_jobs(cmd.cli_ctx, info.subscription, info.resource_group, info.name, info.location) + info = WorkspaceInfo(cmd, resource_group_name, workspace_name) + client = cf_jobs(cmd.cli_ctx, info.subscription, info.resource_group, info.name, info.endpoint) return client.get(job_id) def _has_completed(job): - return job.status in ("Succeeded", "Failed", "Cancelled") + return job.status in ("Succeeded", "Failed", "Cancelled", "Completed") + + +def _has_succeeded(job): + return job.status == "Succeeded" or job.status == "Completed" def _convert_numeric_params(job_params): @@ -166,20 +173,20 @@ def _convert_numeric_params(job_params): pass -def submit(cmd, resource_group_name, workspace_name, location, target_id, job_input_file, job_input_format, +def submit(cmd, resource_group_name, workspace_name, target_id, job_input_file, job_input_format, location=None, job_name=None, shots=None, storage=None, job_params=None, target_capability=None, job_output_format=None, entry_point=None): """ Submit QIR or a pass-through job to run on Azure Quantum. 
""" # Get workspace, target, and provider information - ws_info = WorkspaceInfo(cmd, resource_group_name, workspace_name, location) + ws_info = WorkspaceInfo(cmd, resource_group_name, workspace_name) if ws_info is None: raise AzureInternalError("Failed to get workspace information.") target_info = TargetInfo(cmd, target_id) if target_info is None: raise AzureInternalError("Failed to get target information.") - provider_id = get_provider(cmd, target_info.target_id, resource_group_name, workspace_name, location) + provider_id = get_provider(cmd, target_info.target_id, resource_group_name, workspace_name) if provider_id is None: raise AzureInternalError(f"Failed to find a Provider ID for the specified Target ID, {target_info.target_id}") @@ -205,6 +212,7 @@ def submit(cmd, resource_group_name, workspace_name, location, target_id, job_in # metadata = None tags = [] + priority = None if job_params is not None: if "metadata" in job_params.keys(): metadata = job_params["metadata"] @@ -221,6 +229,15 @@ def submit(cmd, resource_group_name, workspace_name, location, target_id, job_in if not isinstance(tags, list_type): raise InvalidArgumentValueError('The "tags" parameter is not valid.') + if "priority" in job_params.keys(): + priority = job_params["priority"] + del job_params["priority"] + try: + if priority is not None: + priority = Priority(priority).value + except ValueError: + raise InvalidArgumentValueError(ERROR_MSG_INVALID_PRIORITY_ARGUMENT) + # Extract content type and content encoding from --job-parameters, then remove those parameters from job_params, since # they should not be included in the "inputParams" parameter of job_details. Content type and content encoding are # parameters of the upload_blob function. 
These parameters are accepted in three case-formats: kebab-case, snake_case, @@ -275,7 +292,7 @@ def submit(cmd, resource_group_name, workspace_name, location, target_id, job_in resource_id = "/subscriptions/" + ws_info.subscription + "/resourceGroups/" + ws_info.resource_group + "/providers/Microsoft.Quantum/Workspaces/" + ws_info.name credential = _get_data_credentials(cmd.cli_ctx, ws_info.subscription) - workspace = Workspace(resource_id=resource_id, location=location, credential=credential) + workspace = Workspace(resource_id=resource_id, credential=credential) container_uri = workspace.get_container_uri(job_id=job_id) container_client = ContainerClient.from_container_url(container_uri) @@ -327,7 +344,7 @@ def submit(cmd, resource_group_name, workspace_name, location, target_id, job_in job_params["shots"] = DEFAULT_SHOTS # Submit the job - client = cf_jobs(cmd.cli_ctx, ws_info.subscription, ws_info.location) + client = cf_jobs(cmd.cli_ctx, ws_info.subscription, ws_info.resource_group, ws_info.name, ws_info.endpoint) job_details = {'name': job_name, 'containerUri': container_uri, 'inputDataFormat': job_input_format, @@ -337,20 +354,22 @@ def submit(cmd, resource_group_name, workspace_name, location, target_id, job_in 'target': target_info.target_id, 'metadata': metadata, 'tags': tags} + if priority: + job_details['priority'] = priority knack_logger.warning("Submitting job...") - return client.create_or_replace(ws_info.subscription, ws_info.resource_group, ws_info.name, job_id, job_details).as_dict() + return client.create(ws_info.subscription, ws_info.resource_group, ws_info.name, job_id, job_details).as_dict() -def output(cmd, job_id, resource_group_name, workspace_name, location): +def output(cmd, job_id, resource_group_name, workspace_name, location=None): """ Get the results of running a job. 
""" - info = WorkspaceInfo(cmd, resource_group_name, workspace_name, location) - client = cf_jobs(cmd.cli_ctx, info.subscription, info.location) + info = WorkspaceInfo(cmd, resource_group_name, workspace_name) + client = cf_jobs(cmd.cli_ctx, info.subscription, info.resource_group, info.name, info.endpoint) job = client.get(info.subscription, info.resource_group, info.name, job_id) - if job.status != "Succeeded": + if not _has_succeeded(job): return job # If "-o table" is specified, this allows transform_output() in commands.py # to format the output, so the error info is shown. If "-o json" or no "-o" # parameter is specified, then the full JSON job output is displayed, being @@ -359,14 +378,14 @@ def output(cmd, job_id, resource_group_name, workspace_name, location): return _get_job_output(job) -def wait(cmd, job_id, resource_group_name, workspace_name, location, max_poll_wait_secs=5): +def wait(cmd, job_id, resource_group_name, workspace_name, location=None, max_poll_wait_secs=5): """ Place the CLI in a waiting state until the job finishes running. """ import time - info = WorkspaceInfo(cmd, resource_group_name, workspace_name, location) - client = cf_jobs(cmd.cli_ctx, info.subscription, info.location) + info = WorkspaceInfo(cmd, resource_group_name, workspace_name) + client = cf_jobs(cmd.cli_ctx, info.subscription, info.resource_group, info.name, info.endpoint) # TODO: LROPoller... wait_indicators_used = False @@ -388,51 +407,56 @@ def wait(cmd, job_id, resource_group_name, workspace_name, location, max_poll_wa return job.as_dict() -def job_show(cmd, job_id, resource_group_name, workspace_name, location): +def job_show(cmd, job_id, resource_group_name, workspace_name, location=None): """ Get the job's status and details. 
""" - info = WorkspaceInfo(cmd, resource_group_name, workspace_name, location) - client = cf_jobs(cmd.cli_ctx, info.subscription, info.location) + info = WorkspaceInfo(cmd, resource_group_name, workspace_name) + client = cf_jobs(cmd.cli_ctx, info.subscription, info.resource_group, info.name, info.endpoint) job = client.get(info.subscription, info.resource_group, info.name, job_id) return job.as_dict() -def run(cmd, resource_group_name, workspace_name, location, target_id, job_input_file, job_input_format, +def run(cmd, resource_group_name, workspace_name, target_id, job_input_file, job_input_format, location=None, job_name=None, shots=None, storage=None, job_params=None, target_capability=None, job_output_format=None, entry_point=None): """ Submit a job to run on Azure Quantum, and wait for the result. """ - job = submit(cmd, resource_group_name, workspace_name, location, target_id, job_input_file, job_input_format, + job = submit(cmd, resource_group_name, workspace_name, target_id, job_input_file, job_input_format, location, job_name, shots, storage, job_params, target_capability, job_output_format, entry_point) logger.warning("Job id: %s", job["id"]) logger.debug(job) - job = wait(cmd, job["id"], resource_group_name, workspace_name, location) + job = wait(cmd, job["id"], resource_group_name, workspace_name) logger.debug(job) - return output(cmd, job["id"], resource_group_name, workspace_name, location) + return output(cmd, job["id"], resource_group_name, workspace_name) -def cancel(cmd, job_id, resource_group_name, workspace_name, location): +def cancel(cmd, job_id, resource_group_name, workspace_name, location=None): """ Request to cancel a job on Azure Quantum if it hasn't completed. 
""" - info = WorkspaceInfo(cmd, resource_group_name, workspace_name, location) - client = cf_jobs(cmd.cli_ctx, info.subscription, info.location) + info = WorkspaceInfo(cmd, resource_group_name, workspace_name) + client = cf_jobs(cmd.cli_ctx, info.subscription, info.resource_group, info.name, info.endpoint) job = client.get(info.subscription, info.resource_group, info.name, job_id) if _has_completed(job): print(f"Job {job_id} has already completed with status: {job.status}.") return - # If the job hasn't succeeded or failed, attempt to cancel. - client.delete(info.subscription, info.resource_group, info.name, job_id) # JobOperations.cancel has been replaced with .delete in the updated DP client + try: + client.cancel(info.subscription, info.resource_group, info.name, job_id) + except HttpResponseError as e: + # because of historical behavior of the service, the 204 No Content response is returned when cancellation request is succeeded. + # while backend is not updated to return 200 according to a guideline, let's handle that here to align with typespecs + if e.status_code != 204: + raise # Wait for the job status to complete or be reported as cancelled - return wait(cmd, job_id, info.resource_group, info.name, info.location) + return wait(cmd, job_id, info.resource_group, info.name) def _get_job_output(job): diff --git a/src/quantum/azext_quantum/operations/target.py b/src/quantum/azext_quantum/operations/target.py index bf8be93216c..90fdb284b30 100644 --- a/src/quantum/azext_quantum/operations/target.py +++ b/src/quantum/azext_quantum/operations/target.py @@ -3,7 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for license information. 
# -------------------------------------------------------------------------------------------- -# pylint: disable=line-too-long,redefined-builtin +# pylint: disable=line-too-long,redefined-builtin,unused-argument from .._client_factory import cf_providers from .._list_helper import repack_response_json @@ -49,12 +49,12 @@ def set(cmd, target_id): return info -def list(cmd, resource_group_name, workspace_name, location): +def list(cmd, resource_group_name, workspace_name, location=None): """ Get the list of providers and their targets in an Azure Quantum workspace. """ - info = WorkspaceInfo(cmd, resource_group_name, workspace_name, location) - client = cf_providers(cmd.cli_ctx, info.subscription, info.location) + info = WorkspaceInfo(cmd, resource_group_name, workspace_name) + client = cf_providers(cmd.cli_ctx, info.subscription, info.resource_group, info.name, info.endpoint) response = client.list(info.subscription, info.resource_group, info.name) return repack_response_json(response) @@ -79,12 +79,12 @@ def target_show(cmd, target_id): return info -def get_provider(cmd, target_id, resource_group_name, workspace_name, location): +def get_provider(cmd, target_id, resource_group_name, workspace_name): """ Get the the Provider ID for a specific target """ provider_id = None - provider_list = list(cmd, resource_group_name, workspace_name, location) + provider_list = list(cmd, resource_group_name, workspace_name) if provider_list is not None: for item in provider_list: for target_item in item["targets"]: diff --git a/src/quantum/azext_quantum/operations/workspace.py b/src/quantum/azext_quantum/operations/workspace.py index aff8f0c40c1..113afb218f1 100644 --- a/src/quantum/azext_quantum/operations/workspace.py +++ b/src/quantum/azext_quantum/operations/workspace.py @@ -3,7 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for license information. 
# -------------------------------------------------------------------------------------------- -# pylint: disable=line-too-long,redefined-builtin,unnecessary-comprehension, too-many-locals, too-many-statements, too-many-nested-blocks +# pylint: disable=line-too-long,redefined-builtin,unnecessary-comprehension, too-many-locals, too-many-statements, too-many-nested-blocks, unused-argument import os.path import json @@ -48,7 +48,7 @@ class WorkspaceInfo: - def __init__(self, cmd, resource_group_name=None, workspace_name=None, location=None): + def __init__(self, cmd, resource_group_name=None, workspace_name=None, endpoint=None): from azure.cli.core.commands.client_factory import get_subscription_id # Hierarchically selects the value for the given key. @@ -63,22 +63,22 @@ def select_value(key, value): self.subscription = get_subscription_id(cmd.cli_ctx) self.resource_group = select_value('group', resource_group_name) self.name = select_value('workspace', workspace_name) - self.location = select_value('location', location) + self.endpoint = select_value('endpoint', endpoint) def clear(self): self.subscription = '' self.resource_group = '' self.name = '' - self.location = '' + self.endpoint = '' - def save(self, cmd): + def save(self, cmd, endpoint=''): from azure.cli.core.util import ConfiguredDefaultSetter # Save in the global [defaults] section of the .azure\config file with ConfiguredDefaultSetter(cmd.cli_ctx.config, False): cmd.cli_ctx.config.set_value(cmd.cli_ctx.config.defaults_section_name, 'group', self.resource_group) cmd.cli_ctx.config.set_value(cmd.cli_ctx.config.defaults_section_name, 'workspace', self.name) - cmd.cli_ctx.config.set_value(cmd.cli_ctx.config.defaults_section_name, 'location', self.location) + cmd.cli_ctx.config.set_value(cmd.cli_ctx.config.defaults_section_name, 'endpoint', endpoint) def _show_tip(msg): @@ -206,7 +206,7 @@ def create(cmd, resource_group_name, workspace_name, location, storage_account, raise RequiredArgumentMissingError("A 
quantum workspace requires a valid storage account.") if not location: raise RequiredArgumentMissingError("A location for the new quantum workspace is required.") - info = WorkspaceInfo(cmd, resource_group_name, workspace_name, location) + info = WorkspaceInfo(cmd, resource_group_name, workspace_name) if not info.resource_group: raise ResourceNotFoundError("Please run 'az quantum workspace set' first to select a default resource group.") quantum_workspace = _get_basic_quantum_workspace(location, info, storage_account) @@ -335,32 +335,32 @@ def get(cmd, resource_group_name=None, workspace_name=None): Get the details of the given (or current) Azure Quantum workspace. """ client = cf_workspaces(cmd.cli_ctx) - info = WorkspaceInfo(cmd, resource_group_name, workspace_name, None) + info = WorkspaceInfo(cmd, resource_group_name, workspace_name) if (not info.resource_group) or (not info.name): raise ResourceNotFoundError("Please run 'az quantum workspace set' first to select a default Quantum Workspace.") ws = client.get(info.resource_group, info.name) return ws -def quotas(cmd, resource_group_name, workspace_name, location): +def quotas(cmd, resource_group_name, workspace_name, location=None): """ List the quotas for the given (or current) Azure Quantum workspace. """ - info = WorkspaceInfo(cmd, resource_group_name, workspace_name, location) - client = cf_quotas(cmd.cli_ctx, info.subscription, info.location) + info = WorkspaceInfo(cmd, resource_group_name, workspace_name) + client = cf_quotas(cmd.cli_ctx, info.subscription, info.resource_group, info.name, info.endpoint) response = client.list(info.subscription, info.resource_group, info.name) return repack_response_json(response) -def set(cmd, workspace_name, resource_group_name, location): +def set(cmd, workspace_name, resource_group_name, location=None): """ Set the default Azure Quantum workspace. 
""" client = cf_workspaces(cmd.cli_ctx) - info = WorkspaceInfo(cmd, resource_group_name, workspace_name, location) + info = WorkspaceInfo(cmd, resource_group_name, workspace_name) ws = client.get(info.resource_group, info.name) if ws: - info.save(cmd) + info.save(cmd, ws.properties.endpoint_uri) return ws @@ -378,7 +378,7 @@ def list_keys(cmd, resource_group_name=None, workspace_name=None): List Azure Quantum workspace api keys. """ client = cf_workspace(cmd.cli_ctx) - info = WorkspaceInfo(cmd, resource_group_name, workspace_name, None) + info = WorkspaceInfo(cmd, resource_group_name, workspace_name) if (not info.resource_group) or (not info.name): raise ResourceNotFoundError("Please run 'az quantum workspace set' first to select a default Quantum Workspace.") @@ -391,7 +391,7 @@ def regenerate_keys(cmd, resource_group_name=None, workspace_name=None, key_type Regenerate Azure Quantum workspace api keys. """ client = cf_workspace(cmd.cli_ctx) - info = WorkspaceInfo(cmd, resource_group_name, workspace_name, None) + info = WorkspaceInfo(cmd, resource_group_name, workspace_name) if (not info.resource_group) or (not info.name): raise ResourceNotFoundError("Please run 'az quantum workspace set' first to select a default Quantum Workspace.") @@ -413,7 +413,7 @@ def enable_keys(cmd, resource_group_name=None, workspace_name=None, enable_key=N Update the default Azure Quantum workspace. 
""" client = cf_workspaces(cmd.cli_ctx) - info = WorkspaceInfo(cmd, resource_group_name, workspace_name, None) + info = WorkspaceInfo(cmd, resource_group_name, workspace_name) if (not info.resource_group) or (not info.name): raise ResourceNotFoundError("Please run 'az quantum workspace set' first to select a default Quantum Workspace.") @@ -426,7 +426,8 @@ def enable_keys(cmd, resource_group_name=None, workspace_name=None, enable_key=N ws.properties.api_key_enabled = True elif (enable_key in ["False", "false"]): ws.properties.api_key_enabled = False - ws = client.begin_create_or_update(info.resource_group, info.name, ws) - if ws: - info.save(cmd) + lropoller = client.begin_create_or_update(info.resource_group, info.name, ws) + if lropoller: + ws = lropoller.result() + info.save(cmd, ws.properties.endpoint_uri) return ws diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/_client.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum/_client.py deleted file mode 100644 index 2bd9c456a8a..00000000000 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/_client.py +++ /dev/null @@ -1,141 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) Python Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from copy import deepcopy -from typing import Any, TYPE_CHECKING, Union -from typing_extensions import Self - -from azure.core import PipelineClient -from azure.core.credentials import AzureKeyCredential -from azure.core.pipeline import policies -from azure.core.rest import HttpRequest, HttpResponse - -from ._configuration import ServicesClientConfiguration -from ._serialization import Deserializer, Serializer -from .operations import ( - JobsOperations, - ProvidersOperations, - QuotasOperations, - SessionsOperations, - StorageOperations, - TopLevelItemsOperations, -) - -if TYPE_CHECKING: - from azure.core.credentials import TokenCredential - - -class ServicesClient: - """Azure Quantum Workspace Services. - - :ivar jobs: JobsOperations operations - :vartype jobs: azure.quantum.operations.JobsOperations - :ivar sessions: SessionsOperations operations - :vartype sessions: azure.quantum.operations.SessionsOperations - :ivar providers: ProvidersOperations operations - :vartype providers: azure.quantum.operations.ProvidersOperations - :ivar storage: StorageOperations operations - :vartype storage: azure.quantum.operations.StorageOperations - :ivar quotas: QuotasOperations operations - :vartype quotas: azure.quantum.operations.QuotasOperations - :ivar top_level_items: TopLevelItemsOperations operations - :vartype top_level_items: azure.quantum.operations.TopLevelItemsOperations - :param region: The Azure region where the Azure Quantum Workspace is located. Required. - :type region: str - :param credential: Credential used to authenticate requests to the service. Is either a - TokenCredential type or a AzureKeyCredential type. Required. - :type credential: ~azure.core.credentials.TokenCredential or - ~azure.core.credentials.AzureKeyCredential - :keyword service_base_url: The Azure Quantum service base url. Default value is - "quantum.azure.com". 
- :paramtype service_base_url: str - :keyword api_version: The API version to use for this operation. Default value is - "2024-10-01-preview". Note that overriding this default value may result in unsupported - behavior. - :paramtype api_version: str - """ - - def __init__( - self, - region: str, - credential: Union["TokenCredential", AzureKeyCredential], - *, - service_base_url: str = "quantum.azure.com", - **kwargs: Any - ) -> None: - _endpoint = "https://{region}.{serviceBaseUrl}" - self._config = ServicesClientConfiguration( - region=region, credential=credential, service_base_url=service_base_url, **kwargs - ) - _policies = kwargs.pop("policies", None) - if _policies is None: - _policies = [ - policies.RequestIdPolicy(**kwargs), - self._config.headers_policy, - self._config.user_agent_policy, - self._config.proxy_policy, - policies.ContentDecodePolicy(**kwargs), - self._config.redirect_policy, - self._config.retry_policy, - self._config.authentication_policy, - self._config.custom_hook_policy, - self._config.logging_policy, - policies.DistributedTracingPolicy(**kwargs), - policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, - self._config.http_logging_policy, - ] - self._client: PipelineClient = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs) - - self._serialize = Serializer() - self._deserialize = Deserializer() - self._serialize.client_side_validation = False - self.jobs = JobsOperations(self._client, self._config, self._serialize, self._deserialize) - self.sessions = SessionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.providers = ProvidersOperations(self._client, self._config, self._serialize, self._deserialize) - self.storage = StorageOperations(self._client, self._config, self._serialize, self._deserialize) - self.quotas = QuotasOperations(self._client, self._config, self._serialize, self._deserialize) - self.top_level_items = TopLevelItemsOperations(self._client, 
self._config, self._serialize, self._deserialize) - - def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: - """Runs the network request through the client's chained policies. - - >>> from azure.core.rest import HttpRequest - >>> request = HttpRequest("GET", "https://www.example.org/") - - >>> response = client.send_request(request) - - - For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request - - :param request: The network request you want to make. Required. - :type request: ~azure.core.rest.HttpRequest - :keyword bool stream: Whether the response payload will be streamed. Defaults to False. - :return: The response of your network call. Does not do error handling on your response. - :rtype: ~azure.core.rest.HttpResponse - """ - - request_copy = deepcopy(request) - path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" - ), - } - - request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) - return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore - - def close(self) -> None: - self._client.close() - - def __enter__(self) -> Self: - self._client.__enter__() - return self - - def __exit__(self, *exc_details: Any) -> None: - self._client.__exit__(*exc_details) diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/models/__init__.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum/models/__init__.py deleted file mode 100644 index 6f076e508af..00000000000 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/models/__init__.py +++ /dev/null @@ -1,76 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) Python Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -# pylint: disable=wrong-import-position - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import - - -from ._models import ( # type: ignore - BlobDetails, - CostEstimate, - InnerError, - ItemDetails, - JobDetails, - JsonPatchObject, - ProviderStatus, - QuantumComputingData, - Quota, - SasUriResponse, - SessionDetails, - TargetStatus, - UsageEvent, - WorkspaceItemError, -) - -from ._enums import ( # type: ignore - DimensionScope, - ItemType, - JobStatus, - JobType, - JsonPatchOperation, - MeterPeriod, - ProviderAvailability, - SessionJobFailurePolicy, - SessionStatus, - TargetAvailability, -) -from ._patch import __all__ as _patch_all -from ._patch import * -from ._patch import patch_sdk as _patch_sdk - -__all__ = [ - "BlobDetails", - "CostEstimate", - "InnerError", - "ItemDetails", - "JobDetails", - "JsonPatchObject", - "ProviderStatus", - "QuantumComputingData", - "Quota", - "SasUriResponse", - "SessionDetails", - "TargetStatus", - "UsageEvent", - "WorkspaceItemError", - "DimensionScope", - "ItemType", - "JobStatus", - "JobType", - "JsonPatchOperation", - "MeterPeriod", - "ProviderAvailability", - "SessionJobFailurePolicy", - "SessionStatus", - "TargetAvailability", -] -__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore -_patch_sdk() diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/models/_enums.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum/models/_enums.py deleted file mode 100644 index ed37eaf1804..00000000000 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/models/_enums.py +++ /dev/null @@ -1,130 +0,0 @@ -# 
coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) Python Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from enum import Enum -from azure.core import CaseInsensitiveEnumMeta - - -class DimensionScope(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The scope at which the quota is applied to.""" - - WORKSPACE = "Workspace" - """The quota is applied to the Quantum Workspace.""" - SUBSCRIPTION = "Subscription" - """The quota is applied to the Azure Subscription.""" - - -class ItemType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of the workspace item.""" - - JOB = "Job" - """A program, problem, or application submitted for processing.""" - SESSION = "Session" - """A logical grouping of jobs.""" - - -class JobStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The status of the job.""" - - WAITING = "Waiting" - """The job is waiting in the queue to be executed.""" - EXECUTING = "Executing" - """The job is being executed.""" - SUCCEEDED = "Succeeded" - """The job completed with success.""" - FAILED = "Failed" - """The job completed with failure.""" - CANCELLED = "Cancelled" - """The job was cancelled.""" - - -class JobType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of the job.""" - - UNKNOWN = "Unknown" - """Unknown job type.""" - QUANTUM_COMPUTING = "QuantumComputing" - """Quantum Computing job type.""" - OPTIMIZATION = "Optimization" - """Optimization job type.""" - - -class JsonPatchOperation(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The operation to be performed.""" - - ADD = "add" - """Add value operation.""" - REMOVE = "remove" - """Remove value 
operation.""" - REPLACE = "replace" - """Replace value operation.""" - MOVE = "move" - """Move value operation.""" - COPY = "copy" - """Copy value operation.""" - TEST = "test" - """Test value operation.""" - - -class MeterPeriod(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The time period in which the quota's underlying meter is accumulated. Based on calendar year. - 'None' is used for concurrent quotas. - """ - - NONE = "None" - """The meter period is instantaneous. Used for concurrent quotas.""" - MONTHLY = "Monthly" - """The meter period is per month.""" - - -class ProviderAvailability(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Provider availability.""" - - AVAILABLE = "Available" - """Provider is available.""" - DEGRADED = "Degraded" - """Provider is available with degraded experience.""" - UNAVAILABLE = "Unavailable" - """Provider is unavailable.""" - - -class SessionJobFailurePolicy(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Policy controlling the behavior of the Session when a job in the session fails.""" - - ABORT = "Abort" - """New jobs submitted after a job fails will be rejected.""" - CONTINUE = "Continue" - """New jobs submitted after a job fails will be accepted.""" - - -class SessionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The status of the session.""" - - WAITING = "Waiting" - """The session is waiting in the queue to be executed.""" - EXECUTING = "Executing" - """The session is being executed.""" - SUCCEEDED = "Succeeded" - """The session completed with success.""" - FAILED = "Failed" - """The session completed with failure.""" - FAILURE_S_ = "Failure(s)" - """The session completed with some failures.""" - TIMED_OUT = "TimedOut" - """The session timed out.""" - - -class TargetAvailability(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Target availability.""" - - AVAILABLE = "Available" - """Target is available.""" - DEGRADED = "Degraded" - """Target is available with degraded experience.""" - 
UNAVAILABLE = "Unavailable" - """Target is unavailable.""" diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/models/_models.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum/models/_models.py deleted file mode 100644 index 64e64c5f8e1..00000000000 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/models/_models.py +++ /dev/null @@ -1,692 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) Python Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- -# pylint: disable=useless-super-delegation - -import datetime -from typing import Any, Dict, List, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload - -from azure.core.exceptions import ODataV4Format - -from .. import _model_base -from .._model_base import rest_discriminator, rest_field -from ._enums import ItemType - -if TYPE_CHECKING: - from .. import models as _models - - -class BlobDetails(_model_base.Model): - """The details (name and container) of the blob to store or download data. - - All required parameters must be populated in order to send to server. - - :ivar container_name: The container name. Required. - :vartype container_name: str - :ivar blob_name: The blob name. - :vartype blob_name: str - """ - - container_name: str = rest_field(name="containerName", visibility=["read", "create"]) - """The container name. Required.""" - blob_name: Optional[str] = rest_field(name="blobName", visibility=["read", "create"]) - """The blob name.""" - - @overload - def __init__( - self, - *, - container_name: str, - blob_name: Optional[str] = None, - ) -> None: ... 
- - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class CostEstimate(_model_base.Model): - """The job cost billed by the provider. The final cost on your bill might be slightly different - due to added taxes and currency conversion rates. - - - :ivar currency_code: The currency code. Required. - :vartype currency_code: str - :ivar events: List of usage events. - :vartype events: list[~azure.quantum.models.UsageEvent] - :ivar estimated_total: The estimated total. Required. - :vartype estimated_total: float - """ - - currency_code: str = rest_field(name="currencyCode") - """The currency code. Required.""" - events: Optional[List["_models.UsageEvent"]] = rest_field() - """List of usage events.""" - estimated_total: float = rest_field(name="estimatedTotal") - """The estimated total. Required.""" - - @overload - def __init__( - self, - *, - currency_code: str, - estimated_total: float, - events: Optional[List["_models.UsageEvent"]] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class InnerError(_model_base.Model): - """An object containing more specific information about the error. As per Microsoft One API - guidelines - - https://github.com/Microsoft/api-guidelines/blob/vNext/Guidelines.md#7102-error-condition-responses. - - :ivar code: One of a server-defined set of error codes. - :vartype code: str - :ivar innererror: Inner error. 
- :vartype innererror: ~azure.quantum.models.InnerError - """ - - code: Optional[str] = rest_field() - """One of a server-defined set of error codes.""" - innererror: Optional["_models.InnerError"] = rest_field() - """Inner error.""" - - @overload - def __init__( - self, - *, - code: Optional[str] = None, - innererror: Optional["_models.InnerError"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ItemDetails(_model_base.Model): - """A workspace item. - - You probably want to use the sub-classes and not this class directly. Known sub-classes are: - JobDetails, SessionDetails - - Readonly variables are only populated by the server, and will be ignored when sending a request. - - - :ivar id: Id of the item. Required. - :vartype id: str - :ivar name: The name of the item. It is not required for the name to be unique and it's only - used for display purposes. Required. - :vartype name: str - :ivar provider_id: The unique identifier for the provider. Required. - :vartype provider_id: str - :ivar target: The target identifier to run the job. Required. - :vartype target: str - :ivar item_type: Type of the Quantum Workspace item. Required. Known values are: "Job" and - "Session". - :vartype item_type: str or ~azure.quantum.models.ItemType - :ivar creation_time: The creation time of the item. - :vartype creation_time: ~datetime.datetime - :ivar begin_execution_time: The time when the item began execution. - :vartype begin_execution_time: ~datetime.datetime - :ivar end_execution_time: The time when the item finished execution. - :vartype end_execution_time: ~datetime.datetime - :ivar cost_estimate: Cost estimate. - :vartype cost_estimate: ~azure.quantum.models.CostEstimate - :ivar error_data: Error information. 
- :vartype error_data: ~azure.quantum.models.WorkspaceItemError - """ - - __mapping__: Dict[str, _model_base.Model] = {} - id: str = rest_field(visibility=["read"]) - """Id of the item. Required.""" - name: str = rest_field(visibility=["read", "create", "update"]) - """The name of the item. It is not required for the name to be unique and it's only used for - display purposes. Required.""" - provider_id: str = rest_field(name="providerId", visibility=["read", "create"]) - """The unique identifier for the provider. Required.""" - target: str = rest_field(visibility=["read", "create"]) - """The target identifier to run the job. Required.""" - item_type: str = rest_discriminator(name="itemType", visibility=["read", "create"]) - """Type of the Quantum Workspace item. Required. Known values are: \"Job\" and \"Session\".""" - creation_time: Optional[datetime.datetime] = rest_field(name="creationTime", visibility=["read"], format="rfc3339") - """The creation time of the item.""" - begin_execution_time: Optional[datetime.datetime] = rest_field( - name="beginExecutionTime", visibility=["read"], format="rfc3339" - ) - """The time when the item began execution.""" - end_execution_time: Optional[datetime.datetime] = rest_field( - name="endExecutionTime", visibility=["read"], format="rfc3339" - ) - """The time when the item finished execution.""" - cost_estimate: Optional["_models.CostEstimate"] = rest_field(name="costEstimate", visibility=["read"]) - """Cost estimate.""" - error_data: Optional["_models.WorkspaceItemError"] = rest_field(name="errorData", visibility=["read"]) - """Error information.""" - - @overload - def __init__( - self, - *, - name: str, - provider_id: str, - target: str, - item_type: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. 
- :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class JobDetails(ItemDetails, discriminator="Job"): - """A job to be run in the workspace. - - Readonly variables are only populated by the server, and will be ignored when sending a request. - - - :ivar name: The name of the item. It is not required for the name to be unique and it's only - used for display purposes. Required. - :vartype name: str - :ivar provider_id: The unique identifier for the provider. Required. - :vartype provider_id: str - :ivar target: The target identifier to run the job. Required. - :vartype target: str - :ivar creation_time: The creation time of the item. - :vartype creation_time: ~datetime.datetime - :ivar begin_execution_time: The time when the item began execution. - :vartype begin_execution_time: ~datetime.datetime - :ivar end_execution_time: The time when the item finished execution. - :vartype end_execution_time: ~datetime.datetime - :ivar cost_estimate: Cost estimate. - :vartype cost_estimate: ~azure.quantum.models.CostEstimate - :ivar error_data: Error information. - :vartype error_data: ~azure.quantum.models.WorkspaceItemError - :ivar id: Id of the job. Required. - :vartype id: str - :ivar item_type: Type of the Quantum Workspace item is Job. Required. A program, problem, or - application submitted for processing. - :vartype item_type: str or ~azure.quantum.models.JOB - :ivar job_type: The type of job. Known values are: "Unknown", "QuantumComputing", and - "Optimization". - :vartype job_type: str or ~azure.quantum.models.JobType - :ivar session_id: The ID of the session that the job is part of. - :vartype session_id: str - :ivar container_uri: The blob container SAS uri, the container is used to host job data. - Required. - :vartype container_uri: str - :ivar input_data_uri: The input blob URI, if specified, it will override the default input blob - in the container. 
- :vartype input_data_uri: str - :ivar input_data_format: The format of the input data. - :vartype input_data_format: str - :ivar status: The status of the job. Known values are: "Waiting", "Executing", "Succeeded", - "Failed", and "Cancelled". - :vartype status: str or ~azure.quantum.models.JobStatus - :ivar metadata: The job metadata. Metadata provides client the ability to store client-specific - information. - :vartype metadata: any - :ivar cancellation_time: The time when a job was successfully cancelled. - :vartype cancellation_time: ~datetime.datetime - :ivar tags: List of user-supplied tags associated with the job. - :vartype tags: list[str] - :ivar quantum_computing_data: Quantum computing data. - :vartype quantum_computing_data: ~azure.quantum.models.QuantumComputingData - :ivar input_params: The input parameters for the job. JSON object used by the target solver. It - is expected that the size of this object is small and only used to specify parameters for the - execution target, not the input data. - :vartype input_params: any - :ivar output_data_uri: The output blob uri. When a job finishes successfully, results will be - uploaded to this blob. - :vartype output_data_uri: str - :ivar output_data_format: The format of the output data. - :vartype output_data_format: str - """ - - item_type: Literal[ItemType.JOB] = rest_discriminator(name="itemType", visibility=["read", "create"]) # type: ignore # pylint: disable=line-too-long - """Type of the Quantum Workspace item is Job. Required. A program, problem, or application - submitted for processing.""" - job_type: Optional[Union[str, "_models.JobType"]] = rest_field(name="jobType", visibility=["read", "create"]) - """The type of job. 
Known values are: \"Unknown\", \"QuantumComputing\", and \"Optimization\".""" - session_id: Optional[str] = rest_field(name="sessionId", visibility=["read", "create"]) - """The ID of the session that the job is part of.""" - container_uri: str = rest_field(name="containerUri", visibility=["read", "create"]) - """The blob container SAS uri, the container is used to host job data. Required.""" - input_data_uri: Optional[str] = rest_field(name="inputDataUri", visibility=["read", "create"]) - """The input blob URI, if specified, it will override the default input blob in the container.""" - input_data_format: Optional[str] = rest_field(name="inputDataFormat", visibility=["read", "create"]) - """The format of the input data.""" - status: Optional[Union[str, "_models.JobStatus"]] = rest_field(visibility=["read"]) - """The status of the job. Known values are: \"Waiting\", \"Executing\", \"Succeeded\", \"Failed\", - and \"Cancelled\".""" - metadata: Optional[Any] = rest_field(visibility=["read", "create", "update"]) - """The job metadata. Metadata provides client the ability to store client-specific information.""" - cancellation_time: Optional[datetime.datetime] = rest_field( - name="cancellationTime", visibility=["read"], format="rfc3339" - ) - """The time when a job was successfully cancelled.""" - tags: Optional[List[str]] = rest_field(visibility=["read", "create", "update"]) - """List of user-supplied tags associated with the job.""" - quantum_computing_data: Optional["_models.QuantumComputingData"] = rest_field( - name="quantumComputingData", visibility=["read"] - ) - """Quantum computing data.""" - input_params: Optional[Any] = rest_field(name="inputParams", visibility=["read", "create"]) - """The input parameters for the job. JSON object used by the target solver. 
It is expected that - the size of this object is small and only used to specify parameters for the execution target, - not the input data.""" - output_data_uri: Optional[str] = rest_field(name="outputDataUri", visibility=["read", "create"]) - """The output blob uri. When a job finishes successfully, results will be uploaded to this blob.""" - output_data_format: Optional[str] = rest_field(name="outputDataFormat", visibility=["read", "create"]) - """The format of the output data.""" - - @overload - def __init__( - self, - *, - name: str, - provider_id: str, - target: str, - container_uri: str, - job_type: Optional[Union[str, "_models.JobType"]] = None, - session_id: Optional[str] = None, - input_data_uri: Optional[str] = None, - input_data_format: Optional[str] = None, - metadata: Optional[Any] = None, - tags: Optional[List[str]] = None, - input_params: Optional[Any] = None, - output_data_uri: Optional[str] = None, - output_data_format: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, item_type=ItemType.JOB, **kwargs) - - -class JsonPatchObject(_model_base.Model): - """A JSONPatch object as defined by RFC 6902. - - All required parameters must be populated in order to send to server. - - :ivar operation: The operation to be performed. Required. Known values are: "add", "remove", - "replace", "move", "copy", and "test". - :vartype operation: str or ~azure.quantum.models.JsonPatchOperation - :ivar path: A JSON-Pointer. Required. - :vartype path: str - :ivar value: A value to be used in the operation on the path. - :vartype value: any - :ivar from_property: Optional field used in copy and move operations. 
- :vartype from_property: str - """ - - operation: Union[str, "_models.JsonPatchOperation"] = rest_field(name="op", visibility=["create"]) - """The operation to be performed. Required. Known values are: \"add\", \"remove\", \"replace\", - \"move\", \"copy\", and \"test\".""" - path: str = rest_field(visibility=["create"]) - """A JSON-Pointer. Required.""" - value: Optional[Any] = rest_field(visibility=["create"]) - """A value to be used in the operation on the path.""" - from_property: Optional[str] = rest_field(name="from", visibility=["create"]) - """Optional field used in copy and move operations.""" - - @overload - def __init__( - self, - *, - operation: Union[str, "_models.JsonPatchOperation"], - path: str, - value: Optional[Any] = None, - from_property: Optional[str] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class ProviderStatus(_model_base.Model): - """Provider status. - - Readonly variables are only populated by the server, and will be ignored when sending a request. - - - :ivar id: Provider id. Required. - :vartype id: str - :ivar current_availability: Current provider availability. Required. Known values are: - "Available", "Degraded", and "Unavailable". - :vartype current_availability: str or ~azure.quantum.models.ProviderAvailability - :ivar targets: Current target statuses. Required. - :vartype targets: list[~azure.quantum.models.TargetStatus] - """ - - id: str = rest_field(visibility=["read"]) - """Provider id. Required.""" - current_availability: Union[str, "_models.ProviderAvailability"] = rest_field( - name="currentAvailability", visibility=["read"] - ) - """Current provider availability. Required. 
Known values are: \"Available\", \"Degraded\", and - \"Unavailable\".""" - targets: List["_models.TargetStatus"] = rest_field(visibility=["read"]) - """Current target statuses. Required.""" - - -class QuantumComputingData(_model_base.Model): - """Quantum computing data. - - Readonly variables are only populated by the server, and will be ignored when sending a request. - - - :ivar count: The number of quantum computing items in the job. Required. - :vartype count: int - """ - - count: int = rest_field(visibility=["read"]) - """The number of quantum computing items in the job. Required.""" - - -class Quota(_model_base.Model): - """Quota information. - - Readonly variables are only populated by the server, and will be ignored when sending a request. - - - :ivar dimension: The name of the dimension associated with the quota. Required. - :vartype dimension: str - :ivar scope: The scope at which the quota is applied. Required. Known values are: "Workspace" - and "Subscription". - :vartype scope: str or ~azure.quantum.models.DimensionScope - :ivar provider_id: The unique identifier for the provider. Required. - :vartype provider_id: str - :ivar utilization: The amount of the usage that has been applied for the current period. - Required. - :vartype utilization: float - :ivar holds: The amount of the usage that has been reserved but not applied for the current - period. Required. - :vartype holds: float - :ivar limit: The maximum amount of usage allowed for the current period. Required. - :vartype limit: float - :ivar period: The time period in which the quota's underlying meter is accumulated. Based on - calendar year. 'None' is used for concurrent quotas. Required. Known values are: "None" and - "Monthly". - :vartype period: str or ~azure.quantum.models.MeterPeriod - """ - - dimension: str = rest_field(visibility=["read"]) - """The name of the dimension associated with the quota. 
Required.""" - scope: Union[str, "_models.DimensionScope"] = rest_field(visibility=["read"]) - """The scope at which the quota is applied. Required. Known values are: \"Workspace\" and - \"Subscription\".""" - provider_id: str = rest_field(name="providerId", visibility=["read"]) - """The unique identifier for the provider. Required.""" - utilization: float = rest_field(visibility=["read"]) - """The amount of the usage that has been applied for the current period. Required.""" - holds: float = rest_field(visibility=["read"]) - """The amount of the usage that has been reserved but not applied for the current period. - Required.""" - limit: float = rest_field(visibility=["read"]) - """The maximum amount of usage allowed for the current period. Required.""" - period: Union[str, "_models.MeterPeriod"] = rest_field(visibility=["read"]) - """The time period in which the quota's underlying meter is accumulated. Based on calendar year. - 'None' is used for concurrent quotas. Required. Known values are: \"None\" and \"Monthly\".""" - - -class SasUriResponse(_model_base.Model): - """SAS URI operation response. - - Readonly variables are only populated by the server, and will be ignored when sending a request. - - - :ivar sas_uri: A URL with a SAS token to upload a blob for execution in the given workspace. - Required. - :vartype sas_uri: str - """ - - sas_uri: str = rest_field(name="sasUri", visibility=["read"]) - """A URL with a SAS token to upload a blob for execution in the given workspace. Required.""" - - -class SessionDetails(ItemDetails, discriminator="Session"): - """Session, a logical grouping of jobs. - - Readonly variables are only populated by the server, and will be ignored when sending a request. - - - :ivar name: The name of the item. It is not required for the name to be unique and it's only - used for display purposes. Required. - :vartype name: str - :ivar provider_id: The unique identifier for the provider. Required. 
- :vartype provider_id: str - :ivar target: The target identifier to run the job. Required. - :vartype target: str - :ivar creation_time: The creation time of the item. - :vartype creation_time: ~datetime.datetime - :ivar begin_execution_time: The time when the item began execution. - :vartype begin_execution_time: ~datetime.datetime - :ivar end_execution_time: The time when the item finished execution. - :vartype end_execution_time: ~datetime.datetime - :ivar cost_estimate: Cost estimate. - :vartype cost_estimate: ~azure.quantum.models.CostEstimate - :ivar error_data: Error information. - :vartype error_data: ~azure.quantum.models.WorkspaceItemError - :ivar id: Id of the session. Required. - :vartype id: str - :ivar item_type: Type of the Quantum Workspace item is Session. Required. A logical grouping of - jobs. - :vartype item_type: str or ~azure.quantum.models.SESSION - :ivar job_failure_policy: Policy controlling the behavior of the Session when a job in the - session fails. Required. Known values are: "Abort" and "Continue". - :vartype job_failure_policy: str or ~azure.quantum.models.SessionJobFailurePolicy - :ivar status: The status of the session. Known values are: "Waiting", "Executing", "Succeeded", - "Failed", "Failure(s)", and "TimedOut". - :vartype status: str or ~azure.quantum.models.SessionStatus - """ - - item_type: Literal[ItemType.SESSION] = rest_discriminator(name="itemType", visibility=["read", "create"]) # type: ignore # pylint: disable=line-too-long - """Type of the Quantum Workspace item is Session. Required. A logical grouping of jobs.""" - job_failure_policy: Union[str, "_models.SessionJobFailurePolicy"] = rest_field( - name="jobFailurePolicy", visibility=["read", "create"] - ) - """Policy controlling the behavior of the Session when a job in the session fails. Required. Known - values are: \"Abort\" and \"Continue\".""" - status: Optional[Union[str, "_models.SessionStatus"]] = rest_field(visibility=["read"]) - """The status of the session. 
Known values are: \"Waiting\", \"Executing\", \"Succeeded\", - \"Failed\", \"Failure(s)\", and \"TimedOut\".""" - - @overload - def __init__( - self, - *, - name: str, - provider_id: str, - target: str, - job_failure_policy: Union[str, "_models.SessionJobFailurePolicy"], - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, item_type=ItemType.SESSION, **kwargs) - - -class TargetStatus(_model_base.Model): - """Target status. - - Readonly variables are only populated by the server, and will be ignored when sending a request. - - - :ivar id: Target id. Required. - :vartype id: str - :ivar current_availability: Current target availability. Required. Known values are: - "Available", "Degraded", and "Unavailable". - :vartype current_availability: str or ~azure.quantum.models.TargetAvailability - :ivar average_queue_time: Average queue time in seconds. Required. - :vartype average_queue_time: int - :ivar status_page: A page with detailed status of the provider. - :vartype status_page: str - """ - - id: str = rest_field(visibility=["read"]) - """Target id. Required.""" - current_availability: Union[str, "_models.TargetAvailability"] = rest_field( - name="currentAvailability", visibility=["read"] - ) - """Current target availability. Required. Known values are: \"Available\", \"Degraded\", and - \"Unavailable\".""" - average_queue_time: int = rest_field(name="averageQueueTime", visibility=["read"]) - """Average queue time in seconds. Required.""" - status_page: Optional[str] = rest_field(name="statusPage", visibility=["read"]) - """A page with detailed status of the provider.""" - - -class UsageEvent(_model_base.Model): - """Usage event details. - - - :ivar dimension_id: The dimension id. Required. 
- :vartype dimension_id: str - :ivar dimension_name: The dimension name. Required. - :vartype dimension_name: str - :ivar measure_unit: The unit of measure. Required. - :vartype measure_unit: str - :ivar amount_billed: The amount billed. Required. - :vartype amount_billed: float - :ivar amount_consumed: The amount consumed. Required. - :vartype amount_consumed: float - :ivar unit_price: The unit price. Required. - :vartype unit_price: float - """ - - dimension_id: str = rest_field(name="dimensionId") - """The dimension id. Required.""" - dimension_name: str = rest_field(name="dimensionName") - """The dimension name. Required.""" - measure_unit: str = rest_field(name="measureUnit") - """The unit of measure. Required.""" - amount_billed: float = rest_field(name="amountBilled") - """The amount billed. Required.""" - amount_consumed: float = rest_field(name="amountConsumed") - """The amount consumed. Required.""" - unit_price: float = rest_field(name="unitPrice") - """The unit price. Required.""" - - @overload - def __init__( - self, - *, - dimension_id: str, - dimension_name: str, - measure_unit: str, - amount_billed: float, - amount_consumed: float, - unit_price: float, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - -class WorkspaceItemError(_model_base.Model): - """The error object. - - - :ivar code: One of a server-defined set of error codes. Required. - :vartype code: str - :ivar message: A human-readable representation of the error. Required. - :vartype message: str - :ivar target: The target of the error. - :vartype target: str - :ivar details: An array of details about specific errors that led to this reported error. 
- :vartype details: list[~azure.core.ODataV4Format] - :ivar innererror: An object containing more specific information than the current object about - the error. - :vartype innererror: ~azure.quantum.models.InnerError - """ - - code: str = rest_field() - """One of a server-defined set of error codes. Required.""" - message: str = rest_field() - """A human-readable representation of the error. Required.""" - target: Optional[str] = rest_field() - """The target of the error.""" - details: Optional[List[ODataV4Format]] = rest_field() - """An array of details about specific errors that led to this reported error.""" - innererror: Optional["_models.InnerError"] = rest_field() - """An object containing more specific information than the current object about the error.""" - - @overload - def __init__( - self, - *, - code: str, - message: str, - target: Optional[str] = None, - details: Optional[List[ODataV4Format]] = None, - innererror: Optional["_models.InnerError"] = None, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/models/_patch.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum/models/_patch.py deleted file mode 100644 index f7dd3251033..00000000000 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/models/_patch.py +++ /dev/null @@ -1,20 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -"""Customize generated code here. 
- -Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize -""" -from typing import List - -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level - - -def patch_sdk(): - """Do not remove from this file. - - `patch_sdk` is a last resort escape hatch that allows you to do customizations - you can't accomplish using the techniques described in - https://aka.ms/azsdk/python/dpcodegen/python/customize - """ diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/py.typed b/src/quantum/azext_quantum/vendored_sdks/azure_quantum/py.typed deleted file mode 100644 index e5aff4f83af..00000000000 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/py.typed +++ /dev/null @@ -1 +0,0 @@ -# Marker file for PEP 561. \ No newline at end of file diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_authentication/__init__.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_authentication/__init__.py deleted file mode 100644 index 84646caa202..00000000000 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_authentication/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# coding=utf-8 -## -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -## - -from ._chained import * -from ._default import _DefaultAzureCredential -from ._token import _TokenFileCredential diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_authentication/_chained.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_authentication/_chained.py deleted file mode 100644 index aae945fdd04..00000000000 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_authentication/_chained.py +++ /dev/null @@ -1,120 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. 
-# ------------------------------------ -import logging - -import sys -from azure.core.exceptions import ClientAuthenticationError -# from azure.identity import CredentialUnavailableError -from ...azure_identity import CredentialUnavailableError -from azure.core.credentials import AccessToken, TokenCredential - - -_LOGGER = logging.getLogger(__name__) - - - -def filter_credential_warnings(record): - """Suppress warnings from credentials other than DefaultAzureCredential""" - if record.levelno == logging.WARNING: - message = record.getMessage() - return "DefaultAzureCredential" in message - return True - - -def _get_error_message(history): - attempts = [] - for credential, error in history: - if error: - attempts.append(f"{credential.__class__.__name__}: {error}") - else: - attempts.append(credential.__class__.__name__) - return """ -Attempted credentials:\n\t{}""".format( - "\n\t".join(attempts) - ) - - -class _ChainedTokenCredential(object): - """ - Based on Azure.Identity.ChainedTokenCredential from: - https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/identity/azure-identity/azure/identity/_credentials/chained.py - - The key difference is that we don't stop attempting all credentials - if some of then failed or raised an exception. - We also don't log a warning unless all credential attempts have failed. - """ - - def __init__(self, *credentials: TokenCredential): - self._successful_credential = None - self.credentials = credentials - - def get_token(self, *scopes: str, **kwargs) -> AccessToken: # pylint:disable=unused-argument - """ - Request a token from each chained credential, in order, - returning the first token received. - This method is called automatically by Azure SDK clients. - - :param str scopes: desired scopes for the access token. - This method requires at least one scope. 
- - :raises ~azure.core.exceptions.ClientAuthenticationError: - no credential in the chain provided a token - """ - history = [] - - # Suppress warnings from credentials in Azure.Identity - azure_identity_logger = logging.getLogger("azure.identity") - handler = logging.StreamHandler(stream=sys.stdout) - handler.addFilter(filter_credential_warnings) - azure_identity_logger.addHandler(handler) - try: - for credential in self.credentials: - try: - token = credential.get_token(*scopes, **kwargs) - _LOGGER.info( - "%s acquired a token from %s", - self.__class__.__name__, - credential.__class__.__name__, - ) - self._successful_credential = credential - return token - except CredentialUnavailableError as ex: - # credential didn't attempt authentication because - # it lacks required data or state -> continue - history.append((credential, ex.message)) - _LOGGER.info( - "%s - %s is unavailable", - self.__class__.__name__, - credential.__class__.__name__, - ) - except Exception as ex: # pylint: disable=broad-except - # credential failed to authenticate, - # or something unexpectedly raised -> break - history.append((credential, str(ex))) - # instead of logging a warning, we just want to log an info - # since other credentials might succeed - _LOGGER.info( - '%s.get_token failed: %s raised unexpected error "%s"', - self.__class__.__name__, - credential.__class__.__name__, - ex, - exc_info=_LOGGER.isEnabledFor(logging.DEBUG), - ) - # here we do NOT want break and - # will continue to try other credentials - - finally: - # Re-enable warnings from credentials in Azure.Identity - azure_identity_logger.removeHandler(handler) - - # if all attempts failed, only then we log a warning and raise an error - attempts = _get_error_message(history) - message = ( - self.__class__.__name__ - + " failed to retrieve a token from the included credentials." 
- + attempts - ) - _LOGGER.warning(message) - raise ClientAuthenticationError(message=message) diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_authentication/_default.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_authentication/_default.py deleted file mode 100644 index 24fa9cb6bbb..00000000000 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_authentication/_default.py +++ /dev/null @@ -1,155 +0,0 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ -import logging -import re -from typing import Optional -import urllib3 -from azure.core.credentials import AccessToken -# from azure.identity import ( -from ...azure_identity import ( - AzurePowerShellCredential, - EnvironmentCredential, - ManagedIdentityCredential, - AzureCliCredential, - VisualStudioCodeCredential, - InteractiveBrowserCredential, - DeviceCodeCredential, - _internal as AzureIdentityInternals, -) -from ._chained import _ChainedTokenCredential -from ._token import _TokenFileCredential -# from azure.quantum._constants import ConnectionConstants -from .._constants import ConnectionConstants - -_LOGGER = logging.getLogger(__name__) -WWW_AUTHENTICATE_REGEX = re.compile( - r""" - ^ - Bearer\sauthorization_uri=" - https://(?P[^/]*)/ - (?P[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}) - " - """, - re.VERBOSE | re.IGNORECASE) -WWW_AUTHENTICATE_HEADER_NAME = "WWW-Authenticate" - - -class _DefaultAzureCredential(_ChainedTokenCredential): - """ - Based on Azure.Identity.DefaultAzureCredential from: - https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/identity/azure-identity/azure/identity/_credentials/default.py - - The three key differences are: - 1) Inherit from _ChainedTokenCredential, which has - more aggressive error handling than ChainedTokenCredential - 2) Instantiate the internal credentials the first time the 
get_token gets called - such that we can get the tenant_id if it was not passed by the user (but we don't - want to do that in the constructor). - We automatically identify the user's tenant_id for a given subscription - so that users with MSA accounts don't need to pass it. - This is a mitigation for bug https://github.com/Azure/azure-sdk-for-python/issues/18975 - We need the following parameters to enable auto-detection of tenant_id - - subscription_id - - arm_endpoint (defaults to the production url "https://management.azure.com/") - 3) Add custom TokenFileCredential as first method to attempt, - which will look for a local access token. - """ - def __init__( - self, - arm_endpoint: str, - subscription_id: str, - client_id: Optional[str] = None, - tenant_id: Optional[str] = None, - authority: Optional[str] = None, - ): - if arm_endpoint is None: - raise ValueError("arm_endpoint is mandatory parameter") - if subscription_id is None: - raise ValueError("subscription_id is mandatory parameter") - - self.authority = self._authority_or_default( - authority=authority, - arm_endpoint=arm_endpoint) - self.tenant_id = tenant_id - self.subscription_id = subscription_id - self.arm_endpoint = arm_endpoint - self.client_id = client_id - # credentials will be created lazy on the first call to get_token - super(_DefaultAzureCredential, self).__init__() - - def _authority_or_default(self, authority: str, arm_endpoint: str): - if authority: - return AzureIdentityInternals.normalize_authority(authority) - if arm_endpoint == ConnectionConstants.ARM_DOGFOOD_ENDPOINT: - return ConnectionConstants.DOGFOOD_AUTHORITY - return ConnectionConstants.AUTHORITY - - def _initialize_credentials(self): - self._discover_tenant_id_( - arm_endpoint=self.arm_endpoint, - subscription_id=self.subscription_id) - credentials = [] - credentials.append(_TokenFileCredential()) - credentials.append(EnvironmentCredential()) - if self.client_id: - 
credentials.append(ManagedIdentityCredential(client_id=self.client_id)) - if self.authority and self.tenant_id: - credentials.append(VisualStudioCodeCredential(authority=self.authority, tenant_id=self.tenant_id)) - credentials.append(AzureCliCredential(tenant_id=self.tenant_id)) - credentials.append(AzurePowerShellCredential(tenant_id=self.tenant_id)) - credentials.append(InteractiveBrowserCredential(authority=self.authority, tenant_id=self.tenant_id)) - if self.client_id: - credentials.append(DeviceCodeCredential(authority=self.authority, client_id=self.client_id, tenant_id=self.tenant_id)) - self.credentials = credentials - - def get_token(self, *scopes: str, **kwargs) -> AccessToken: - """ - Request an access token for `scopes`. - This method is called automatically by Azure SDK clients. - - :param str scopes: desired scopes for the access token. - This method requires at least one scope. - - :raises ~azure.core.exceptions.ClientAuthenticationError:authentication failed. - The exception has a `message` attribute listing each authentication - attempt and its error message. - """ - # lazy-initialize the credentials - if self.credentials is None or len(self.credentials) == 0: - self._initialize_credentials() - - return super(_DefaultAzureCredential, self).get_token(*scopes, **kwargs) - - def _discover_tenant_id_(self, arm_endpoint:str, subscription_id:str): - """ - If the tenant_id was not given, try to obtain it - by calling the management endpoint for the subscription_id, - or by applying default values. 
- """ - if self.tenant_id: - return - - try: - url = ( - f"{arm_endpoint.rstrip('/')}/subscriptions/" - + f"{subscription_id}?api-version=2018-01-01" - + "&discover-tenant-id" # used by the test recording infrastructure - ) - http = urllib3.PoolManager() - response = http.request( - method="GET", - url=url, - ) - if WWW_AUTHENTICATE_HEADER_NAME in response.headers: - www_authenticate = response.headers[WWW_AUTHENTICATE_HEADER_NAME] - match = re.search(WWW_AUTHENTICATE_REGEX, www_authenticate) - if match: - self.tenant_id = match.group("tenant_id") - # pylint: disable=broad-exception-caught - except Exception as ex: - _LOGGER.error(ex) - - # apply default values - self.tenant_id = self.tenant_id or ConnectionConstants.MSA_TENANT_ID diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_authentication/_token.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_authentication/_token.py deleted file mode 100644 index 3b0b3a6db51..00000000000 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_authentication/_token.py +++ /dev/null @@ -1,85 +0,0 @@ -## -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. -## -import json -from json.decoder import JSONDecodeError -import logging -import os -import time - -# from azure.identity import CredentialUnavailableError -from ...azure_identity import CredentialUnavailableError -from azure.core.credentials import AccessToken -# from azure.quantum._constants import EnvironmentVariables -from .._constants import EnvironmentVariables - -_LOGGER = logging.getLogger(__name__) - - -class _TokenFileCredential(object): - """ - Implements a custom TokenCredential to use a local file as - the source for an AzureQuantum token. - - It will only use the local file if the AZURE_QUANTUM_TOKEN_FILE - environment variable is set, and references an existing json file - that contains the access_token and expires_on timestamp in milliseconds. 
- - If the environment variable is not set, the file does not exist, - or the token is invalid in any way (expired, for example), - then the credential will throw CredentialUnavailableError, - so that _ChainedTokenCredential can fallback to other methods. - """ - def __init__(self): - self.token_file = os.environ.get(EnvironmentVariables.QUANTUM_TOKEN_FILE) - if self.token_file: - _LOGGER.debug("Using provided token file location: %s", self.token_file) - else: - _LOGGER.debug("No token file location provided for %s environment variable.", - EnvironmentVariables.QUANTUM_TOKEN_FILE) - - def get_token(self, *scopes: str, **kwargs) -> AccessToken: # pylint:disable=unused-argument - """Request an access token for `scopes`. - This method is called automatically by Azure SDK clients. - This method only returns tokens for the https://quantum.microsoft.com/.default scope. - - :param str scopes: desired scopes for the access token. - - :raises ~azure.identity.CredentialUnavailableError - when failing to get the token. - The exception has a `message` attribute with the error message. 
- """ - if not self.token_file: - raise CredentialUnavailableError(message="Token file location not set.") - - if not os.path.isfile(self.token_file): - raise CredentialUnavailableError( - message=f"Token file at {self.token_file} does not exist.") - - try: - token = self._parse_token_file(self.token_file) - except JSONDecodeError as exception: - raise CredentialUnavailableError( - message="Failed to parse token file: Invalid JSON.") from exception - except KeyError as exception: - raise CredentialUnavailableError( - message="Failed to parse token file: Missing expected value: " - + str(exception)) from exception - except Exception as exception: - raise CredentialUnavailableError( - message="Failed to parse token file: " + str(exception)) from exception - - if token.expires_on <= time.time(): - raise CredentialUnavailableError( - message=f"Token already expired at {time.asctime(time.gmtime(token.expires_on))}") - - return token - - def _parse_token_file(self, path) -> AccessToken: - with open(path, mode="r", encoding="utf-8") as file: - data = json.load(file) - # Convert ms to seconds, since python time.time only handles epoch time in seconds - expires_on = int(data["expires_on"]) / 1000 - token = AccessToken(data["access_token"], expires_on) - return token diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/__init__.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/__init__.py index 56503227c20..922d8ab0948 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/__init__.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/__init__.py @@ -2,25 +2,31 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._client import QuantumClient +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import WorkspaceClient # type: ignore from ._version import VERSION __version__ = VERSION try: from ._patch import __all__ as _patch_all - from ._patch import * # pylint: disable=unused-wildcard-import + from ._patch import * except ImportError: _patch_all = [] from ._patch import patch_sdk as _patch_sdk __all__ = [ - "QuantumClient", + "WorkspaceClient", ] -__all__.extend([p for p in _patch_all if p not in __all__]) +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_client.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_client.py index ad231759f33..e56d6c0ef19 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_client.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_client.py @@ -2,84 +2,49 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from copy import deepcopy -from typing import Any, TYPE_CHECKING +from typing import Any, TYPE_CHECKING, Union +from typing_extensions import Self from azure.core import PipelineClient +from azure.core.credentials import AzureKeyCredential from azure.core.pipeline import policies from azure.core.rest import HttpRequest, HttpResponse -from . import models as _models -from ._configuration import QuantumClientConfiguration -from ._serialization import Deserializer, Serializer -from .operations import ( - JobsOperations, - ProvidersOperations, - QuotasOperations, - SessionsOperations, - StorageOperations, - TopLevelItemsOperations, -) +from ._configuration import WorkspaceClientConfiguration +from ._utils.serialization import Deserializer, Serializer +from .operations import ServicesOperations if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials import TokenCredential -class QuantumClient: # pylint: disable=client-accepts-api-version-keyword - """Azure Quantum REST API client. - - :ivar jobs: JobsOperations operations - :vartype jobs: azure.quantum._client.operations.JobsOperations - :ivar providers: ProvidersOperations operations - :vartype providers: azure.quantum._client.operations.ProvidersOperations - :ivar storage: StorageOperations operations - :vartype storage: azure.quantum._client.operations.StorageOperations - :ivar quotas: QuotasOperations operations - :vartype quotas: azure.quantum._client.operations.QuotasOperations - :ivar sessions: SessionsOperations operations - :vartype sessions: azure.quantum._client.operations.SessionsOperations - :ivar top_level_items: TopLevelItemsOperations operations - :vartype top_level_items: azure.quantum._client.operations.TopLevelItemsOperations - :param azure_region: Supported Azure regions for Azure Quantum Services. For example, "eastus". - Required. 
- :type azure_region: str - :param subscription_id: The Azure subscription ID. This is a GUID-formatted string (e.g. - 00000000-0000-0000-0000-000000000000). Required. - :type subscription_id: str - :param resource_group_name: Name of an Azure resource group. Required. - :type resource_group_name: str - :param workspace_name: Name of the workspace. Required. - :type workspace_name: str - :param credential: Credential needed for the client to connect to Azure. Required. - :type credential: ~azure.core.credentials.TokenCredential - :keyword api_version: Api Version. Default value is "2023-11-13-preview". Note that overriding - this default value may result in unsupported behavior. +class WorkspaceClient: + """Azure Quantum Workspace Services. + + :ivar services: ServicesOperations operations + :vartype services: azure.quantum.operations.ServicesOperations + :param endpoint: The endpoint of the Azure Quantum service. For example, + https://{region}.quantum.azure.com. Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Is either a token + credential type or a key credential type. Required. + :type credential: ~azure.core.credentials.TokenCredential or + ~azure.core.credentials.AzureKeyCredential + :keyword api_version: The API version to use for this operation. Known values are + "2026-01-15-preview" and None. Default value is "2026-01-15-preview". Note that overriding this + default value may result in unsupported behavior. 
:paramtype api_version: str """ - def __init__( - self, - azure_region: str, - subscription_id: str, - resource_group_name: str, - workspace_name: str, - credential: "TokenCredential", - **kwargs: Any - ) -> None: - _endpoint = kwargs.pop("endpoint", f"https://{azure_region}.quantum.azure.com") - self._config = QuantumClientConfiguration( - azure_region=azure_region, - subscription_id=subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - credential=credential, - **kwargs - ) + def __init__(self, endpoint: str, credential: Union["TokenCredential", AzureKeyCredential], **kwargs: Any) -> None: + _endpoint = "{endpoint}" + self._config = WorkspaceClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + _policies = kwargs.pop("policies", None) if _policies is None: _policies = [ @@ -99,17 +64,10 @@ def __init__( ] self._client: PipelineClient = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs) - client_models = {k: v for k, v in _models._models.__dict__.items() if isinstance(v, type)} - client_models.update({k: v for k, v in _models.__dict__.items() if isinstance(v, type)}) - self._serialize = Serializer(client_models) - self._deserialize = Deserializer(client_models) + self._serialize = Serializer() + self._deserialize = Deserializer() self._serialize.client_side_validation = False - self.jobs = JobsOperations(self._client, self._config, self._serialize, self._deserialize) - self.providers = ProvidersOperations(self._client, self._config, self._serialize, self._deserialize) - self.storage = StorageOperations(self._client, self._config, self._serialize, self._deserialize) - self.quotas = QuotasOperations(self._client, self._config, self._serialize, self._deserialize) - self.sessions = SessionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.top_level_items = TopLevelItemsOperations(self._client, self._config, self._serialize, self._deserialize) + self.services = 
ServicesOperations(self._client, self._config, self._serialize, self._deserialize) def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: """Runs the network request through the client's chained policies. @@ -131,9 +89,7 @@ def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: request_copy = deepcopy(request) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True - ), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) @@ -142,7 +98,7 @@ def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: def close(self) -> None: self._client.close() - def __enter__(self) -> "QuantumClient": + def __enter__(self) -> Self: self._client.__enter__() return self diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_configuration.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_configuration.py index 4514ff6502c..8346a6b9cc5 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_configuration.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_configuration.py @@ -2,70 +2,49 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, TYPE_CHECKING +from typing import Any, TYPE_CHECKING, Union +from azure.core.credentials import AzureKeyCredential from azure.core.pipeline import policies from ._version import VERSION if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials import TokenCredential -class QuantumClientConfiguration: # pylint: disable=too-many-instance-attributes - """Configuration for QuantumClient. +class WorkspaceClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for WorkspaceClient. Note that all parameters used to create this instance are saved as instance attributes. - :param azure_region: Supported Azure regions for Azure Quantum Services. For example, "eastus". - Required. - :type azure_region: str - :param subscription_id: The Azure subscription ID. This is a GUID-formatted string (e.g. - 00000000-0000-0000-0000-000000000000). Required. - :type subscription_id: str - :param resource_group_name: Name of an Azure resource group. Required. - :type resource_group_name: str - :param workspace_name: Name of the workspace. Required. - :type workspace_name: str - :param credential: Credential needed for the client to connect to Azure. Required. - :type credential: ~azure.core.credentials.TokenCredential - :keyword api_version: Api Version. Default value is "2023-11-13-preview". Note that overriding - this default value may result in unsupported behavior. + :param endpoint: The endpoint of the Azure Quantum service. For example, + https://{region}.quantum.azure.com. Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Is either a token + credential type or a key credential type. Required. 
+ :type credential: ~azure.core.credentials.TokenCredential or + ~azure.core.credentials.AzureKeyCredential + :keyword api_version: The API version to use for this operation. Known values are + "2026-01-15-preview" and None. Default value is "2026-01-15-preview". Note that overriding this + default value may result in unsupported behavior. :paramtype api_version: str """ - def __init__( - self, - azure_region: str, - subscription_id: str, - resource_group_name: str, - workspace_name: str, - credential: "TokenCredential", - **kwargs: Any - ) -> None: - api_version: str = kwargs.pop("api_version", "2022-09-12-preview") + def __init__(self, endpoint: str, credential: Union["TokenCredential", AzureKeyCredential], **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2026-01-15-preview") - if azure_region is None: - raise ValueError("Parameter 'azure_region' must not be None.") - if subscription_id is None: - raise ValueError("Parameter 'subscription_id' must not be None.") - if resource_group_name is None: - raise ValueError("Parameter 'resource_group_name' must not be None.") - if workspace_name is None: - raise ValueError("Parameter 'workspace_name' must not be None.") + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") if credential is None: raise ValueError("Parameter 'credential' must not be None.") - self.azure_region = azure_region - self.subscription_id = subscription_id - self.resource_group_name = resource_group_name - self.workspace_name = workspace_name + self.endpoint = endpoint self.credential = credential self.api_version = api_version self.credential_scopes = kwargs.pop("credential_scopes", ["https://quantum.microsoft.com/.default"]) @@ -73,6 +52,13 @@ def __init__( self.polling_interval = kwargs.get("polling_interval", 30) self._configure(**kwargs) + def _infer_policy(self, **kwargs): + if hasattr(self.credential, "get_token"): + return policies.BearerTokenCredentialPolicy(self.credential, 
*self.credential_scopes, **kwargs) + if isinstance(self.credential, AzureKeyCredential): + return policies.AzureKeyCredentialPolicy(self.credential, "x-ms-quantum-api-key", **kwargs) + raise TypeError(f"Unsupported credential: {self.credential}") + def _configure(self, **kwargs: Any) -> None: self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) @@ -84,6 +70,4 @@ def _configure(self, **kwargs: Any) -> None: self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) self.authentication_policy = kwargs.get("authentication_policy") if self.credential and not self.authentication_policy: - self.authentication_policy = policies.BearerTokenCredentialPolicy( - self.credential, *self.credential_scopes, **kwargs - ) + self.authentication_policy = self._infer_policy(**kwargs) diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_patch.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_patch.py index f7dd3251033..87676c65a8f 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_patch.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_patch.py @@ -1,14 +1,15 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. 
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_serialization.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_serialization.py deleted file mode 100644 index baa661cb82d..00000000000 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_serialization.py +++ /dev/null @@ -1,2005 +0,0 @@ -# -------------------------------------------------------------------------- -# -# Copyright (c) Microsoft Corporation. All rights reserved. -# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the ""Software""), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. 
-# -# -------------------------------------------------------------------------- - -# pylint: skip-file -# pyright: reportUnnecessaryTypeIgnoreComment=false - -from base64 import b64decode, b64encode -import calendar -import datetime -import decimal -import email -from enum import Enum -import json -import logging -import re -import sys -import codecs -from typing import ( - Dict, - Any, - cast, - Optional, - Union, - AnyStr, - IO, - Mapping, - Callable, - TypeVar, - MutableMapping, - Type, - List, - Mapping, -) - -try: - from urllib import quote # type: ignore -except ImportError: - from urllib.parse import quote -import xml.etree.ElementTree as ET - -import isodate # type: ignore - -from azure.core.exceptions import DeserializationError, SerializationError -from azure.core.serialization import NULL as CoreNull - -_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") - -ModelType = TypeVar("ModelType", bound="Model") -JSON = MutableMapping[str, Any] - - -class RawDeserializer: - - # Accept "text" because we're open minded people... - JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") - - # Name used in context - CONTEXT_NAME = "deserialized_data" - - @classmethod - def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: - """Decode data according to content-type. - - Accept a stream of data as well, but will be load at once in memory for now. - - If no content-type, will return the string version (not bytes, not stream) - - :param data: Input, could be bytes or stream (will be decoded with UTF8) or text - :type data: str or bytes or IO - :param str content_type: The content type. - """ - if hasattr(data, "read"): - # Assume a stream - data = cast(IO, data).read() - - if isinstance(data, bytes): - data_as_str = data.decode(encoding="utf-8-sig") - else: - # Explain to mypy the correct type. 
- data_as_str = cast(str, data) - - # Remove Byte Order Mark if present in string - data_as_str = data_as_str.lstrip(_BOM) - - if content_type is None: - return data - - if cls.JSON_REGEXP.match(content_type): - try: - return json.loads(data_as_str) - except ValueError as err: - raise DeserializationError("JSON is invalid: {}".format(err), err) - elif "xml" in (content_type or []): - try: - - try: - if isinstance(data, unicode): # type: ignore - # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string - data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore - except NameError: - pass - - return ET.fromstring(data_as_str) # nosec - except ET.ParseError as err: - # It might be because the server has an issue, and returned JSON with - # content-type XML.... - # So let's try a JSON load, and if it's still broken - # let's flow the initial exception - def _json_attemp(data): - try: - return True, json.loads(data) - except ValueError: - return False, None # Don't care about this one - - success, json_result = _json_attemp(data) - if success: - return json_result - # If i'm here, it's not JSON, it's not XML, let's scream - # and raise the last context in this block (the XML exception) - # The function hack is because Py2.7 messes up with exception - # context otherwise. - _LOGGER.critical("Wasn't XML not JSON, failing") - raise DeserializationError("XML is invalid") from err - raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) - - @classmethod - def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: - """Deserialize from HTTP response. - - Use bytes and headers to NOT use any requests/aiohttp or whatever - specific implementation. 
- Headers will tested for "content-type" - """ - # Try to use content-type from headers if available - content_type = None - if "content-type" in headers: - content_type = headers["content-type"].split(";")[0].strip().lower() - # Ouch, this server did not declare what it sent... - # Let's guess it's JSON... - # Also, since Autorest was considering that an empty body was a valid JSON, - # need that test as well.... - else: - content_type = "application/json" - - if body_bytes: - return cls.deserialize_from_text(body_bytes, content_type) - return None - - -try: - basestring # type: ignore - unicode_str = unicode # type: ignore -except NameError: - basestring = str - unicode_str = str - -_LOGGER = logging.getLogger(__name__) - -try: - _long_type = long # type: ignore -except NameError: - _long_type = int - - -class UTC(datetime.tzinfo): - """Time Zone info for handling UTC""" - - def utcoffset(self, dt): - """UTF offset for UTC is 0.""" - return datetime.timedelta(0) - - def tzname(self, dt): - """Timestamp representation.""" - return "Z" - - def dst(self, dt): - """No daylight saving for UTC.""" - return datetime.timedelta(hours=1) - - -try: - from datetime import timezone as _FixedOffset # type: ignore -except ImportError: # Python 2.7 - - class _FixedOffset(datetime.tzinfo): # type: ignore - """Fixed offset in minutes east from UTC. - Copy/pasted from Python doc - :param datetime.timedelta offset: offset in timedelta format - """ - - def __init__(self, offset): - self.__offset = offset - - def utcoffset(self, dt): - return self.__offset - - def tzname(self, dt): - return str(self.__offset.total_seconds() / 3600) - - def __repr__(self): - return "".format(self.tzname(None)) - - def dst(self, dt): - return datetime.timedelta(0) - - def __getinitargs__(self): - return (self.__offset,) - - -try: - from datetime import timezone - - TZ_UTC = timezone.utc -except ImportError: - TZ_UTC = UTC() # type: ignore - -_FLATTEN = re.compile(r"(? 
None: - self.additional_properties: Optional[Dict[str, Any]] = {} - for k in kwargs: - if k not in self._attribute_map: - _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) - elif k in self._validation and self._validation[k].get("readonly", False): - _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) - else: - setattr(self, k, kwargs[k]) - - def __eq__(self, other: Any) -> bool: - """Compare objects by comparing all attributes.""" - if isinstance(other, self.__class__): - return self.__dict__ == other.__dict__ - return False - - def __ne__(self, other: Any) -> bool: - """Compare objects by comparing all attributes.""" - return not self.__eq__(other) - - def __str__(self) -> str: - return str(self.__dict__) - - @classmethod - def enable_additional_properties_sending(cls) -> None: - cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} - - @classmethod - def is_xml_model(cls) -> bool: - try: - cls._xml_map # type: ignore - except AttributeError: - return False - return True - - @classmethod - def _create_xml_node(cls): - """Create XML node.""" - try: - xml_map = cls._xml_map # type: ignore - except AttributeError: - xml_map = {} - - return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) - - def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: - """Return the JSON that would be sent to server from this model. - - This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. - - If you want XML serialization, you can pass the kwargs is_xml=True. 
- - :param bool keep_readonly: If you want to serialize the readonly attributes - :returns: A dict JSON compatible object - :rtype: dict - """ - serializer = Serializer(self._infer_class_models()) - return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) # type: ignore - - def as_dict( - self, - keep_readonly: bool = True, - key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, - **kwargs: Any - ) -> JSON: - """Return a dict that can be serialized using json.dump. - - Advanced usage might optionally use a callback as parameter: - - .. code::python - - def my_key_transformer(key, attr_desc, value): - return key - - Key is the attribute name used in Python. Attr_desc - is a dict of metadata. Currently contains 'type' with the - msrest type and 'key' with the RestAPI encoded key. - Value is the current value in this object. - - The string returned will be used to serialize the key. - If the return type is a list, this is considered hierarchical - result dict. - - See the three examples in this file: - - - attribute_transformer - - full_restapi_key_transformer - - last_restapi_key_transformer - - If you want XML serialization, you can pass the kwargs is_xml=True. - - :param function key_transformer: A key transformer function. - :returns: A dict JSON compatible object - :rtype: dict - """ - serializer = Serializer(self._infer_class_models()) - return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) # type: ignore - - @classmethod - def _infer_class_models(cls): - try: - str_models = cls.__module__.rsplit(".", 1)[0] - models = sys.modules[str_models] - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} - if cls.__name__ not in client_models: - raise ValueError("Not Autorest generated code") - except Exception: - # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. 
- client_models = {cls.__name__: cls} - return client_models - - @classmethod - def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType: - """Parse a str using the RestAPI syntax and return a model. - - :param str data: A str using RestAPI structure. JSON by default. - :param str content_type: JSON by default, set application/xml if XML. - :returns: An instance of this model - :raises: DeserializationError if something went wrong - """ - deserializer = Deserializer(cls._infer_class_models()) - return deserializer(cls.__name__, data, content_type=content_type) # type: ignore - - @classmethod - def from_dict( - cls: Type[ModelType], - data: Any, - key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, - content_type: Optional[str] = None, - ) -> ModelType: - """Parse a dict using given key extractor return a model. - - By default consider key - extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor - and last_rest_key_case_insensitive_extractor) - - :param dict data: A dict using RestAPI structure - :param str content_type: JSON by default, set application/xml if XML. 
- :returns: An instance of this model - :raises: DeserializationError if something went wrong - """ - deserializer = Deserializer(cls._infer_class_models()) - deserializer.key_extractors = ( # type: ignore - [ # type: ignore - attribute_key_case_insensitive_extractor, - rest_key_case_insensitive_extractor, - last_rest_key_case_insensitive_extractor, - ] - if key_extractors is None - else key_extractors - ) - return deserializer(cls.__name__, data, content_type=content_type) # type: ignore - - @classmethod - def _flatten_subtype(cls, key, objects): - if "_subtype_map" not in cls.__dict__: - return {} - result = dict(cls._subtype_map[key]) - for valuetype in cls._subtype_map[key].values(): - result.update(objects[valuetype]._flatten_subtype(key, objects)) - return result - - @classmethod - def _classify(cls, response, objects): - """Check the class _subtype_map for any child classes. - We want to ignore any inherited _subtype_maps. - Remove the polymorphic key from the initial data. - """ - for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): - subtype_value = None - - if not isinstance(response, ET.Element): - rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] - subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None) - else: - subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) - if subtype_value: - # Try to match base class. 
Can be class name only - # (bug to fix in Autorest to support x-ms-discriminator-name) - if cls.__name__ == subtype_value: - return cls - flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) - try: - return objects[flatten_mapping_type[subtype_value]] # type: ignore - except KeyError: - _LOGGER.warning( - "Subtype value %s has no mapping, use base class %s.", - subtype_value, - cls.__name__, - ) - break - else: - _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) - break - return cls - - @classmethod - def _get_rest_key_parts(cls, attr_key): - """Get the RestAPI key of this attr, split it and decode part - :param str attr_key: Attribute key must be in attribute_map. - :returns: A list of RestAPI part - :rtype: list - """ - rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) - return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] - - -def _decode_attribute_map_key(key): - """This decode a key in an _attribute_map to the actual key we want to look at - inside the received data. 
- - :param str key: A key string from the generated code - """ - return key.replace("\\.", ".") - - -class Serializer(object): - """Request object model serializer.""" - - basic_types = {str: "str", int: "int", bool: "bool", float: "float"} - - _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} - days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} - months = { - 1: "Jan", - 2: "Feb", - 3: "Mar", - 4: "Apr", - 5: "May", - 6: "Jun", - 7: "Jul", - 8: "Aug", - 9: "Sep", - 10: "Oct", - 11: "Nov", - 12: "Dec", - } - validation = { - "min_length": lambda x, y: len(x) < y, - "max_length": lambda x, y: len(x) > y, - "minimum": lambda x, y: x < y, - "maximum": lambda x, y: x > y, - "minimum_ex": lambda x, y: x <= y, - "maximum_ex": lambda x, y: x >= y, - "min_items": lambda x, y: len(x) < y, - "max_items": lambda x, y: len(x) > y, - "pattern": lambda x, y: not re.match(y, x, re.UNICODE), - "unique": lambda x, y: len(x) != len(set(x)), - "multiple": lambda x, y: x % y != 0, - } - - def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None): - self.serialize_type = { - "iso-8601": Serializer.serialize_iso, - "rfc-1123": Serializer.serialize_rfc, - "unix-time": Serializer.serialize_unix, - "duration": Serializer.serialize_duration, - "date": Serializer.serialize_date, - "time": Serializer.serialize_time, - "decimal": Serializer.serialize_decimal, - "long": Serializer.serialize_long, - "bytearray": Serializer.serialize_bytearray, - "base64": Serializer.serialize_base64, - "object": self.serialize_object, - "[]": self.serialize_iter, - "{}": self.serialize_dict, - } - self.dependencies: Dict[str, Type[ModelType]] = dict(classes) if classes else {} - self.key_transformer = full_restapi_key_transformer - self.client_side_validation = True - - def _serialize(self, target_obj, data_type=None, **kwargs): - """Serialize data into a string according to type. - - :param target_obj: The data to be serialized. 
- :param str data_type: The type to be serialized from. - :rtype: str, dict - :raises: SerializationError if serialization fails. - """ - key_transformer = kwargs.get("key_transformer", self.key_transformer) - keep_readonly = kwargs.get("keep_readonly", False) - if target_obj is None: - return None - - attr_name = None - class_name = target_obj.__class__.__name__ - - if data_type: - return self.serialize_data(target_obj, data_type, **kwargs) - - if not hasattr(target_obj, "_attribute_map"): - data_type = type(target_obj).__name__ - if data_type in self.basic_types.values(): - return self.serialize_data(target_obj, data_type, **kwargs) - - # Force "is_xml" kwargs if we detect a XML model - try: - is_xml_model_serialization = kwargs["is_xml"] - except KeyError: - is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) - - serialized = {} - if is_xml_model_serialization: - serialized = target_obj._create_xml_node() - try: - attributes = target_obj._attribute_map - for attr, attr_desc in attributes.items(): - attr_name = attr - if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False): - continue - - if attr_name == "additional_properties" and attr_desc["key"] == "": - if target_obj.additional_properties is not None: - serialized.update(target_obj.additional_properties) - continue - try: - - orig_attr = getattr(target_obj, attr) - if is_xml_model_serialization: - pass # Don't provide "transformer" for XML for now. 
Keep "orig_attr" - else: # JSON - keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) - keys = keys if isinstance(keys, list) else [keys] - - kwargs["serialization_ctxt"] = attr_desc - new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) - - if is_xml_model_serialization: - xml_desc = attr_desc.get("xml", {}) - xml_name = xml_desc.get("name", attr_desc["key"]) - xml_prefix = xml_desc.get("prefix", None) - xml_ns = xml_desc.get("ns", None) - if xml_desc.get("attr", False): - if xml_ns: - ET.register_namespace(xml_prefix, xml_ns) - xml_name = "{{{}}}{}".format(xml_ns, xml_name) - serialized.set(xml_name, new_attr) # type: ignore - continue - if xml_desc.get("text", False): - serialized.text = new_attr # type: ignore - continue - if isinstance(new_attr, list): - serialized.extend(new_attr) # type: ignore - elif isinstance(new_attr, ET.Element): - # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces. - if "name" not in getattr(orig_attr, "_xml_map", {}): - splitted_tag = new_attr.tag.split("}") - if len(splitted_tag) == 2: # Namespace - new_attr.tag = "}".join([splitted_tag[0], xml_name]) - else: - new_attr.tag = xml_name - serialized.append(new_attr) # type: ignore - else: # That's a basic type - # Integrate namespace if necessary - local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) - local_node.text = unicode_str(new_attr) - serialized.append(local_node) # type: ignore - else: # JSON - for k in reversed(keys): # type: ignore - new_attr = {k: new_attr} - - _new_attr = new_attr - _serialized = serialized - for k in keys: # type: ignore - if k not in _serialized: - _serialized.update(_new_attr) # type: ignore - _new_attr = _new_attr[k] # type: ignore - _serialized = _serialized[k] - except ValueError as err: - if isinstance(err, SerializationError): - raise - - except (AttributeError, KeyError, TypeError) as err: - msg = "Attribute {} in object {} cannot be 
serialized.\n{}".format(attr_name, class_name, str(target_obj)) - raise SerializationError(msg) from err - else: - return serialized - - def body(self, data, data_type, **kwargs): - """Serialize data intended for a request body. - - :param data: The data to be serialized. - :param str data_type: The type to be serialized from. - :rtype: dict - :raises: SerializationError if serialization fails. - :raises: ValueError if data is None - """ - - # Just in case this is a dict - internal_data_type_str = data_type.strip("[]{}") - internal_data_type = self.dependencies.get(internal_data_type_str, None) - try: - is_xml_model_serialization = kwargs["is_xml"] - except KeyError: - if internal_data_type and issubclass(internal_data_type, Model): - is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) - else: - is_xml_model_serialization = False - if internal_data_type and not isinstance(internal_data_type, Enum): - try: - deserializer = Deserializer(self.dependencies) - # Since it's on serialization, it's almost sure that format is not JSON REST - # We're not able to deal with additional properties for now. - deserializer.additional_properties_detection = False - if is_xml_model_serialization: - deserializer.key_extractors = [ # type: ignore - attribute_key_case_insensitive_extractor, - ] - else: - deserializer.key_extractors = [ - rest_key_case_insensitive_extractor, - attribute_key_case_insensitive_extractor, - last_rest_key_case_insensitive_extractor, - ] - data = deserializer._deserialize(data_type, data) - except DeserializationError as err: - raise SerializationError("Unable to build a model: " + str(err)) from err - - return self._serialize(data, data_type, **kwargs) - - def url(self, name, data, data_type, **kwargs): - """Serialize data intended for a URL path. - - :param data: The data to be serialized. - :param str data_type: The type to be serialized from. - :rtype: str - :raises: TypeError if serialization fails. 
- :raises: ValueError if data is None - """ - try: - output = self.serialize_data(data, data_type, **kwargs) - if data_type == "bool": - output = json.dumps(output) - - if kwargs.get("skip_quote") is True: - output = str(output) - output = output.replace("{", quote("{")).replace("}", quote("}")) - else: - output = quote(str(output), safe="") - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return output - - def query(self, name, data, data_type, **kwargs): - """Serialize data intended for a URL query. - - :param data: The data to be serialized. - :param str data_type: The type to be serialized from. - :keyword bool skip_quote: Whether to skip quote the serialized result. - Defaults to False. - :rtype: str, list - :raises: TypeError if serialization fails. - :raises: ValueError if data is None - """ - try: - # Treat the list aside, since we don't want to encode the div separator - if data_type.startswith("["): - internal_data_type = data_type[1:-1] - do_quote = not kwargs.get("skip_quote", False) - return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) - - # Not a list, regular serialization - output = self.serialize_data(data, data_type, **kwargs) - if data_type == "bool": - output = json.dumps(output) - if kwargs.get("skip_quote") is True: - output = str(output) - else: - output = quote(str(output), safe="") - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return str(output) - - def header(self, name, data, data_type, **kwargs): - """Serialize data intended for a request header. - - :param data: The data to be serialized. - :param str data_type: The type to be serialized from. - :rtype: str - :raises: TypeError if serialization fails. 
- :raises: ValueError if data is None - """ - try: - if data_type in ["[str]"]: - data = ["" if d is None else d for d in data] - - output = self.serialize_data(data, data_type, **kwargs) - if data_type == "bool": - output = json.dumps(output) - except SerializationError: - raise TypeError("{} must be type {}.".format(name, data_type)) - else: - return str(output) - - def serialize_data(self, data, data_type, **kwargs): - """Serialize generic data according to supplied data type. - - :param data: The data to be serialized. - :param str data_type: The type to be serialized from. - :param bool required: Whether it's essential that the data not be - empty or None - :raises: AttributeError if required data is None. - :raises: ValueError if data is None - :raises: SerializationError if serialization fails. - """ - if data is None: - raise ValueError("No value for given attribute") - - try: - if data is CoreNull: - return None - if data_type in self.basic_types.values(): - return self.serialize_basic(data, data_type, **kwargs) - - elif data_type in self.serialize_type: - return self.serialize_type[data_type](data, **kwargs) - - # If dependencies is empty, try with current data class - # It has to be a subclass of Enum anyway - enum_type = self.dependencies.get(data_type, data.__class__) - if issubclass(enum_type, Enum): - return Serializer.serialize_enum(data, enum_obj=enum_type) - - iter_type = data_type[0] + data_type[-1] - if iter_type in self.serialize_type: - return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) - - except (ValueError, TypeError) as err: - msg = "Unable to serialize value: {!r} as type: {!r}." 
- raise SerializationError(msg.format(data, data_type)) from err - else: - return self._serialize(data, **kwargs) - - @classmethod - def _get_custom_serializers(cls, data_type, **kwargs): - custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) - if custom_serializer: - return custom_serializer - if kwargs.get("is_xml", False): - return cls._xml_basic_types_serializers.get(data_type) - - @classmethod - def serialize_basic(cls, data, data_type, **kwargs): - """Serialize basic builting data type. - Serializes objects to str, int, float or bool. - - Possible kwargs: - - basic_types_serializers dict[str, callable] : If set, use the callable as serializer - - is_xml bool : If set, use xml_basic_types_serializers - - :param data: Object to be serialized. - :param str data_type: Type of object in the iterable. - """ - custom_serializer = cls._get_custom_serializers(data_type, **kwargs) - if custom_serializer: - return custom_serializer(data) - if data_type == "str": - return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec - - @classmethod - def serialize_unicode(cls, data): - """Special handling for serializing unicode strings in Py2. - Encode to UTF-8 if unicode, otherwise handle as a str. - - :param data: Object to be serialized. - :rtype: str - """ - try: # If I received an enum, return its value - return data.value - except AttributeError: - pass - - try: - if isinstance(data, unicode): # type: ignore - # Don't change it, JSON and XML ElementTree are totally able - # to serialize correctly u'' strings - return data - except NameError: - return str(data) - else: - return str(data) - - def serialize_iter(self, data, iter_type, div=None, **kwargs): - """Serialize iterable. - - Supported kwargs: - - serialization_ctxt dict : The current entry of _attribute_map, or same format. - serialization_ctxt['type'] should be same as data_type. - - is_xml bool : If set, serialize as XML - - :param list attr: Object to be serialized. 
- :param str iter_type: Type of object in the iterable. - :param bool required: Whether the objects in the iterable must - not be None or empty. - :param str div: If set, this str will be used to combine the elements - in the iterable into a combined string. Default is 'None'. - :keyword bool do_quote: Whether to quote the serialized result of each iterable element. - Defaults to False. - :rtype: list, str - """ - if isinstance(data, str): - raise SerializationError("Refuse str type as a valid iter type.") - - serialization_ctxt = kwargs.get("serialization_ctxt", {}) - is_xml = kwargs.get("is_xml", False) - - serialized = [] - for d in data: - try: - serialized.append(self.serialize_data(d, iter_type, **kwargs)) - except ValueError as err: - if isinstance(err, SerializationError): - raise - serialized.append(None) - - if kwargs.get("do_quote", False): - serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] - - if div: - serialized = ["" if s is None else str(s) for s in serialized] - serialized = div.join(serialized) - - if "xml" in serialization_ctxt or is_xml: - # XML serialization is more complicated - xml_desc = serialization_ctxt.get("xml", {}) - xml_name = xml_desc.get("name") - if not xml_name: - xml_name = serialization_ctxt["key"] - - # Create a wrap node if necessary (use the fact that Element and list have "append") - is_wrapped = xml_desc.get("wrapped", False) - node_name = xml_desc.get("itemsName", xml_name) - if is_wrapped: - final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) - else: - final_result = [] - # All list elements to "local_node" - for el in serialized: - if isinstance(el, ET.Element): - el_node = el - else: - el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) - if el is not None: # Otherwise it writes "None" :-p - el_node.text = str(el) - final_result.append(el_node) - return final_result - return serialized - - def 
serialize_dict(self, attr, dict_type, **kwargs): - """Serialize a dictionary of objects. - - :param dict attr: Object to be serialized. - :param str dict_type: Type of object in the dictionary. - :param bool required: Whether the objects in the dictionary must - not be None or empty. - :rtype: dict - """ - serialization_ctxt = kwargs.get("serialization_ctxt", {}) - serialized = {} - for key, value in attr.items(): - try: - serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) - except ValueError as err: - if isinstance(err, SerializationError): - raise - serialized[self.serialize_unicode(key)] = None - - if "xml" in serialization_ctxt: - # XML serialization is more complicated - xml_desc = serialization_ctxt["xml"] - xml_name = xml_desc["name"] - - final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) - for key, value in serialized.items(): - ET.SubElement(final_result, key).text = value - return final_result - - return serialized - - def serialize_object(self, attr, **kwargs): - """Serialize a generic object. - This will be handled as a dictionary. If object passed in is not - a basic type (str, int, float, dict, list) it will simply be - cast to str. - - :param dict attr: Object to be serialized. 
- :rtype: dict or str - """ - if attr is None: - return None - if isinstance(attr, ET.Element): - return attr - obj_type = type(attr) - if obj_type in self.basic_types: - return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) - if obj_type is _long_type: - return self.serialize_long(attr) - if obj_type is unicode_str: - return self.serialize_unicode(attr) - if obj_type is datetime.datetime: - return self.serialize_iso(attr) - if obj_type is datetime.date: - return self.serialize_date(attr) - if obj_type is datetime.time: - return self.serialize_time(attr) - if obj_type is datetime.timedelta: - return self.serialize_duration(attr) - if obj_type is decimal.Decimal: - return self.serialize_decimal(attr) - - # If it's a model or I know this dependency, serialize as a Model - elif obj_type in self.dependencies.values() or isinstance(attr, Model): - return self._serialize(attr) - - if obj_type == dict: - serialized = {} - for key, value in attr.items(): - try: - serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) - except ValueError: - serialized[self.serialize_unicode(key)] = None - return serialized - - if obj_type == list: - serialized = [] - for obj in attr: - try: - serialized.append(self.serialize_object(obj, **kwargs)) - except ValueError: - pass - return serialized - return str(attr) - - @staticmethod - def serialize_enum(attr, enum_obj=None): - try: - result = attr.value - except AttributeError: - result = attr - try: - enum_obj(result) # type: ignore - return result - except ValueError: - for enum_value in enum_obj: # type: ignore - if enum_value.value.lower() == str(attr).lower(): - return enum_value.value - error = "{!r} is not valid value for enum {!r}" - raise SerializationError(error.format(attr, enum_obj)) - - @staticmethod - def serialize_bytearray(attr, **kwargs): - """Serialize bytearray into base-64 string. - - :param attr: Object to be serialized. 
- :rtype: str - """ - return b64encode(attr).decode() - - @staticmethod - def serialize_base64(attr, **kwargs): - """Serialize str into base-64 string. - - :param attr: Object to be serialized. - :rtype: str - """ - encoded = b64encode(attr).decode("ascii") - return encoded.strip("=").replace("+", "-").replace("/", "_") - - @staticmethod - def serialize_decimal(attr, **kwargs): - """Serialize Decimal object to float. - - :param attr: Object to be serialized. - :rtype: float - """ - return float(attr) - - @staticmethod - def serialize_long(attr, **kwargs): - """Serialize long (Py2) or int (Py3). - - :param attr: Object to be serialized. - :rtype: int/long - """ - return _long_type(attr) - - @staticmethod - def serialize_date(attr, **kwargs): - """Serialize Date object into ISO-8601 formatted string. - - :param Date attr: Object to be serialized. - :rtype: str - """ - if isinstance(attr, str): - attr = isodate.parse_date(attr) - t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) - return t - - @staticmethod - def serialize_time(attr, **kwargs): - """Serialize Time object into ISO-8601 formatted string. - - :param datetime.time attr: Object to be serialized. - :rtype: str - """ - if isinstance(attr, str): - attr = isodate.parse_time(attr) - t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) - if attr.microsecond: - t += ".{:02}".format(attr.microsecond) - return t - - @staticmethod - def serialize_duration(attr, **kwargs): - """Serialize TimeDelta object into ISO-8601 formatted string. - - :param TimeDelta attr: Object to be serialized. - :rtype: str - """ - if isinstance(attr, str): - attr = isodate.parse_duration(attr) - return isodate.duration_isoformat(attr) - - @staticmethod - def serialize_rfc(attr, **kwargs): - """Serialize Datetime object into RFC-1123 formatted string. - - :param Datetime attr: Object to be serialized. - :rtype: str - :raises: TypeError if format invalid. 
- """ - try: - if not attr.tzinfo: - _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") - utc = attr.utctimetuple() - except AttributeError: - raise TypeError("RFC1123 object must be valid Datetime object.") - - return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( - Serializer.days[utc.tm_wday], - utc.tm_mday, - Serializer.months[utc.tm_mon], - utc.tm_year, - utc.tm_hour, - utc.tm_min, - utc.tm_sec, - ) - - @staticmethod - def serialize_iso(attr, **kwargs): - """Serialize Datetime object into ISO-8601 formatted string. - - :param Datetime attr: Object to be serialized. - :rtype: str - :raises: SerializationError if format invalid. - """ - if isinstance(attr, str): - attr = isodate.parse_datetime(attr) - try: - if not attr.tzinfo: - _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") - utc = attr.utctimetuple() - if utc.tm_year > 9999 or utc.tm_year < 1: - raise OverflowError("Hit max or min date") - - microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") - if microseconds: - microseconds = "." + microseconds - date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( - utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec - ) - return date + microseconds + "Z" - except (ValueError, OverflowError) as err: - msg = "Unable to serialize datetime object." - raise SerializationError(msg) from err - except AttributeError as err: - msg = "ISO-8601 object must be valid Datetime object." - raise TypeError(msg) from err - - @staticmethod - def serialize_unix(attr, **kwargs): - """Serialize Datetime object into IntTime format. - This is represented as seconds. - - :param Datetime attr: Object to be serialized. 
- :rtype: int - :raises: SerializationError if format invalid - """ - if isinstance(attr, int): - return attr - try: - if not attr.tzinfo: - _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") - return int(calendar.timegm(attr.utctimetuple())) - except AttributeError: - raise TypeError("Unix time object must be valid Datetime object.") - - -def rest_key_extractor(attr, attr_desc, data): - key = attr_desc["key"] - working_data = data - - while "." in key: - # Need the cast, as for some reasons "split" is typed as list[str | Any] - dict_keys = cast(List[str], _FLATTEN.split(key)) - if len(dict_keys) == 1: - key = _decode_attribute_map_key(dict_keys[0]) - break - working_key = _decode_attribute_map_key(dict_keys[0]) - working_data = working_data.get(working_key, data) - if working_data is None: - # If at any point while following flatten JSON path see None, it means - # that all properties under are None as well - return None - key = ".".join(dict_keys[1:]) - - return working_data.get(key) - - -def rest_key_case_insensitive_extractor(attr, attr_desc, data): - key = attr_desc["key"] - working_data = data - - while "." 
in key: - dict_keys = _FLATTEN.split(key) - if len(dict_keys) == 1: - key = _decode_attribute_map_key(dict_keys[0]) - break - working_key = _decode_attribute_map_key(dict_keys[0]) - working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) - if working_data is None: - # If at any point while following flatten JSON path see None, it means - # that all properties under are None as well - return None - key = ".".join(dict_keys[1:]) - - if working_data: - return attribute_key_case_insensitive_extractor(key, None, working_data) - - -def last_rest_key_extractor(attr, attr_desc, data): - """Extract the attribute in "data" based on the last part of the JSON path key.""" - key = attr_desc["key"] - dict_keys = _FLATTEN.split(key) - return attribute_key_extractor(dict_keys[-1], None, data) - - -def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): - """Extract the attribute in "data" based on the last part of the JSON path key. - - This is the case insensitive version of "last_rest_key_extractor" - """ - key = attr_desc["key"] - dict_keys = _FLATTEN.split(key) - return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) - - -def attribute_key_extractor(attr, _, data): - return data.get(attr) - - -def attribute_key_case_insensitive_extractor(attr, _, data): - found_key = None - lower_attr = attr.lower() - for key in data: - if lower_attr == key.lower(): - found_key = key - break - - return data.get(found_key) - - -def _extract_name_from_internal_type(internal_type): - """Given an internal type XML description, extract correct XML name with namespace. 
- - :param dict internal_type: An model type - :rtype: tuple - :returns: A tuple XML name + namespace dict - """ - internal_type_xml_map = getattr(internal_type, "_xml_map", {}) - xml_name = internal_type_xml_map.get("name", internal_type.__name__) - xml_ns = internal_type_xml_map.get("ns", None) - if xml_ns: - xml_name = "{{{}}}{}".format(xml_ns, xml_name) - return xml_name - - -def xml_key_extractor(attr, attr_desc, data): - if isinstance(data, dict): - return None - - # Test if this model is XML ready first - if not isinstance(data, ET.Element): - return None - - xml_desc = attr_desc.get("xml", {}) - xml_name = xml_desc.get("name", attr_desc["key"]) - - # Look for a children - is_iter_type = attr_desc["type"].startswith("[") - is_wrapped = xml_desc.get("wrapped", False) - internal_type = attr_desc.get("internalType", None) - internal_type_xml_map = getattr(internal_type, "_xml_map", {}) - - # Integrate namespace if necessary - xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) - if xml_ns: - xml_name = "{{{}}}{}".format(xml_ns, xml_name) - - # If it's an attribute, that's simple - if xml_desc.get("attr", False): - return data.get(xml_name) - - # If it's x-ms-text, that's simple too - if xml_desc.get("text", False): - return data.text - - # Scenario where I take the local name: - # - Wrapped node - # - Internal type is an enum (considered basic types) - # - Internal type has no XML/Name node - if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): - children = data.findall(xml_name) - # If internal type has a local name and it's not a list, I use that name - elif not is_iter_type and internal_type and "name" in internal_type_xml_map: - xml_name = _extract_name_from_internal_type(internal_type) - children = data.findall(xml_name) - # That's an array - else: - if internal_type: # Complex type, ignore itemsName and use the complex type name - items_name = 
_extract_name_from_internal_type(internal_type) - else: - items_name = xml_desc.get("itemsName", xml_name) - children = data.findall(items_name) - - if len(children) == 0: - if is_iter_type: - if is_wrapped: - return None # is_wrapped no node, we want None - else: - return [] # not wrapped, assume empty list - return None # Assume it's not there, maybe an optional node. - - # If is_iter_type and not wrapped, return all found children - if is_iter_type: - if not is_wrapped: - return children - else: # Iter and wrapped, should have found one node only (the wrap one) - if len(children) != 1: - raise DeserializationError( - "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( - xml_name - ) - ) - return list(children[0]) # Might be empty list and that's ok. - - # Here it's not a itertype, we should have found one element only or empty - if len(children) > 1: - raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) - return children[0] - - -class Deserializer(object): - """Response object model deserializer. - - :param dict classes: Class type dictionary for deserializing complex types. - :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. 
- """ - - basic_types = {str: "str", int: "int", bool: "bool", float: "float"} - - valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") - - def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None): - self.deserialize_type = { - "iso-8601": Deserializer.deserialize_iso, - "rfc-1123": Deserializer.deserialize_rfc, - "unix-time": Deserializer.deserialize_unix, - "duration": Deserializer.deserialize_duration, - "date": Deserializer.deserialize_date, - "time": Deserializer.deserialize_time, - "decimal": Deserializer.deserialize_decimal, - "long": Deserializer.deserialize_long, - "bytearray": Deserializer.deserialize_bytearray, - "base64": Deserializer.deserialize_base64, - "object": self.deserialize_object, - "[]": self.deserialize_iter, - "{}": self.deserialize_dict, - } - self.deserialize_expected_types = { - "duration": (isodate.Duration, datetime.timedelta), - "iso-8601": (datetime.datetime), - } - self.dependencies: Dict[str, Type[ModelType]] = dict(classes) if classes else {} - self.key_extractors = [rest_key_extractor, xml_key_extractor] - # Additional properties only works if the "rest_key_extractor" is used to - # extract the keys. Making it to work whatever the key extractor is too much - # complicated, with no real scenario for now. - # So adding a flag to disable additional properties detection. This flag should be - # used if your expect the deserialization to NOT come from a JSON REST syntax. - # Otherwise, result are unexpected - self.additional_properties_detection = True - - def __call__(self, target_obj, response_data, content_type=None): - """Call the deserializer to process a REST response. - - :param str target_obj: Target data type to deserialize to. - :param requests.Response response_data: REST response object. - :param str content_type: Swagger "produces" if available. - :raises: DeserializationError if deserialization fails. - :return: Deserialized object. 
- """ - data = self._unpack_content(response_data, content_type) - return self._deserialize(target_obj, data) - - def _deserialize(self, target_obj, data): - """Call the deserializer on a model. - - Data needs to be already deserialized as JSON or XML ElementTree - - :param str target_obj: Target data type to deserialize to. - :param object data: Object to deserialize. - :raises: DeserializationError if deserialization fails. - :return: Deserialized object. - """ - # This is already a model, go recursive just in case - if hasattr(data, "_attribute_map"): - constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] - try: - for attr, mapconfig in data._attribute_map.items(): - if attr in constants: - continue - value = getattr(data, attr) - if value is None: - continue - local_type = mapconfig["type"] - internal_data_type = local_type.strip("[]{}") - if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): - continue - setattr(data, attr, self._deserialize(local_type, value)) - return data - except AttributeError: - return - - response, class_name = self._classify_target(target_obj, data) - - if isinstance(response, basestring): - return self.deserialize_data(data, response) - elif isinstance(response, type) and issubclass(response, Enum): - return self.deserialize_enum(data, response) - - if data is None: - return data - try: - attributes = response._attribute_map # type: ignore - d_attrs = {} - for attr, attr_desc in attributes.items(): - # Check empty string. If it's not empty, someone has a real "additionalProperties"... 
- if attr == "additional_properties" and attr_desc["key"] == "": - continue - raw_value = None - # Enhance attr_desc with some dynamic data - attr_desc = attr_desc.copy() # Do a copy, do not change the real one - internal_data_type = attr_desc["type"].strip("[]{}") - if internal_data_type in self.dependencies: - attr_desc["internalType"] = self.dependencies[internal_data_type] - - for key_extractor in self.key_extractors: - found_value = key_extractor(attr, attr_desc, data) - if found_value is not None: - if raw_value is not None and raw_value != found_value: - msg = ( - "Ignoring extracted value '%s' from %s for key '%s'" - " (duplicate extraction, follow extractors order)" - ) - _LOGGER.warning(msg, found_value, key_extractor, attr) - continue - raw_value = found_value - - value = self.deserialize_data(raw_value, attr_desc["type"]) - d_attrs[attr] = value - except (AttributeError, TypeError, KeyError) as err: - msg = "Unable to deserialize to object: " + class_name # type: ignore - raise DeserializationError(msg) from err - else: - additional_properties = self._build_additional_properties(attributes, data) - return self._instantiate_model(response, d_attrs, additional_properties) - - def _build_additional_properties(self, attribute_map, data): - if not self.additional_properties_detection: - return None - if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": - # Check empty string. 
If it's not empty, someone has a real "additionalProperties" - return None - if isinstance(data, ET.Element): - data = {el.tag: el.text for el in data} - - known_keys = { - _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) - for desc in attribute_map.values() - if desc["key"] != "" - } - present_keys = set(data.keys()) - missing_keys = present_keys - known_keys - return {key: data[key] for key in missing_keys} - - def _classify_target(self, target, data): - """Check to see whether the deserialization target object can - be classified into a subclass. - Once classification has been determined, initialize object. - - :param str target: The target object type to deserialize to. - :param str/dict data: The response data to deserialize. - """ - if target is None: - return None, None - - if isinstance(target, basestring): - try: - target = self.dependencies[target] - except KeyError: - return target, target - - try: - target = target._classify(data, self.dependencies) - except AttributeError: - pass # Target is not a Model, no classify - return target, target.__class__.__name__ # type: ignore - - def failsafe_deserialize(self, target_obj, data, content_type=None): - """Ignores any errors encountered in deserialization, - and falls back to not deserializing the object. Recommended - for use in error deserialization, as we want to return the - HttpResponseError to users, and not have them deal with - a deserialization error. - - :param str target_obj: The target object type to deserialize to. - :param str/dict data: The response data to deserialize. - :param str content_type: Swagger "produces" if available. - """ - try: - return self(target_obj, data, content_type=content_type) - except: - _LOGGER.debug( - "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True - ) - return None - - @staticmethod - def _unpack_content(raw_data, content_type=None): - """Extract the correct structure for deserialization. 
- - If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. - if we can't, raise. Your Pipeline should have a RawDeserializer. - - If not a pipeline response and raw_data is bytes or string, use content-type - to decode it. If no content-type, try JSON. - - If raw_data is something else, bypass all logic and return it directly. - - :param raw_data: Data to be processed. - :param content_type: How to parse if raw_data is a string/bytes. - :raises JSONDecodeError: If JSON is requested and parsing is impossible. - :raises UnicodeDecodeError: If bytes is not UTF8 - """ - # Assume this is enough to detect a Pipeline Response without importing it - context = getattr(raw_data, "context", {}) - if context: - if RawDeserializer.CONTEXT_NAME in context: - return context[RawDeserializer.CONTEXT_NAME] - raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") - - # Assume this is enough to recognize universal_http.ClientResponse without importing it - if hasattr(raw_data, "body"): - return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) - - # Assume this enough to recognize requests.Response without importing it. - if hasattr(raw_data, "_content_consumed"): - return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) - - if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, "read"): - return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore - return raw_data - - def _instantiate_model(self, response, attrs, additional_properties=None): - """Instantiate a response model passing in deserialized args. - - :param response: The response model class. - :param d_attrs: The deserialized response attributes. 
- """ - if callable(response): - subtype = getattr(response, "_subtype_map", {}) - try: - readonly = [k for k, v in response._validation.items() if v.get("readonly")] - const = [k for k, v in response._validation.items() if v.get("constant")] - kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} - response_obj = response(**kwargs) - for attr in readonly: - setattr(response_obj, attr, attrs.get(attr)) - if additional_properties: - response_obj.additional_properties = additional_properties - return response_obj - except TypeError as err: - msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore - raise DeserializationError(msg + str(err)) - else: - try: - for attr, value in attrs.items(): - setattr(response, attr, value) - return response - except Exception as exp: - msg = "Unable to populate response model. " - msg += "Type: {}, Error: {}".format(type(response), exp) - raise DeserializationError(msg) - - def deserialize_data(self, data, data_type): - """Process data for deserialization according to data type. - - :param str data: The response string to be deserialized. - :param str data_type: The type to deserialize to. - :raises: DeserializationError if deserialization fails. - :return: Deserialized object. 
- """ - if data is None: - return data - - try: - if not data_type: - return data - if data_type in self.basic_types.values(): - return self.deserialize_basic(data, data_type) - if data_type in self.deserialize_type: - if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): - return data - - is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] - if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: - return None - data_val = self.deserialize_type[data_type](data) - return data_val - - iter_type = data_type[0] + data_type[-1] - if iter_type in self.deserialize_type: - return self.deserialize_type[iter_type](data, data_type[1:-1]) - - obj_type = self.dependencies[data_type] - if issubclass(obj_type, Enum): - if isinstance(data, ET.Element): - data = data.text - return self.deserialize_enum(data, obj_type) - - except (ValueError, TypeError, AttributeError) as err: - msg = "Unable to deserialize response data." - msg += " Data: {}, {}".format(data, data_type) - raise DeserializationError(msg) from err - else: - return self._deserialize(obj_type, data) - - def deserialize_iter(self, attr, iter_type): - """Deserialize an iterable. - - :param list attr: Iterable to be deserialized. - :param str iter_type: The type of object in the iterable. - :rtype: list - """ - if attr is None: - return None - if isinstance(attr, ET.Element): # If I receive an element here, get the children - attr = list(attr) - if not isinstance(attr, (list, set)): - raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) - return [self.deserialize_data(a, iter_type) for a in attr] - - def deserialize_dict(self, attr, dict_type): - """Deserialize a dictionary. - - :param dict/list attr: Dictionary to be deserialized. Also accepts - a list of key, value pairs. - :param str dict_type: The object type of the items in the dictionary. 
- :rtype: dict - """ - if isinstance(attr, list): - return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} - - if isinstance(attr, ET.Element): - # Transform value into {"Key": "value"} - attr = {el.tag: el.text for el in attr} - return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} - - def deserialize_object(self, attr, **kwargs): - """Deserialize a generic object. - This will be handled as a dictionary. - - :param dict attr: Dictionary to be deserialized. - :rtype: dict - :raises: TypeError if non-builtin datatype encountered. - """ - if attr is None: - return None - if isinstance(attr, ET.Element): - # Do no recurse on XML, just return the tree as-is - return attr - if isinstance(attr, basestring): - return self.deserialize_basic(attr, "str") - obj_type = type(attr) - if obj_type in self.basic_types: - return self.deserialize_basic(attr, self.basic_types[obj_type]) - if obj_type is _long_type: - return self.deserialize_long(attr) - - if obj_type == dict: - deserialized = {} - for key, value in attr.items(): - try: - deserialized[key] = self.deserialize_object(value, **kwargs) - except ValueError: - deserialized[key] = None - return deserialized - - if obj_type == list: - deserialized = [] - for obj in attr: - try: - deserialized.append(self.deserialize_object(obj, **kwargs)) - except ValueError: - pass - return deserialized - - else: - error = "Cannot deserialize generic object with type: " - raise TypeError(error + str(obj_type)) - - def deserialize_basic(self, attr, data_type): - """Deserialize basic builtin data type from string. - Will attempt to convert to str, int, float and bool. - This function will also accept '1', '0', 'true' and 'false' as - valid bool values. - - :param str attr: response string to be deserialized. - :param str data_type: deserialization data type. - :rtype: str, int, float or bool - :raises: TypeError if string format is not valid. 
- """ - # If we're here, data is supposed to be a basic type. - # If it's still an XML node, take the text - if isinstance(attr, ET.Element): - attr = attr.text - if not attr: - if data_type == "str": - # None or '', node is empty string. - return "" - else: - # None or '', node with a strong type is None. - # Don't try to model "empty bool" or "empty int" - return None - - if data_type == "bool": - if attr in [True, False, 1, 0]: - return bool(attr) - elif isinstance(attr, basestring): - if attr.lower() in ["true", "1"]: - return True - elif attr.lower() in ["false", "0"]: - return False - raise TypeError("Invalid boolean value: {}".format(attr)) - - if data_type == "str": - return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec - - @staticmethod - def deserialize_unicode(data): - """Preserve unicode objects in Python 2, otherwise return data - as a string. - - :param str data: response string to be deserialized. - :rtype: str or unicode - """ - # We might be here because we have an enum modeled as string, - # and we try to deserialize a partial dict with enum inside - if isinstance(data, Enum): - return data - - # Consider this is real string - try: - if isinstance(data, unicode): # type: ignore - return data - except NameError: - return str(data) - else: - return str(data) - - @staticmethod - def deserialize_enum(data, enum_obj): - """Deserialize string into enum object. - - If the string is not a valid enum value it will be returned as-is - and a warning will be logged. - - :param str data: Response string to be deserialized. If this value is - None or invalid it will be returned as-is. - :param Enum enum_obj: Enum object to deserialize to. - :rtype: Enum - """ - if isinstance(data, enum_obj) or data is None: - return data - if isinstance(data, Enum): - data = data.value - if isinstance(data, int): - # Workaround. We might consider remove it in the future. 
- try: - return list(enum_obj.__members__.values())[data] - except IndexError: - error = "{!r} is not a valid index for enum {!r}" - raise DeserializationError(error.format(data, enum_obj)) - try: - return enum_obj(str(data)) - except ValueError: - for enum_value in enum_obj: - if enum_value.value.lower() == str(data).lower(): - return enum_value - # We don't fail anymore for unknown value, we deserialize as a string - _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) - return Deserializer.deserialize_unicode(data) - - @staticmethod - def deserialize_bytearray(attr): - """Deserialize string into bytearray. - - :param str attr: response string to be deserialized. - :rtype: bytearray - :raises: TypeError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - return bytearray(b64decode(attr)) # type: ignore - - @staticmethod - def deserialize_base64(attr): - """Deserialize base64 encoded string into string. - - :param str attr: response string to be deserialized. - :rtype: bytearray - :raises: TypeError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore - attr = attr + padding # type: ignore - encoded = attr.replace("-", "+").replace("_", "/") - return b64decode(encoded) - - @staticmethod - def deserialize_decimal(attr): - """Deserialize string into Decimal object. - - :param str attr: response string to be deserialized. - :rtype: Decimal - :raises: DeserializationError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - try: - return decimal.Decimal(str(attr)) # type: ignore - except decimal.DecimalException as err: - msg = "Invalid decimal {}".format(attr) - raise DeserializationError(msg) from err - - @staticmethod - def deserialize_long(attr): - """Deserialize string into long (Py2) or int (Py3). - - :param str attr: response string to be deserialized. 
- :rtype: long or int - :raises: ValueError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - return _long_type(attr) # type: ignore - - @staticmethod - def deserialize_duration(attr): - """Deserialize ISO-8601 formatted string into TimeDelta object. - - :param str attr: response string to be deserialized. - :rtype: TimeDelta - :raises: DeserializationError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - try: - duration = isodate.parse_duration(attr) - except (ValueError, OverflowError, AttributeError) as err: - msg = "Cannot deserialize duration object." - raise DeserializationError(msg) from err - else: - return duration - - @staticmethod - def deserialize_date(attr): - """Deserialize ISO-8601 formatted string into Date object. - - :param str attr: response string to be deserialized. - :rtype: Date - :raises: DeserializationError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore - raise DeserializationError("Date must have only digits and -. Received: %s" % attr) - # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. - return isodate.parse_date(attr, defaultmonth=0, defaultday=0) - - @staticmethod - def deserialize_time(attr): - """Deserialize ISO-8601 formatted string into time object. - - :param str attr: response string to be deserialized. - :rtype: datetime.time - :raises: DeserializationError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore - raise DeserializationError("Date must have only digits and -. Received: %s" % attr) - return isodate.parse_time(attr) - - @staticmethod - def deserialize_rfc(attr): - """Deserialize RFC-1123 formatted string into Datetime object. - - :param str attr: response string to be deserialized. 
- :rtype: Datetime - :raises: DeserializationError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - try: - parsed_date = email.utils.parsedate_tz(attr) # type: ignore - date_obj = datetime.datetime( - *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) - ) - if not date_obj.tzinfo: - date_obj = date_obj.astimezone(tz=TZ_UTC) - except ValueError as err: - msg = "Cannot deserialize to rfc datetime object." - raise DeserializationError(msg) from err - else: - return date_obj - - @staticmethod - def deserialize_iso(attr): - """Deserialize ISO-8601 formatted string into Datetime object. - - :param str attr: response string to be deserialized. - :rtype: Datetime - :raises: DeserializationError if string format invalid. - """ - if isinstance(attr, ET.Element): - attr = attr.text - try: - attr = attr.upper() # type: ignore - match = Deserializer.valid_date.match(attr) - if not match: - raise ValueError("Invalid datetime string: " + attr) - - check_decimal = attr.split(".") - if len(check_decimal) > 1: - decimal_str = "" - for digit in check_decimal[1]: - if digit.isdigit(): - decimal_str += digit - else: - break - if len(decimal_str) > 6: - attr = attr.replace(decimal_str, decimal_str[0:6]) - - date_obj = isodate.parse_datetime(attr) - test_utc = date_obj.utctimetuple() - if test_utc.tm_year > 9999 or test_utc.tm_year < 1: - raise OverflowError("Hit max or min date") - except (ValueError, OverflowError, AttributeError) as err: - msg = "Cannot deserialize datetime object." - raise DeserializationError(msg) from err - else: - return date_obj - - @staticmethod - def deserialize_unix(attr): - """Serialize Datetime object into IntTime format. - This is represented as seconds. - - :param int attr: Object to be serialized. 
- :rtype: Datetime - :raises: DeserializationError if format invalid - """ - if isinstance(attr, ET.Element): - attr = int(attr.text) # type: ignore - try: - attr = int(attr) - date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) - except ValueError as err: - msg = "Cannot deserialize to unix datetime object." - raise DeserializationError(msg) from err - else: - return date_obj diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/_version.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_utils/__init__.py similarity index 80% rename from src/quantum/azext_quantum/vendored_sdks/azure_quantum/_version.py rename to src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_utils/__init__.py index 8dbbc07e356..8026245c2ab 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/_version.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_utils/__init__.py @@ -1,9 +1,6 @@ -# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- - -VERSION = "0.0.1" diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/_model_base.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_utils/model_base.py similarity index 79% rename from src/quantum/azext_quantum/vendored_sdks/azure_quantum/_model_base.py rename to src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_utils/model_base.py index 6a6e1f38b17..c402af2afc6 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/_model_base.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_utils/model_base.py @@ -1,9 +1,10 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- # pylint: disable=protected-access, broad-except @@ -21,17 +22,14 @@ from datetime import datetime, date, time, timedelta, timezone from json import JSONEncoder import xml.etree.ElementTree as ET +from collections.abc import MutableMapping from typing_extensions import Self import isodate from azure.core.exceptions import DeserializationError from azure.core import CaseInsensitiveEnumMeta from azure.core.pipeline import PipelineResponse from azure.core.serialization import _Null - -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping +from azure.core.rest import HttpResponse _LOGGER = logging.getLogger(__name__) @@ -39,6 +37,7 @@ TZ_UTC = timezone.utc _T = typing.TypeVar("_T") +_NONE_TYPE = type(None) def _timedelta_as_isostr(td: timedelta) -> str: @@ -173,6 +172,21 @@ def default(self, o): # pylint: disable=too-many-return-statements r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" ) +_ARRAY_ENCODE_MAPPING = { + "pipeDelimited": "|", + "spaceDelimited": " ", + "commaDelimited": ",", + "newlineDelimited": "\n", +} + + +def _deserialize_array_encoded(delimit: str, attr): + if isinstance(attr, str): + if attr == "": + return [] + return attr.split(delimit) + return attr + def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: """Deserialize ISO-8601 formatted string into Datetime object. 
@@ -204,7 +218,7 @@ def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: test_utc = date_obj.utctimetuple() if test_utc.tm_year > 9999 or test_utc.tm_year < 1: raise OverflowError("Hit max or min date") - return date_obj + return date_obj # type: ignore[no-any-return] def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: @@ -258,7 +272,7 @@ def _deserialize_time(attr: typing.Union[str, time]) -> time: """ if isinstance(attr, time): return attr - return isodate.parse_time(attr) + return isodate.parse_time(attr) # type: ignore[no-any-return] def _deserialize_bytes(attr): @@ -317,6 +331,8 @@ def _deserialize_int_as_str(attr): def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None): if annotation is int and rf and rf._format == "str": return _deserialize_int_as_str + if annotation is str and rf and rf._format in _ARRAY_ENCODE_MAPPING: + return functools.partial(_deserialize_array_encoded, _ARRAY_ENCODE_MAPPING[rf._format]) if rf and rf._format: return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore @@ -347,17 +363,47 @@ def _get_model(module_name: str, model_name: str): _UNSET = object() -class _MyMutableMapping(MutableMapping[str, typing.Any]): # pylint: disable=unsubscriptable-object - def __init__(self, data: typing.Dict[str, typing.Any]) -> None: +class _MyMutableMapping(MutableMapping[str, typing.Any]): + def __init__(self, data: dict[str, typing.Any]) -> None: self._data = data def __contains__(self, key: typing.Any) -> bool: return key in self._data def __getitem__(self, key: str) -> typing.Any: + # If this key has been deserialized (for mutable types), we need to handle serialization + if hasattr(self, "_attr_to_rest_field"): + cache_attr = f"_deserialized_{key}" + if hasattr(self, cache_attr): + rf = _get_rest_field(getattr(self, "_attr_to_rest_field"), key) + if rf: + value = self._data.get(key) + if isinstance(value, 
(dict, list, set)): + # For mutable types, serialize and return + # But also update _data with serialized form and clear flag + # so mutations via this returned value affect _data + serialized = _serialize(value, rf._format) + # If serialized form is same type (no transformation needed), + # return _data directly so mutations work + if isinstance(serialized, type(value)) and serialized == value: + return self._data.get(key) + # Otherwise return serialized copy and clear flag + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass + # Store serialized form back + self._data[key] = serialized + return serialized return self._data.__getitem__(key) def __setitem__(self, key: str, value: typing.Any) -> None: + # Clear any cached deserialized value when setting through dictionary access + cache_attr = f"_deserialized_{key}" + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass self._data.__setitem__(key, value) def __delitem__(self, key: str) -> None: @@ -373,50 +419,97 @@ def __ne__(self, other: typing.Any) -> bool: return not self.__eq__(other) def keys(self) -> typing.KeysView[str]: + """ + :returns: a set-like object providing a view on D's keys + :rtype: ~typing.KeysView + """ return self._data.keys() def values(self) -> typing.ValuesView[typing.Any]: + """ + :returns: an object providing a view on D's values + :rtype: ~typing.ValuesView + """ return self._data.values() def items(self) -> typing.ItemsView[str, typing.Any]: + """ + :returns: set-like object providing a view on D's items + :rtype: ~typing.ItemsView + """ return self._data.items() def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Get the value for key if key is in the dictionary, else default. + :param str key: The key to look up. + :param any default: The value to return if key is not in the dictionary. Defaults to None + :returns: D[k] if k in D, else d. 
+ :rtype: any + """ try: return self[key] except KeyError: return default @typing.overload - def pop(self, key: str) -> typing.Any: ... + def pop(self, key: str) -> typing.Any: ... # pylint: disable=arguments-differ @typing.overload - def pop(self, key: str, default: _T) -> _T: ... + def pop(self, key: str, default: _T) -> _T: ... # pylint: disable=signature-differs @typing.overload - def pop(self, key: str, default: typing.Any) -> typing.Any: ... + def pop(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Removes specified key and return the corresponding value. + :param str key: The key to pop. + :param any default: The value to return if key is not in the dictionary + :returns: The value corresponding to the key. + :rtype: any + :raises KeyError: If key is not found and default is not given. + """ if default is _UNSET: return self._data.pop(key) return self._data.pop(key, default) - def popitem(self) -> typing.Tuple[str, typing.Any]: + def popitem(self) -> tuple[str, typing.Any]: + """ + Removes and returns some (key, value) pair + :returns: The (key, value) pair. + :rtype: tuple + :raises KeyError: if D is empty. + """ return self._data.popitem() def clear(self) -> None: + """ + Remove all items from D. + """ self._data.clear() - def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: + def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: # pylint: disable=arguments-differ + """ + Updates D from mapping/iterable E and F. + :param any args: Either a mapping object or an iterable of key-value pairs. + """ self._data.update(*args, **kwargs) @typing.overload def setdefault(self, key: str, default: None = None) -> None: ... @typing.overload - def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... + def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... 
# pylint: disable=signature-differs def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Same as calling D.get(k, d), and setting D[k]=d if k not found + :param str key: The key to look up. + :param any default: The value to set if key is not in the dictionary + :returns: D[k] if k in D, else d. + :rtype: any + """ if default is _UNSET: return self._data.setdefault(key) return self._data.setdefault(key, default) @@ -438,6 +531,8 @@ def _is_model(obj: typing.Any) -> bool: def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements if isinstance(o, list): + if format in _ARRAY_ENCODE_MAPPING and all(isinstance(x, str) for x in o): + return _ARRAY_ENCODE_MAPPING[format].join(o) return [_serialize(x, format) for x in o] if isinstance(o, dict): return {k: _serialize(v, format) for k, v in o.items()} @@ -469,9 +564,7 @@ def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-m return o -def _get_rest_field( - attr_to_rest_field: typing.Dict[str, "_RestField"], rest_name: str -) -> typing.Optional["_RestField"]: +def _get_rest_field(attr_to_rest_field: dict[str, "_RestField"], rest_name: str) -> typing.Optional["_RestField"]: try: return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) except StopIteration: @@ -494,7 +587,7 @@ class Model(_MyMutableMapping): _is_model = True # label whether current class's _attr_to_rest_field has been calculated # could not see _attr_to_rest_field directly because subclass inherits it from parent class - _calculated: typing.Set[str] = set() + _calculated: set[str] = set() def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: class_name = self.__class__.__name__ @@ -579,7 +672,7 @@ def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', # 'Mapping', 'Collection', 'Sized', 'Iterable', 
'Container' and 'object' mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order - attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property + attr_to_rest_field: dict[str, _RestField] = { # map attribute name to rest_field property k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") } annotations = { @@ -594,10 +687,10 @@ def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) if not rf._rest_name_input: rf._rest_name_input = attr - cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items()) cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") - return super().__new__(cls) # pylint: disable=no-value-for-parameter + return super().__new__(cls) def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: for base in cls.__bases__: @@ -633,10 +726,10 @@ def _deserialize(cls, data, exist_discriminators): discriminator_value = data.find(xml_name).text # pyright: ignore else: discriminator_value = data.get(discriminator._rest_name) - mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member return mapped_cls._deserialize(data, exist_discriminators) - def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]: + def as_dict(self, *, exclude_readonly: bool = False) -> dict[str, typing.Any]: """Return a dict that can be turned into json using json.dump. :keyword bool exclude_readonly: Whether to remove the readonly properties. 
@@ -696,7 +789,7 @@ def _deserialize_with_union(deserializers, obj): def _deserialize_dict( value_deserializer: typing.Optional[typing.Callable], module: typing.Optional[str], - obj: typing.Dict[typing.Any, typing.Any], + obj: dict[typing.Any, typing.Any], ): if obj is None: return obj @@ -706,7 +799,7 @@ def _deserialize_dict( def _deserialize_multiple_sequence( - entry_deserializers: typing.List[typing.Optional[typing.Callable]], + entry_deserializers: list[typing.Optional[typing.Callable]], module: typing.Optional[str], obj, ): @@ -715,6 +808,14 @@ def _deserialize_multiple_sequence( return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) +def _is_array_encoded_deserializer(deserializer: functools.partial) -> bool: + return ( + isinstance(deserializer, functools.partial) + and isinstance(deserializer.args[0], functools.partial) + and deserializer.args[0].func == _deserialize_array_encoded # pylint: disable=comparison-with-callable + ) + + def _deserialize_sequence( deserializer: typing.Optional[typing.Callable], module: typing.Optional[str], @@ -724,17 +825,30 @@ def _deserialize_sequence( return obj if isinstance(obj, ET.Element): obj = list(obj) + + # encoded string may be deserialized to sequence + if isinstance(obj, str) and isinstance(deserializer, functools.partial): + # for list[str] + if _is_array_encoded_deserializer(deserializer): + return deserializer(obj) + + # for list[Union[...]] + if isinstance(deserializer.args[0], list): + for sub_deserializer in deserializer.args[0]: + if _is_array_encoded_deserializer(sub_deserializer): + return sub_deserializer(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) -def _sorted_annotations(types: typing.List[typing.Any]) -> typing.List[typing.Any]: +def _sorted_annotations(types: list[typing.Any]) -> list[typing.Any]: return sorted( types, key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", 
"int", "bool"), ) -def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-branches +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-statements, too-many-branches annotation: typing.Any, module: typing.Optional[str], rf: typing.Optional["_RestField"] = None, @@ -774,16 +888,16 @@ def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-retur # is it optional? try: - if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore + if any(a is _NONE_TYPE for a in annotation.__args__): # pyright: ignore if len(annotation.__args__) <= 2: # pyright: ignore if_obj_deserializer = _get_deserialize_callable_from_annotation( - next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore + next(a for a in annotation.__args__ if a is not _NONE_TYPE), module, rf # pyright: ignore ) return functools.partial(_deserialize_with_optional, if_obj_deserializer) # the type is Optional[Union[...]], we need to remove the None type from the Union annotation_copy = copy.copy(annotation) - annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a is not _NONE_TYPE] # pyright: ignore return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) except AttributeError: pass @@ -799,7 +913,10 @@ def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-retur return functools.partial(_deserialize_with_union, deserializers) try: - if annotation._name == "Dict": # pyright: ignore + annotation_name = ( + annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore + ) + if annotation_name.lower() == "dict": value_deserializer = _get_deserialize_callable_from_annotation( annotation.__args__[1], module, rf # pyright: ignore ) @@ -812,7 +929,10 @@ def 
_get_deserialize_callable_from_annotation( # pylint: disable=too-many-retur except (AttributeError, IndexError): pass try: - if annotation._name in ["List", "Set", "Tuple", "Sequence"]: # pyright: ignore + annotation_name = ( + annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore + ) + if annotation_name.lower() in ["list", "set", "tuple", "sequence"]: if len(annotation.__args__) > 1: # pyright: ignore entry_deserializers = [ _get_deserialize_callable_from_annotation(dt, module, rf) @@ -894,6 +1014,35 @@ def _deserialize( return _deserialize_with_callable(deserializer, value) +def _failsafe_deserialize( + deserializer: typing.Any, + response: HttpResponse, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + try: + return _deserialize(deserializer, response.json(), module, rf, format) + except Exception: # pylint: disable=broad-except + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +def _failsafe_deserialize_xml( + deserializer: typing.Any, + response: HttpResponse, +) -> typing.Any: + try: + return _deserialize_xml(deserializer, response.text()) + except Exception: # pylint: disable=broad-except + _LOGGER.warning( + "Ran into a deserialization error. 
Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + class _RestField: def __init__( self, @@ -901,11 +1050,11 @@ def __init__( name: typing.Optional[str] = None, type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin is_discriminator: bool = False, - visibility: typing.Optional[typing.List[str]] = None, + visibility: typing.Optional[list[str]] = None, default: typing.Any = _UNSET, format: typing.Optional[str] = None, is_multipart_file_input: bool = False, - xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, ): self._type = type self._rest_name_input = name @@ -920,7 +1069,11 @@ def __init__( @property def _class_type(self) -> typing.Any: - return getattr(self._type, "args", [None])[0] + result = getattr(self._type, "args", [None])[0] + # type may be wrapped by nested functools.partial so we need to check for that + if isinstance(result, functools.partial): + return getattr(result, "args", [None])[0] + return result @property def _rest_name(self) -> str: @@ -931,14 +1084,37 @@ def _rest_name(self) -> str: def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin # by this point, type and rest_name will have a value bc we default # them in __new__ of the Model class - item = obj.get(self._rest_name) + # Use _data.get() directly to avoid triggering __getitem__ which clears the cache + item = obj._data.get(self._rest_name) if item is None: return item if self._is_model: return item - return _deserialize(self._type, _serialize(item, self._format), rf=self) + + # For mutable types, we want mutations to directly affect _data + # Check if we've already deserialized this value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + # Return the value from _data directly (it's been deserialized in place) + return obj._data.get(self._rest_name) + + deserialized = _deserialize(self._type, _serialize(item, 
self._format), rf=self) + + # For mutable types, store the deserialized value back in _data + # so mutations directly affect _data + if isinstance(deserialized, (dict, list, set)): + obj._data[self._rest_name] = deserialized + object.__setattr__(obj, cache_attr, True) # Mark as deserialized + return deserialized + + return deserialized def __set__(self, obj: Model, value) -> None: + # Clear the cached deserialized object when setting a new value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + object.__delattr__(obj, cache_attr) + if value is None: # we want to wipe out entries if users set attr to None try: @@ -963,11 +1139,11 @@ def rest_field( *, name: typing.Optional[str] = None, type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin - visibility: typing.Optional[typing.List[str]] = None, + visibility: typing.Optional[list[str]] = None, default: typing.Any = _UNSET, format: typing.Optional[str] = None, is_multipart_file_input: bool = False, - xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, ) -> typing.Any: return _RestField( name=name, @@ -984,8 +1160,8 @@ def rest_discriminator( *, name: typing.Optional[str] = None, type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin - visibility: typing.Optional[typing.List[str]] = None, - xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + visibility: typing.Optional[list[str]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, ) -> typing.Any: return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) @@ -1004,9 +1180,9 @@ def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: def _get_element( o: typing.Any, exclude_readonly: bool = False, - parent_meta: typing.Optional[typing.Dict[str, typing.Any]] = None, + parent_meta: typing.Optional[dict[str, typing.Any]] = None, wrapped_element: 
typing.Optional[ET.Element] = None, -) -> typing.Union[ET.Element, typing.List[ET.Element]]: +) -> typing.Union[ET.Element, list[ET.Element]]: if _is_model(o): model_meta = getattr(o, "_xml", {}) @@ -1095,7 +1271,7 @@ def _get_element( def _get_wrapped_element( v: typing.Any, exclude_readonly: bool, - meta: typing.Optional[typing.Dict[str, typing.Any]], + meta: typing.Optional[dict[str, typing.Any]], ) -> ET.Element: wrapped_element = _create_xml_element( meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None @@ -1106,7 +1282,7 @@ def _get_wrapped_element( _get_element(v, exclude_readonly, meta, wrapped_element) else: wrapped_element.text = _get_primitive_type_value(v) - return wrapped_element + return wrapped_element # type: ignore[no-any-return] def _get_primitive_type_value(v) -> str: @@ -1119,7 +1295,9 @@ def _get_primitive_type_value(v) -> str: return str(v) -def _create_xml_element(tag, prefix=None, ns=None): +def _create_xml_element( + tag: typing.Any, prefix: typing.Optional[str] = None, ns: typing.Optional[str] = None +) -> ET.Element: if prefix and ns: ET.register_namespace(prefix, ns) if ns: @@ -1138,7 +1316,7 @@ def _deserialize_xml( def _convert_element(e: ET.Element): # dict case if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: - dict_result: typing.Dict[str, typing.Any] = {} + dict_result: dict[str, typing.Any] = {} for child in e: if dict_result.get(child.tag) is not None: if isinstance(dict_result[child.tag], list): @@ -1151,7 +1329,7 @@ def _convert_element(e: ET.Element): return dict_result # array case if len(e) > 0: - array_result: typing.List[typing.Any] = [] + array_result: list[typing.Any] = [] for child in e: array_result.append(_convert_element(child)) return array_result diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/_serialization.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_utils/serialization.py similarity index 92% rename 
from src/quantum/azext_quantum/vendored_sdks/azure_quantum/_serialization.py rename to src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_utils/serialization.py index e2ad5186990..81ec1de5922 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/_serialization.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_utils/serialization.py @@ -1,28 +1,10 @@ -# pylint: disable=too-many-lines +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 # -------------------------------------------------------------------------- -# # Copyright (c) Microsoft Corporation. All rights reserved. -# -# The MIT License (MIT) -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the ""Software""), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -# IN THE SOFTWARE. -# +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- # pyright: reportUnnecessaryTypeIgnoreComment=false @@ -39,7 +21,6 @@ import sys import codecs from typing import ( - Dict, Any, cast, Optional, @@ -48,10 +29,7 @@ IO, Mapping, Callable, - TypeVar, MutableMapping, - Type, - List, ) try: @@ -61,13 +39,13 @@ import xml.etree.ElementTree as ET import isodate # type: ignore +from typing_extensions import Self from azure.core.exceptions import DeserializationError, SerializationError from azure.core.serialization import NULL as CoreNull _BOM = codecs.BOM_UTF8.decode(encoding="utf-8") -ModelType = TypeVar("ModelType", bound="Model") JSON = MutableMapping[str, Any] @@ -185,73 +163,7 @@ def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], except NameError: _long_type = int - -class UTC(datetime.tzinfo): - """Time Zone info for handling UTC""" - - def utcoffset(self, dt): - """UTF offset for UTC is 0. - - :param datetime.datetime dt: The datetime - :returns: The offset - :rtype: datetime.timedelta - """ - return datetime.timedelta(0) - - def tzname(self, dt): - """Timestamp representation. - - :param datetime.datetime dt: The datetime - :returns: The timestamp representation - :rtype: str - """ - return "Z" - - def dst(self, dt): - """No daylight saving for UTC. - - :param datetime.datetime dt: The datetime - :returns: The daylight saving time - :rtype: datetime.timedelta - """ - return datetime.timedelta(hours=1) - - -try: - from datetime import timezone as _FixedOffset # type: ignore -except ImportError: # Python 2.7 - - class _FixedOffset(datetime.tzinfo): # type: ignore - """Fixed offset in minutes east from UTC. 
- Copy/pasted from Python doc - :param datetime.timedelta offset: offset in timedelta format - """ - - def __init__(self, offset) -> None: - self.__offset = offset - - def utcoffset(self, dt): - return self.__offset - - def tzname(self, dt): - return str(self.__offset.total_seconds() / 3600) - - def __repr__(self): - return "".format(self.tzname(None)) - - def dst(self, dt): - return datetime.timedelta(0) - - def __getinitargs__(self): - return (self.__offset,) - - -try: - from datetime import timezone - - TZ_UTC = timezone.utc -except ImportError: - TZ_UTC = UTC() # type: ignore +TZ_UTC = datetime.timezone.utc _FLATTEN = re.compile(r"(? None: - self.additional_properties: Optional[Dict[str, Any]] = {} + self.additional_properties: Optional[dict[str, Any]] = {} for k in kwargs: # pylint: disable=consider-using-dict-items if k not in self._attribute_map: _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) @@ -397,7 +309,7 @@ def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: def as_dict( self, keep_readonly: bool = True, - key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + key_transformer: Callable[[str, dict[str, Any], Any], Any] = attribute_transformer, **kwargs: Any ) -> JSON: """Return a dict that can be serialized using json.dump. @@ -450,25 +362,25 @@ def _infer_class_models(cls): return client_models @classmethod - def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType: + def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: """Parse a str using the RestAPI syntax and return a model. :param str data: A str using RestAPI structure. JSON by default. :param str content_type: JSON by default, set application/xml if XML. 
:returns: An instance of this model - :raises: DeserializationError if something went wrong - :rtype: ModelType + :raises DeserializationError: if something went wrong + :rtype: Self """ deserializer = Deserializer(cls._infer_class_models()) return deserializer(cls.__name__, data, content_type=content_type) # type: ignore @classmethod def from_dict( - cls: Type[ModelType], + cls, data: Any, - key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + key_extractors: Optional[Callable[[str, dict[str, Any], Any], Any]] = None, content_type: Optional[str] = None, - ) -> ModelType: + ) -> Self: """Parse a dict using given key extractor return a model. By default consider key @@ -479,8 +391,8 @@ def from_dict( :param function key_extractors: A key extractor function. :param str content_type: JSON by default, set application/xml if XML. :returns: An instance of this model - :raises: DeserializationError if something went wrong - :rtype: ModelType + :raises DeserializationError: if something went wrong + :rtype: Self """ deserializer = Deserializer(cls._infer_class_models()) deserializer.key_extractors = ( # type: ignore @@ -500,7 +412,7 @@ def _flatten_subtype(cls, key, objects): return {} result = dict(cls._subtype_map[key]) for valuetype in cls._subtype_map[key].values(): - result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access + result |= objects[valuetype]._flatten_subtype(key, objects) # pylint: disable=protected-access return result @classmethod @@ -563,7 +475,7 @@ def _decode_attribute_map_key(key): return key.replace("\\.", ".") -class Serializer(object): # pylint: disable=too-many-public-methods +class Serializer: # pylint: disable=too-many-public-methods """Request object model serializer.""" basic_types = {str: "str", int: "int", bool: "bool", float: "float"} @@ -614,7 +526,7 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: "[]": self.serialize_iter, "{}": 
self.serialize_dict, } - self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.dependencies: dict[str, type] = dict(classes) if classes else {} self.key_transformer = full_restapi_key_transformer self.client_side_validation = True @@ -626,7 +538,7 @@ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, to :param object target_obj: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str, dict - :raises: SerializationError if serialization fails. + :raises SerializationError: if serialization fails. :returns: The serialized data. """ key_transformer = kwargs.get("key_transformer", self.key_transformer) @@ -665,7 +577,7 @@ def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, to if attr_name == "additional_properties" and attr_desc["key"] == "": if target_obj.additional_properties is not None: - serialized.update(target_obj.additional_properties) + serialized |= target_obj.additional_properties continue try: @@ -736,8 +648,8 @@ def body(self, data, data_type, **kwargs): :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: dict - :raises: SerializationError if serialization fails. - :raises: ValueError if data is None + :raises SerializationError: if serialization fails. + :raises ValueError: if data is None :returns: The serialized request body """ @@ -781,8 +693,8 @@ def url(self, name, data, data_type, **kwargs): :param str data_type: The type to be serialized from. :rtype: str :returns: The serialized URL path - :raises: TypeError if serialization fails. - :raises: ValueError if data is None + :raises TypeError: if serialization fails. + :raises ValueError: if data is None """ try: output = self.serialize_data(data, data_type, **kwargs) @@ -805,8 +717,8 @@ def query(self, name, data, data_type, **kwargs): :param object data: The data to be serialized. :param str data_type: The type to be serialized from. 
:rtype: str, list - :raises: TypeError if serialization fails. - :raises: ValueError if data is None + :raises TypeError: if serialization fails. + :raises ValueError: if data is None :returns: The serialized query parameter """ try: @@ -835,8 +747,8 @@ def header(self, name, data, data_type, **kwargs): :param object data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: str - :raises: TypeError if serialization fails. - :raises: ValueError if data is None + :raises TypeError: if serialization fails. + :raises ValueError: if data is None :returns: The serialized header """ try: @@ -855,9 +767,9 @@ def serialize_data(self, data, data_type, **kwargs): :param object data: The data to be serialized. :param str data_type: The type to be serialized from. - :raises: AttributeError if required data is None. - :raises: ValueError if data is None - :raises: SerializationError if serialization fails. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. :returns: The serialized data. :rtype: str, int, float, bool, dict, list """ @@ -875,7 +787,7 @@ def serialize_data(self, data, data_type, **kwargs): # If dependencies is empty, try with current data class # It has to be a subclass of Enum anyway - enum_type = self.dependencies.get(data_type, data.__class__) + enum_type = self.dependencies.get(data_type, cast(type, data.__class__)) if issubclass(enum_type, Enum): return Serializer.serialize_enum(data, enum_obj=enum_type) @@ -909,13 +821,20 @@ def serialize_basic(cls, data, data_type, **kwargs): :param str data_type: Type of object in the iterable. :rtype: str, int, float, bool :return: serialized object + :raises TypeError: raise if data_type is not one of str, int, float, bool. 
""" custom_serializer = cls._get_custom_serializers(data_type, **kwargs) if custom_serializer: return custom_serializer(data) if data_type == "str": return cls.serialize_unicode(data) - return eval(data_type)(data) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(data) + if data_type == "float": + return float(data) + if data_type == "bool": + return bool(data) + raise TypeError("Unknown basic data type: {}".format(data_type)) @classmethod def serialize_unicode(cls, data): @@ -1192,7 +1111,7 @@ def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument :param Datetime attr: Object to be serialized. :rtype: str - :raises: TypeError if format invalid. + :raises TypeError: if format invalid. :return: serialized rfc """ try: @@ -1218,7 +1137,7 @@ def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument :param Datetime attr: Object to be serialized. :rtype: str - :raises: SerializationError if format invalid. + :raises SerializationError: if format invalid. :return: serialized iso """ if isinstance(attr, str): @@ -1251,7 +1170,7 @@ def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument :param Datetime attr: Object to be serialized. :rtype: int - :raises: SerializationError if format invalid + :raises SerializationError: if format invalid :return: serialied unix """ if isinstance(attr, int): @@ -1270,7 +1189,7 @@ def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argumen while "." 
in key: # Need the cast, as for some reasons "split" is typed as list[str | Any] - dict_keys = cast(List[str], _FLATTEN.split(key)) + dict_keys = cast(list[str], _FLATTEN.split(key)) if len(dict_keys) == 1: key = _decode_attribute_map_key(dict_keys[0]) break @@ -1429,7 +1348,7 @@ def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument # Iter and wrapped, should have found one node only (the wrap one) if len(children) != 1: raise DeserializationError( - "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( # pylint: disable=line-too-long + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( xml_name ) ) @@ -1441,7 +1360,7 @@ def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument return children[0] -class Deserializer(object): +class Deserializer: """Response object model deserializer. :param dict classes: Class type dictionary for deserializing complex types. @@ -1472,7 +1391,7 @@ def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: "duration": (isodate.Duration, datetime.timedelta), "iso-8601": (datetime.datetime), } - self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.dependencies: dict[str, type] = dict(classes) if classes else {} self.key_extractors = [rest_key_extractor, xml_key_extractor] # Additional properties only works if the "rest_key_extractor" is used to # extract the keys. Making it to work whatever the key extractor is too much @@ -1488,7 +1407,7 @@ def __call__(self, target_obj, response_data, content_type=None): :param str target_obj: Target data type to deserialize to. :param requests.Response response_data: REST response object. :param str content_type: Swagger "produces" if available. - :raises: DeserializationError if deserialization fails. + :raises DeserializationError: if deserialization fails. 
:return: Deserialized object. :rtype: object """ @@ -1502,7 +1421,7 @@ def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return :param str target_obj: Target data type to deserialize to. :param object data: Object to deserialize. - :raises: DeserializationError if deserialization fails. + :raises DeserializationError: if deserialization fails. :return: Deserialized object. :rtype: object """ @@ -1717,7 +1636,7 @@ def deserialize_data(self, data, data_type): # pylint: disable=too-many-return- :param str data: The response string to be deserialized. :param str data_type: The type to deserialize to. - :raises: DeserializationError if deserialization fails. + :raises DeserializationError: if deserialization fails. :return: Deserialized object. :rtype: object """ @@ -1799,7 +1718,7 @@ def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return :param dict attr: Dictionary to be deserialized. :return: Deserialized object. :rtype: dict - :raises: TypeError if non-builtin datatype encountered. + :raises TypeError: if non-builtin datatype encountered. """ if attr is None: return None @@ -1845,7 +1764,7 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return :param str data_type: deserialization data type. :return: Deserialized basic type. :rtype: str, int, float or bool - :raises: TypeError if string format is not valid. + :raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool. """ # If we're here, data is supposed to be a basic type. 
# If it's still an XML node, take the text @@ -1871,7 +1790,11 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return if data_type == "str": return self.deserialize_unicode(attr) - return eval(data_type)(attr) # nosec # pylint: disable=eval-used + if data_type == "int": + return int(attr) + if data_type == "float": + return float(attr) + raise TypeError("Unknown basic data type: {}".format(data_type)) @staticmethod def deserialize_unicode(data): @@ -1936,7 +1859,7 @@ def deserialize_bytearray(attr): :param str attr: response string to be deserialized. :return: Deserialized bytearray :rtype: bytearray - :raises: TypeError if string format invalid. + :raises TypeError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1949,7 +1872,7 @@ def deserialize_base64(attr): :param str attr: response string to be deserialized. :return: Deserialized base64 string :rtype: bytearray - :raises: TypeError if string format invalid. + :raises TypeError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1964,7 +1887,7 @@ def deserialize_decimal(attr): :param str attr: response string to be deserialized. :return: Deserialized decimal - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. :rtype: decimal """ if isinstance(attr, ET.Element): @@ -1982,7 +1905,7 @@ def deserialize_long(attr): :param str attr: response string to be deserialized. :return: Deserialized int :rtype: long or int - :raises: ValueError if string format invalid. + :raises ValueError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -1995,7 +1918,7 @@ def deserialize_duration(attr): :param str attr: response string to be deserialized. :return: Deserialized duration :rtype: TimeDelta - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. 
""" if isinstance(attr, ET.Element): attr = attr.text @@ -2013,7 +1936,7 @@ def deserialize_date(attr): :param str attr: response string to be deserialized. :return: Deserialized date :rtype: Date - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -2029,7 +1952,7 @@ def deserialize_time(attr): :param str attr: response string to be deserialized. :return: Deserialized time :rtype: datetime.time - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -2044,14 +1967,14 @@ def deserialize_rfc(attr): :param str attr: response string to be deserialized. :return: Deserialized RFC datetime :rtype: Datetime - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text try: parsed_date = email.utils.parsedate_tz(attr) # type: ignore date_obj = datetime.datetime( - *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) ) if not date_obj.tzinfo: date_obj = date_obj.astimezone(tz=TZ_UTC) @@ -2067,7 +1990,7 @@ def deserialize_iso(attr): :param str attr: response string to be deserialized. :return: Deserialized ISO datetime :rtype: Datetime - :raises: DeserializationError if string format invalid. + :raises DeserializationError: if string format invalid. """ if isinstance(attr, ET.Element): attr = attr.text @@ -2105,7 +2028,7 @@ def deserialize_unix(attr): :param int attr: Object to be serialized. 
:return: Deserialized datetime :rtype: Datetime - :raises: DeserializationError if format invalid + :raises DeserializationError: if format invalid """ if isinstance(attr, ET.Element): attr = int(attr.text) # type: ignore diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_validation.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_validation.py new file mode 100644 index 00000000000..f5af3a4eb8a --- /dev/null +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_validation.py @@ -0,0 +1,66 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import functools + + +def api_version_validation(**kwargs): + params_added_on = kwargs.pop("params_added_on", {}) + method_added_on = kwargs.pop("method_added_on", "") + api_versions_list = kwargs.pop("api_versions_list", []) + + def _index_with_default(value: str, default: int = -1) -> int: + """Get the index of value in lst, or return default if not found. + + :param value: The value to search for in the api_versions_list. + :type value: str + :param default: The default value to return if the value is not found. + :type default: int + :return: The index of the value in the list, or the default value if not found. 
+ :rtype: int + """ + try: + return api_versions_list.index(value) + except ValueError: + return default + + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + try: + # this assumes the client has an _api_version attribute + client = args[0] + client_api_version = client._config.api_version # pylint: disable=protected-access + except AttributeError: + return func(*args, **kwargs) + + if _index_with_default(method_added_on) > _index_with_default(client_api_version): + raise ValueError( + f"'{func.__name__}' is not available in API version " + f"{client_api_version}. Pass service API version {method_added_on} or newer to your client." + ) + + unsupported = { + parameter: api_version + for api_version, parameters in params_added_on.items() + for parameter in parameters + if parameter in kwargs and _index_with_default(api_version) > _index_with_default(client_api_version) + } + if unsupported: + raise ValueError( + "".join( + [ + f"'{param}' is not available in API version {client_api_version}. " + f"Use service API version {version} or newer.\n" + for param, version in unsupported.items() + ] + ) + ) + return func(*args, **kwargs) + + return wrapper + + return decorator diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_vendor.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_vendor.py deleted file mode 100644 index 8598d2b7459..00000000000 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_vendor.py +++ /dev/null @@ -1,20 +0,0 @@ -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- - -from typing import List, cast - - -def _format_url_section(template, **kwargs): - components = template.split("/") - while components: - try: - return template.format(**kwargs) - except KeyError as key: - # Need the cast, as for some reasons "split" is typed as list[str | Any] - formatted_components = cast(List[str], template.split("/")) - components = [c for c in formatted_components if "{}".format(key.args[0]) not in c] - template = "/".join(components) diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_version.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_version.py index f30401ec204..84dd39fb67a 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_version.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/_version.py @@ -2,8 +2,8 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -VERSION = "2.2.0" +VERSION = "3.7.0b1" diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/__init__.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/__init__.py similarity index 87% rename from src/quantum/azext_quantum/vendored_sdks/azure_quantum/__init__.py rename to src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/__init__.py index bb982f93907..ceacbbbbb3c 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/__init__.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/__init__.py @@ -12,10 +12,7 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._client import ServicesClient # type: ignore -from ._version import VERSION - -__version__ = VERSION +from ._client import WorkspaceClient # type: ignore try: from ._patch import __all__ as _patch_all @@ -25,7 +22,7 @@ from ._patch import patch_sdk as _patch_sdk __all__ = [ - "ServicesClient", + "WorkspaceClient", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/_client.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/_client.py new file mode 100644 index 00000000000..21f48db15e7 --- /dev/null +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/_client.py @@ -0,0 +1,110 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable, TYPE_CHECKING, Union +from typing_extensions import Self + +from azure.core import AsyncPipelineClient +from azure.core.credentials import AzureKeyCredential +from azure.core.pipeline import policies +from azure.core.rest import AsyncHttpResponse, HttpRequest + +from .._utils.serialization import Deserializer, Serializer +from ._configuration import WorkspaceClientConfiguration +from .operations import ServicesOperations + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class WorkspaceClient: + """Azure Quantum Workspace Services. + + :ivar services: ServicesOperations operations + :vartype services: azure.quantum.aio.operations.ServicesOperations + :param endpoint: The endpoint of the Azure Quantum service. For example, + https://{region}.quantum.azure.com. Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Is either a token + credential type or a key credential type. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential or + ~azure.core.credentials.AzureKeyCredential + :keyword api_version: The API version to use for this operation. Known values are + "2026-01-15-preview" and None. Default value is "2026-01-15-preview". Note that overriding this + default value may result in unsupported behavior. 
+ :paramtype api_version: str + """ + + def __init__( + self, endpoint: str, credential: Union["AsyncTokenCredential", AzureKeyCredential], **kwargs: Any + ) -> None: + _endpoint = "{endpoint}" + self._config = WorkspaceClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + self.services = ServicesOperations(self._client, self._config, self._serialize, self._deserialize) + + def send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> Self: + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details: Any) -> None: + await self._client.__aexit__(*exc_details) diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/_configuration.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/_configuration.py similarity index 67% rename from src/quantum/azext_quantum/vendored_sdks/azure_quantum/_configuration.py rename to src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/_configuration.py index 1a15534cc0f..5d1aff54c11 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/_configuration.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/_configuration.py @@ -11,50 +11,43 @@ from azure.core.credentials import AzureKeyCredential from azure.core.pipeline import policies -from ._version import VERSION +from .._version import VERSION if TYPE_CHECKING: - from azure.core.credentials import TokenCredential + from azure.core.credentials_async import AsyncTokenCredential -class ServicesClientConfiguration: # pylint: disable=too-many-instance-attributes - """Configuration for ServicesClient. +class WorkspaceClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for WorkspaceClient. Note that all parameters used to create this instance are saved as instance attributes. - :param region: The Azure region where the Azure Quantum Workspace is located. Required. 
- :type region: str - :param credential: Credential used to authenticate requests to the service. Is either a - TokenCredential type or a AzureKeyCredential type. Required. - :type credential: ~azure.core.credentials.TokenCredential or + :param endpoint: The endpoint of the Azure Quantum service. For example, + https://{region}.quantum.azure.com. Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Is either a token + credential type or a key credential type. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential or ~azure.core.credentials.AzureKeyCredential - :param service_base_url: The Azure Quantum service base url. Default value is - "quantum.azure.com". - :type service_base_url: str - :keyword api_version: The API version to use for this operation. Default value is - "2024-10-01-preview". Note that overriding this default value may result in unsupported - behavior. + :keyword api_version: The API version to use for this operation. Known values are + "2026-01-15-preview" and None. Default value is "2026-01-15-preview". Note that overriding this + default value may result in unsupported behavior. 
:paramtype api_version: str """ def __init__( - self, - region: str, - credential: Union["TokenCredential", AzureKeyCredential], - service_base_url: str = "quantum.azure.com", - **kwargs: Any, + self, endpoint: str, credential: Union["AsyncTokenCredential", AzureKeyCredential], **kwargs: Any ) -> None: - api_version: str = kwargs.pop("api_version", "2024-10-01-preview") + api_version: str = kwargs.pop("api_version", "2026-01-15-preview") - if region is None: - raise ValueError("Parameter 'region' must not be None.") + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") if credential is None: raise ValueError("Parameter 'credential' must not be None.") - self.region = region + self.endpoint = endpoint self.credential = credential - self.service_base_url = service_base_url self.api_version = api_version self.credential_scopes = kwargs.pop("credential_scopes", ["https://quantum.microsoft.com/.default"]) kwargs.setdefault("sdk_moniker", "quantum/{}".format(VERSION)) @@ -63,7 +56,7 @@ def __init__( def _infer_policy(self, **kwargs): if hasattr(self.credential, "get_token"): - return policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) + return policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) if isinstance(self.credential, AzureKeyCredential): return policies.AzureKeyCredentialPolicy(self.credential, "x-ms-quantum-api-key", **kwargs) raise TypeError(f"Unsupported credential: {self.credential}") @@ -75,8 +68,8 @@ def _configure(self, **kwargs: Any) -> None: self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) - self.retry_policy = 
kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) self.authentication_policy = kwargs.get("authentication_policy") if self.credential and not self.authentication_policy: self.authentication_policy = self._infer_policy(**kwargs) diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/operations/_patch.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/_patch.py similarity index 52% rename from src/quantum/azext_quantum/vendored_sdks/azure_quantum/operations/_patch.py rename to src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/_patch.py index f7dd3251033..87676c65a8f 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/operations/_patch.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/_patch.py @@ -1,14 +1,15 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. 
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/operations/__init__.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/operations/__init__.py similarity index 62% rename from src/quantum/azext_quantum/vendored_sdks/azure_quantum/operations/__init__.py rename to src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/operations/__init__.py index 35635396fd3..18cce5a4e75 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/operations/__init__.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/operations/__init__.py @@ -12,24 +12,14 @@ if TYPE_CHECKING: from ._patch import * # pylint: disable=unused-wildcard-import -from ._operations import JobsOperations # type: ignore -from ._operations import SessionsOperations # type: ignore -from ._operations import ProvidersOperations # type: ignore -from ._operations import StorageOperations # type: ignore -from ._operations import QuotasOperations # type: ignore -from ._operations import TopLevelItemsOperations # type: ignore +from ._operations import ServicesOperations # type: ignore from ._patch import __all__ as _patch_all from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ - "JobsOperations", - "SessionsOperations", - "ProvidersOperations", - "StorageOperations", - "QuotasOperations", - "TopLevelItemsOperations", + "ServicesOperations", ] __all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/operations/_operations.py 
b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/operations/_operations.py similarity index 60% rename from src/quantum/azext_quantum/vendored_sdks/azure_quantum/operations/_operations.py rename to src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/operations/_operations.py index e88781943d7..64077acdf86 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/operations/_operations.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/operations/_operations.py @@ -6,12 +6,14 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase import json -import sys -from typing import Any, Callable, Dict, IO, Iterable, List, Optional, TypeVar, Union, overload +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload import urllib.parse +from azure.core import AsyncPipelineClient +from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, @@ -22,524 +24,235 @@ StreamConsumedError, map_error, ) -from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict -from .. import models as _models -from .._model_base import SdkJSONEncoder, _deserialize -from .._serialization import Serializer +from ... 
import models as _models +from ..._utils.model_base import SdkJSONEncoder, _deserialize +from ..._utils.serialization import Deserializer, Serializer +from ..._validation import api_version_validation +from ...operations._operations import ( + build_services_jobs_cancel_request, + build_services_jobs_create_request, + build_services_jobs_delete_request, + build_services_jobs_get_request, + build_services_jobs_list_request, + build_services_jobs_update_request, + build_services_providers_list_request, + build_services_quotas_list_request, + build_services_sessions_close_request, + build_services_sessions_get_request, + build_services_sessions_jobs_list_request, + build_services_sessions_listv2_request, + build_services_sessions_open_request, + build_services_storage_get_sas_uri_request, + build_services_top_level_items_listv2_request, +) +from .._configuration import WorkspaceClientConfiguration -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -_SERIALIZER = Serializer() -_SERIALIZER.client_side_validation = False - - -def build_jobs_create_or_replace_request( - subscription_id: str, resource_group_name: str, workspace_name: str, job_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-10-01-preview")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs/{jobId}" # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - "jobId": _SERIALIZER.url("job_id", job_id, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +JSON = MutableMapping[str, Any] - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) +class ServicesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + Instead, you should access the following operations through + :class:`~azure.quantum.aio.WorkspaceClient`'s + :attr:`services` attribute. 
+ """ -def build_jobs_update_request( - subscription_id: str, resource_group_name: str, workspace_name: str, job_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: WorkspaceClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-10-01-preview")) - accept = _headers.pop("Accept", "application/json") + self.top_level_items = ServicesTopLevelItemsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.jobs = ServicesJobsOperations(self._client, self._config, self._serialize, self._deserialize) + self.providers = ServicesProvidersOperations(self._client, self._config, self._serialize, self._deserialize) + self.quotas = ServicesQuotasOperations(self._client, self._config, self._serialize, self._deserialize) + self.sessions = ServicesSessionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.storage = ServicesStorageOperations(self._client, self._config, self._serialize, self._deserialize) - # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs/{jobId}" # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), - "resourceGroupName": 
_SERIALIZER.url("resource_group_name", resource_group_name, "str"), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - "jobId": _SERIALIZER.url("job_id", job_id, "str"), - } - _url: str = _url.format(**path_format_arguments) # type: ignore +class ServicesTopLevelItemsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_jobs_delete_request( - subscription_id: str, resource_group_name: str, workspace_name: str, job_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-10-01-preview")) - accept = _headers.pop("Accept", "application/json") + Instead, you should access the following operations through + :class:`~azure.quantum.aio.WorkspaceClient`'s + :attr:`top_level_items` attribute. 
+ """ - # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs/{jobId}" # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - "jobId": _SERIALIZER.url("job_id", job_id, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_jobs_get_request( - subscription_id: str, resource_group_name: str, workspace_name: str, job_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: WorkspaceClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-10-01-preview")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs/{jobId}" # pylint: disable=line-too-long - 
path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - "jobId": _SERIALIZER.url("job_id", job_id, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + @distributed_trace + @api_version_validation( + method_added_on="2024-10-01-preview", + params_added_on={ + "2024-10-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "workspace_name", + "filter", + "skip", + "top", + "orderby", + "accept", + ] + }, + api_versions_list=["2024-10-01-preview", "2025-09-01-preview", "2025-12-01-preview", "2026-01-15-preview"], + ) + def listv2( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + filter: Optional[str] = None, + skip: Optional[int] = None, + top: Optional[int] = None, + orderby: Optional[str] = None, + **kwargs: Any + ) -> AsyncItemPaged["_models.ItemDetails"]: + """List top-level items. - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str + :keyword filter: Filter the result list using the given expression. Default value is None. + :paramtype filter: str + :keyword skip: The number of result items to skip. Default value is None. + :paramtype skip: int + :keyword top: The number of jobs taken. Default value is None. 
+ :paramtype top: int + :keyword orderby: The order of returned items. Default value is None. + :paramtype orderby: str + :return: An iterator like instance of ItemDetails + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.quantum.models.ItemDetails] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + cls: ClsType[list[_models.ItemDetails]] = kwargs.pop("cls", None) -def build_jobs_list_request( - subscription_id: str, - resource_group_name: str, - workspace_name: str, - *, - filter: Optional[str] = None, - skip: Optional[int] = None, - top: Optional[int] = None, - orderby: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-10-01-preview")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs" # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - } + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) - _url: str = _url.format(**path_format_arguments) # type: ignore + def prepare_request(next_link=None): + if not next_link: - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if filter is not None: - _params["filter"] = 
_SERIALIZER.query("filter", filter, "str") - if skip is not None: - _params["skip"] = _SERIALIZER.query("skip", skip, "int") - if top is not None: - _params["top"] = _SERIALIZER.query("top", top, "int") - if orderby is not None: - _params["orderby"] = _SERIALIZER.query("orderby", orderby, "str") + _request = build_services_top_level_items_listv2_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + filter=filter, + skip=skip, + top=top, + orderby=orderby, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_sessions_create_or_replace_request( - subscription_id: str, resource_group_name: str, workspace_name: str, session_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-10-01-preview")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions/{sessionId}" # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - "sessionId": _SERIALIZER.url("session_id", session_id, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_sessions_close_request( - subscription_id: str, resource_group_name: str, workspace_name: str, session_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-10-01-preview")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions/{sessionId}:close" # pylint: disable=line-too-long - 
path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - "sessionId": _SERIALIZER.url("session_id", session_id, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_sessions_get_request( - subscription_id: str, resource_group_name: str, workspace_name: str, session_id: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-10-01-preview")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions/{sessionId}" # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - "sessionId": _SERIALIZER.url("session_id", session_id, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", 
url=_url, params=_params, headers=_headers, **kwargs) - - -def build_sessions_list_request( - subscription_id: str, - resource_group_name: str, - workspace_name: str, - *, - filter: Optional[str] = None, - skip: Optional[int] = None, - top: Optional[int] = None, - orderby: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-10-01-preview")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions" # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if filter is not None: - _params["filter"] = _SERIALIZER.query("filter", filter, "str") - if skip is not None: - _params["skip"] = _SERIALIZER.query("skip", skip, "int") - if top is not None: - _params["top"] = _SERIALIZER.query("top", top, "int") - if orderby is not None: - _params["orderby"] = _SERIALIZER.query("orderby", orderby, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_sessions_jobs_list_request( - subscription_id: str, - resource_group_name: str, - workspace_name: str, - session_id: str, - *, - filter: Optional[str] = None, - skip: Optional[int] = None, - 
top: Optional[int] = None, - orderby: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-10-01-preview")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions/{sessionId}/jobs" # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - "sessionId": _SERIALIZER.url("session_id", session_id, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if filter is not None: - _params["filter"] = _SERIALIZER.query("filter", filter, "str") - if skip is not None: - _params["skip"] = _SERIALIZER.query("skip", skip, "int") - if top is not None: - _params["top"] = _SERIALIZER.query("top", top, "int") - if orderby is not None: - _params["orderby"] = _SERIALIZER.query("orderby", orderby, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_providers_list_request( - subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
"2024-10-01-preview")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/providerStatus" # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_storage_get_sas_uri_request( - subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-10-01-preview")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/storage/sasUri" # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore - - # 
Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - - # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) - - -def build_quotas_list_request( - subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-10-01-preview")) - accept = _headers.pop("Accept", "application/json") - - # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/quotas" # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - } - - _url: str = _url.format(**path_format_arguments) # type: ignore + return _request - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.ItemDetails], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + async 
def get_next(next_link=None): + _request = prepare_request(next_link) + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response -def build_top_level_items_list_request( - subscription_id: str, - resource_group_name: str, - workspace_name: str, - *, - filter: Optional[str] = None, - skip: Optional[int] = None, - top: Optional[int] = None, - orderby: Optional[str] = None, - **kwargs: Any -) -> HttpRequest: - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-10-01-preview")) - accept = _headers.pop("Accept", "application/json") + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) - # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/topLevelItems" # pylint: disable=line-too-long - path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - } + return pipeline_response - _url: str = _url.format(**path_format_arguments) # type: ignore + return AsyncItemPaged(get_next, extract_data) - # Construct parameters - _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - if filter is not None: - _params["filter"] = _SERIALIZER.query("filter", filter, "str") - if skip is not None: - _params["skip"] = _SERIALIZER.query("skip", skip, "int") - if top is not None: - _params["top"] = _SERIALIZER.query("top", top, "int") - if 
orderby is not None: - _params["orderby"] = _SERIALIZER.query("orderby", orderby, "str") - # Construct headers - _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - - -class JobsOperations: +class ServicesJobsOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.quantum.ServicesClient`'s + :class:`~azure.quantum.aio.WorkspaceClient`'s :attr:`jobs` attribute. """ - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: WorkspaceClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @overload - def create_or_replace( + async def create( self, subscription_id: str, resource_group_name: str, @@ -571,7 +284,7 @@ def create_or_replace( """ @overload - def create_or_replace( + async def create( self, subscription_id: str, resource_group_name: str, @@ -603,7 +316,7 @@ def create_or_replace( """ @overload - def create_or_replace( + async def create( self, subscription_id: str, resource_group_name: str, @@ -634,8 +347,8 @@ def create_or_replace( :raises ~azure.core.exceptions.HttpResponseError: """ - @distributed_trace - def create_or_replace( + @distributed_trace_async + async def 
create( self, subscription_id: str, resource_group_name: str, @@ -682,7 +395,7 @@ def create_or_replace( else: _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_jobs_create_or_replace_request( + _request = build_services_jobs_create_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, @@ -694,15 +407,12 @@ def create_or_replace( params=_params, ) path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" - ), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -711,7 +421,7 @@ def create_or_replace( if response.status_code not in [200, 201]: if _stream: try: - response.read() # Load the body in memory and close the socket + await response.read() # Load the body in memory and close the socket except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) @@ -728,17 +438,49 @@ def create_or_replace( return deserialized # type: ignore @overload - def update( + async def update( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + job_id: str, + resource: _models.JobUpdateOptions, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _models.JobUpdateOptions: + """Update job properties. + + :param subscription_id: The Azure subscription ID. 
Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str + :param job_id: Id of the job. Required. + :type job_id: str + :param resource: The resource instance. Required. + :type resource: ~azure.quantum.models.JobUpdateOptions + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: JobUpdateOptions. The JobUpdateOptions is compatible with MutableMapping + :rtype: ~azure.quantum.models.JobUpdateOptions + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( self, subscription_id: str, resource_group_name: str, workspace_name: str, job_id: str, - resource: List[_models.JsonPatchObject], + resource: JSON, *, - content_type: str = "application/json-patch+json", + content_type: str = "application/merge-patch+json", **kwargs: Any - ) -> Optional[_models.JobDetails]: + ) -> _models.JobUpdateOptions: """Update job properties. :param subscription_id: The Azure subscription ID. Required. @@ -750,17 +492,17 @@ def update( :param job_id: Id of the job. Required. :type job_id: str :param resource: The resource instance. Required. - :type resource: list[~azure.quantum.models.JsonPatchObject] + :type resource: JSON :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json-patch+json". + Default value is "application/merge-patch+json". :paramtype content_type: str - :return: JobDetails or None. The JobDetails is compatible with MutableMapping - :rtype: ~azure.quantum.models.JobDetails or None + :return: JobUpdateOptions. 
The JobUpdateOptions is compatible with MutableMapping + :rtype: ~azure.quantum.models.JobUpdateOptions :raises ~azure.core.exceptions.HttpResponseError: """ @overload - def update( + async def update( self, subscription_id: str, resource_group_name: str, @@ -768,9 +510,9 @@ def update( job_id: str, resource: IO[bytes], *, - content_type: str = "application/json-patch+json", + content_type: str = "application/merge-patch+json", **kwargs: Any - ) -> Optional[_models.JobDetails]: + ) -> _models.JobUpdateOptions: """Update job properties. :param subscription_id: The Azure subscription ID. Required. @@ -784,23 +526,38 @@ def update( :param resource: The resource instance. Required. :type resource: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json-patch+json". + Default value is "application/merge-patch+json". :paramtype content_type: str - :return: JobDetails or None. The JobDetails is compatible with MutableMapping - :rtype: ~azure.quantum.models.JobDetails or None + :return: JobUpdateOptions. The JobUpdateOptions is compatible with MutableMapping + :rtype: ~azure.quantum.models.JobUpdateOptions :raises ~azure.core.exceptions.HttpResponseError: """ - @distributed_trace - def update( + @distributed_trace_async + @api_version_validation( + method_added_on="2025-09-01-preview", + params_added_on={ + "2025-09-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "workspace_name", + "job_id", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-09-01-preview", "2025-12-01-preview", "2026-01-15-preview"], + ) + async def update( self, subscription_id: str, resource_group_name: str, workspace_name: str, job_id: str, - resource: Union[List[_models.JsonPatchObject], IO[bytes]], + resource: Union[_models.JobUpdateOptions, JSON, IO[bytes]], **kwargs: Any - ) -> Optional[_models.JobDetails]: + ) -> _models.JobUpdateOptions: """Update job properties. 
:param subscription_id: The Azure subscription ID. Required. @@ -811,11 +568,11 @@ def update( :type workspace_name: str :param job_id: Id of the job. Required. :type job_id: str - :param resource: The resource instance. Is either a [JsonPatchObject] type or a IO[bytes] type. - Required. - :type resource: list[~azure.quantum.models.JsonPatchObject] or IO[bytes] - :return: JobDetails or None. The JobDetails is compatible with MutableMapping - :rtype: ~azure.quantum.models.JobDetails or None + :param resource: The resource instance. Is one of the following types: JobUpdateOptions, JSON, + IO[bytes] Required. + :type resource: ~azure.quantum.models.JobUpdateOptions or JSON or IO[bytes] + :return: JobUpdateOptions. The JobUpdateOptions is compatible with MutableMapping + :rtype: ~azure.quantum.models.JobUpdateOptions :raises ~azure.core.exceptions.HttpResponseError: """ error_map: MutableMapping = { @@ -830,16 +587,16 @@ def update( _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.JobDetails]] = kwargs.pop("cls", None) + cls: ClsType[_models.JobUpdateOptions] = kwargs.pop("cls", None) - content_type = content_type or "application/json-patch+json" + content_type = content_type or "application/merge-patch+json" _content = None if isinstance(resource, (IOBase, bytes)): _content = resource else: _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_jobs_update_request( + _request = build_services_jobs_update_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, @@ -851,46 +608,41 @@ def update( params=_params, ) path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" - ), + 
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200]: if _stream: try: - response.read() # Load the body in memory and close the socket + await response.read() # Load the body in memory and close the socket except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) - deserialized = None - if response.status_code == 200: - if _stream: - deserialized = response.iter_bytes() - else: - deserialized = _deserialize(_models.JobDetails, response.json()) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.JobUpdateOptions, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - @distributed_trace - def delete( # pylint: disable=inconsistent-return-statements + @distributed_trace_async + async def delete( self, subscription_id: str, resource_group_name: str, workspace_name: str, job_id: str, **kwargs: Any ) -> None: - """Request the cancellation of an existing job. + """Delete a job by its id. Use for cancellation in versions before 2025-12-01-preview. :param subscription_id: The Azure subscription ID. Required. 
:type subscription_id: str @@ -917,7 +669,7 @@ def delete( # pylint: disable=inconsistent-return-statements cls: ClsType[None] = kwargs.pop("cls", None) - _request = build_jobs_delete_request( + _request = build_services_jobs_delete_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, @@ -927,15 +679,12 @@ def delete( # pylint: disable=inconsistent-return-statements params=_params, ) path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" - ), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -948,8 +697,93 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) # type: ignore - @distributed_trace - def get( + @distributed_trace_async + @api_version_validation( + method_added_on="2025-12-01-preview", + params_added_on={ + "2025-12-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "workspace_name", + "job_id", + "accept", + ] + }, + api_versions_list=["2025-12-01-preview", "2026-01-15-preview"], + ) + async def cancel( + self, subscription_id: str, resource_group_name: str, workspace_name: str, job_id: str, **kwargs: Any + ) -> _models.JobDetails: + """Request the cancellation of an existing job. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. 
+ :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str + :param job_id: Id of the job. Required. + :type job_id: str + :return: JobDetails. The JobDetails is compatible with MutableMapping + :rtype: ~azure.quantum.models.JobDetails + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.JobDetails] = kwargs.pop("cls", None) + + _request = build_services_jobs_cancel_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + job_id=job_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.JobDetails, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return 
deserialized # type: ignore + + @distributed_trace_async + async def get( self, subscription_id: str, resource_group_name: str, workspace_name: str, job_id: str, **kwargs: Any ) -> _models.JobDetails: """Get job by its id. @@ -979,7 +813,7 @@ def get( cls: ClsType[_models.JobDetails] = kwargs.pop("cls", None) - _request = build_jobs_get_request( + _request = build_services_jobs_get_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, @@ -989,15 +823,12 @@ def get( params=_params, ) path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" - ), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1006,7 +837,7 @@ def get( if response.status_code not in [200]: if _stream: try: - response.read() # Load the body in memory and close the socket + await response.read() # Load the body in memory and close the socket except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) @@ -1023,6 +854,16 @@ def get( return deserialized # type: ignore @distributed_trace + @api_version_validation( + params_added_on={"2024-10-01-preview": ["filter", "skip", "top", "orderby"]}, + api_versions_list=[ + "2024-03-01-preview", + "2024-10-01-preview", + "2025-09-01-preview", + "2025-12-01-preview", + "2026-01-15-preview", + ], + ) def list( self, subscription_id: str, @@ 
-1034,7 +875,7 @@ def list( top: Optional[int] = None, orderby: Optional[str] = None, **kwargs: Any - ) -> Iterable["_models.JobDetails"]: + ) -> AsyncItemPaged["_models.JobDetails"]: """List all jobs. :param subscription_id: The Azure subscription ID. Required. @@ -1052,13 +893,13 @@ def list( :keyword orderby: The order of returned items. Default value is None. :paramtype orderby: str :return: An iterator like instance of JobDetails - :rtype: ~azure.core.paging.ItemPaged[~azure.quantum.models.JobDetails] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.quantum.models.JobDetails] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.JobDetails]] = kwargs.pop("cls", None) + cls: ClsType[list[_models.JobDetails]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -1071,7 +912,7 @@ def list( def prepare_request(next_link=None): if not next_link: - _request = build_jobs_list_request( + _request = build_services_jobs_list_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, @@ -1084,9 +925,8 @@ def prepare_request(next_link=None): params=_params, ) path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1105,27 +945,26 @@ def prepare_request(next_link=None): "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": 
self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } _request.url = self._client.format_url(_request.url, **path_format_arguments) return _request - def extract_data(pipeline_response): + async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.JobDetails], deserialized["value"]) + list_of_elem = _deserialize(list[_models.JobDetails], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.get("nextLink") or None, iter(list_of_elem) + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) - def get_next(next_link=None): + async def get_next(next_link=None): _request = prepare_request(next_link) _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1136,39 +975,31 @@ def get_next(next_link=None): return pipeline_response - return ItemPaged(get_next, extract_data) + return AsyncItemPaged(get_next, extract_data) -class SessionsOperations: +class ServicesProvidersOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.quantum.ServicesClient`'s - :attr:`sessions` attribute. + :class:`~azure.quantum.aio.WorkspaceClient`'s + :attr:`providers` attribute. 
""" - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: WorkspaceClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @overload - def create_or_replace( - self, - subscription_id: str, - resource_group_name: str, - workspace_name: str, - session_id: str, - resource: _models.SessionDetails, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.SessionDetails: - """Open a new session. + @distributed_trace + def list( + self, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.ProviderStatus"]: + """List all providers in the workspace with their respective status. :param subscription_id: The Azure subscription ID. Required. :type subscription_id: str @@ -1176,36 +1007,266 @@ def create_or_replace( :type resource_group_name: str :param workspace_name: Name of the Azure Quantum workspace. Required. :type workspace_name: str - :param session_id: Id of the session. Required. - :type session_id: str - :param resource: The resource instance. Required. - :type resource: ~azure.quantum.models.SessionDetails - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: SessionDetails. 
The SessionDetails is compatible with MutableMapping - :rtype: ~azure.quantum.models.SessionDetails + :return: An iterator like instance of ProviderStatus + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.quantum.models.ProviderStatus] :raises ~azure.core.exceptions.HttpResponseError: """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} - @overload - def create_or_replace( - self, - subscription_id: str, - resource_group_name: str, - workspace_name: str, - session_id: str, - resource: JSON, - *, - content_type: str = "application/json", - **kwargs: Any - ) -> _models.SessionDetails: - """Open a new session. + cls: ClsType[list[_models.ProviderStatus]] = kwargs.pop("cls", None) - :param subscription_id: The Azure subscription ID. Required. - :type subscription_id: str - :param resource_group_name: Name of the Azure resource group. Required. - :type resource_group_name: str + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_services_providers_list_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + 
_next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.ProviderStatus], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class ServicesQuotasOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.quantum.aio.WorkspaceClient`'s + :attr:`quotas` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: WorkspaceClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list( + self, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.Quota"]: + """List quotas for the given workspace. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str + :return: An iterator like instance of Quota + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.quantum.models.Quota] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[list[_models.Quota]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_services_quotas_list_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + 
_request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.Quota], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class ServicesSessionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.quantum.aio.WorkspaceClient`'s + :attr:`sessions` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: WorkspaceClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @overload + async def open( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + session_id: str, + resource: _models.SessionDetails, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.SessionDetails: + """Open a new session. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str + :param session_id: Id of the session. Required. + :type session_id: str + :param resource: The resource instance. Required. + :type resource: ~azure.quantum.models.SessionDetails + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: SessionDetails. The SessionDetails is compatible with MutableMapping + :rtype: ~azure.quantum.models.SessionDetails + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def open( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + session_id: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.SessionDetails: + """Open a new session. + + :param subscription_id: The Azure subscription ID. Required. 
+ :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str :param workspace_name: Name of the Azure Quantum workspace. Required. :type workspace_name: str :param session_id: Id of the session. Required. @@ -1221,7 +1282,7 @@ def create_or_replace( """ @overload - def create_or_replace( + async def open( self, subscription_id: str, resource_group_name: str, @@ -1252,8 +1313,8 @@ def create_or_replace( :raises ~azure.core.exceptions.HttpResponseError: """ - @distributed_trace - def create_or_replace( + @distributed_trace_async + async def open( self, subscription_id: str, resource_group_name: str, @@ -1300,7 +1361,7 @@ def create_or_replace( else: _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_sessions_create_or_replace_request( + _request = build_services_sessions_open_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, @@ -1312,15 +1373,12 @@ def create_or_replace( params=_params, ) path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" - ), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1329,7 +1387,7 @@ def create_or_replace( if response.status_code not in [200, 201]: if _stream: try: - response.read() # Load the body in memory and close the socket + await response.read() # Load 
the body in memory and close the socket except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) @@ -1345,8 +1403,8 @@ def create_or_replace( return deserialized # type: ignore - @distributed_trace - def close( + @distributed_trace_async + async def close( self, subscription_id: str, resource_group_name: str, workspace_name: str, session_id: str, **kwargs: Any ) -> _models.SessionDetails: """Close an existing session. @@ -1376,7 +1434,7 @@ def close( cls: ClsType[_models.SessionDetails] = kwargs.pop("cls", None) - _request = build_sessions_close_request( + _request = build_services_sessions_close_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, @@ -1386,15 +1444,12 @@ def close( params=_params, ) path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" - ), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1403,7 +1458,7 @@ def close( if response.status_code not in [200]: if _stream: try: - response.read() # Load the body in memory and close the socket + await response.read() # Load the body in memory and close the socket except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) @@ -1419,8 +1474,8 @@ def close( return deserialized # type: ignore - 
@distributed_trace - def get( + @distributed_trace_async + async def get( self, subscription_id: str, resource_group_name: str, workspace_name: str, session_id: str, **kwargs: Any ) -> _models.SessionDetails: """Get Session by its id. @@ -1450,7 +1505,7 @@ def get( cls: ClsType[_models.SessionDetails] = kwargs.pop("cls", None) - _request = build_sessions_get_request( + _request = build_services_sessions_get_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, @@ -1460,15 +1515,12 @@ def get( params=_params, ) path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" - ), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1477,7 +1529,7 @@ def get( if response.status_code not in [200]: if _stream: try: - response.read() # Load the body in memory and close the socket + await response.read() # Load the body in memory and close the socket except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) @@ -1494,7 +1546,24 @@ def get( return deserialized # type: ignore @distributed_trace - def list( + @api_version_validation( + method_added_on="2024-10-01-preview", + params_added_on={ + "2024-10-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "workspace_name", + "filter", + "skip", + "top", + "orderby", + "accept", + ] + }, + 
api_versions_list=["2024-10-01-preview", "2025-09-01-preview", "2025-12-01-preview", "2026-01-15-preview"], + ) + def listv2( self, subscription_id: str, resource_group_name: str, @@ -1505,7 +1574,7 @@ def list( top: Optional[int] = None, orderby: Optional[str] = None, **kwargs: Any - ) -> Iterable["_models.SessionDetails"]: + ) -> AsyncItemPaged["_models.SessionDetails"]: """List all Sessions. :param subscription_id: The Azure subscription ID. Required. @@ -1523,13 +1592,13 @@ def list( :keyword orderby: The order of returned items. Default value is None. :paramtype orderby: str :return: An iterator like instance of SessionDetails - :rtype: ~azure.core.paging.ItemPaged[~azure.quantum.models.SessionDetails] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.quantum.models.SessionDetails] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.SessionDetails]] = kwargs.pop("cls", None) + cls: ClsType[list[_models.SessionDetails]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -1542,7 +1611,7 @@ def list( def prepare_request(next_link=None): if not next_link: - _request = build_sessions_list_request( + _request = build_services_sessions_listv2_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, @@ -1555,9 +1624,8 @@ def prepare_request(next_link=None): params=_params, ) path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1576,27 +1644,26 @@ def prepare_request(next_link=None): "GET", 
urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } _request.url = self._client.format_url(_request.url, **path_format_arguments) return _request - def extract_data(pipeline_response): + async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.SessionDetails], deserialized["value"]) + list_of_elem = _deserialize(list[_models.SessionDetails], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.get("nextLink") or None, iter(list_of_elem) + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) - def get_next(next_link=None): + async def get_next(next_link=None): _request = prepare_request(next_link) _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1607,9 +1674,19 @@ def get_next(next_link=None): return pipeline_response - return ItemPaged(get_next, extract_data) + return AsyncItemPaged(get_next, extract_data) @distributed_trace + @api_version_validation( + params_added_on={"2024-10-01-preview": ["filter", "skip", "top", "orderby"]}, + api_versions_list=[ + "2024-03-01-preview", + "2024-10-01-preview", + "2025-09-01-preview", + "2025-12-01-preview", + "2026-01-15-preview", + ], + ) def jobs_list( self, subscription_id: str, @@ -1622,7 +1699,7 @@ def jobs_list( top: Optional[int] = None, orderby: 
Optional[str] = None, **kwargs: Any - ) -> Iterable["_models.JobDetails"]: + ) -> AsyncItemPaged["_models.JobDetails"]: """List jobs in a session. :param subscription_id: The Azure subscription ID. Required. @@ -1642,13 +1719,13 @@ def jobs_list( :keyword orderby: The order of returned items. Default value is None. :paramtype orderby: str :return: An iterator like instance of JobDetails - :rtype: ~azure.core.paging.ItemPaged[~azure.quantum.models.JobDetails] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.quantum.models.JobDetails] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[List[_models.JobDetails]] = kwargs.pop("cls", None) + cls: ClsType[list[_models.JobDetails]] = kwargs.pop("cls", None) error_map: MutableMapping = { 401: ClientAuthenticationError, @@ -1661,7 +1738,7 @@ def jobs_list( def prepare_request(next_link=None): if not next_link: - _request = build_sessions_jobs_list_request( + _request = build_services_sessions_jobs_list_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, @@ -1675,122 +1752,8 @@ def prepare_request(next_link=None): params=_params, ) path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", 
urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - return _request - - def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.JobDetails], deserialized["value"]) - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.get("nextLink") or None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - -class ProvidersOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.quantum.ServicesClient`'s - :attr:`providers` attribute. 
- """ - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> Iterable["_models.ProviderStatus"]: - """List all providers in the workspace with their respective status. - - :param subscription_id: The Azure subscription ID. Required. - :type subscription_id: str - :param resource_group_name: Name of the Azure resource group. Required. - :type resource_group_name: str - :param workspace_name: Name of the Azure Quantum workspace. Required. - :type workspace_name: str - :return: An iterator like instance of ProviderStatus - :rtype: ~azure.core.paging.ItemPaged[~azure.quantum.models.ProviderStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[List[_models.ProviderStatus]] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_providers_list_request( - subscription_id=subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", 
self._config.service_base_url, "str" + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1809,27 +1772,26 @@ def prepare_request(next_link=None): "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } _request.url = self._client.format_url(_request.url, **path_format_arguments) return _request - def extract_data(pipeline_response): + async def extract_data(pipeline_response): deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.ProviderStatus], deserialized["value"]) + list_of_elem = _deserialize(list[_models.JobDetails], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.get("nextLink") or None, iter(list_of_elem) + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) - def get_next(next_link=None): + async def get_next(next_link=None): _request = prepare_request(next_link) _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1840,28 +1802,28 @@ def get_next(next_link=None): return pipeline_response - return ItemPaged(get_next, extract_data) + return AsyncItemPaged(get_next, extract_data) -class StorageOperations: +class ServicesStorageOperations: """ .. 
warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.quantum.ServicesClient`'s + :class:`~azure.quantum.aio.WorkspaceClient`'s :attr:`storage` attribute. """ - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: WorkspaceClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @overload - def get_sas_uri( + async def get_sas_uri( self, subscription_id: str, resource_group_name: str, @@ -1872,7 +1834,9 @@ def get_sas_uri( **kwargs: Any ) -> _models.SasUriResponse: """Gets a URL with SAS token for a container/blob in the storage account associated with the - workspace. The SAS URL can be used to upload job input and/or download job output. + workspace. Starting with version 2026-01-15-preview, when used for a container the container is + also created if it does not already exist. The SAS URL can be used to upload job input and/or + download job output. :param subscription_id: The Azure subscription ID. Required. 
:type subscription_id: str @@ -1891,7 +1855,7 @@ def get_sas_uri( """ @overload - def get_sas_uri( + async def get_sas_uri( self, subscription_id: str, resource_group_name: str, @@ -1902,7 +1866,9 @@ def get_sas_uri( **kwargs: Any ) -> _models.SasUriResponse: """Gets a URL with SAS token for a container/blob in the storage account associated with the - workspace. The SAS URL can be used to upload job input and/or download job output. + workspace. Starting with version 2026-01-15-preview, when used for a container the container is + also created if it does not already exist. The SAS URL can be used to upload job input and/or + download job output. :param subscription_id: The Azure subscription ID. Required. :type subscription_id: str @@ -1921,7 +1887,7 @@ def get_sas_uri( """ @overload - def get_sas_uri( + async def get_sas_uri( self, subscription_id: str, resource_group_name: str, @@ -1932,7 +1898,9 @@ def get_sas_uri( **kwargs: Any ) -> _models.SasUriResponse: """Gets a URL with SAS token for a container/blob in the storage account associated with the - workspace. The SAS URL can be used to upload job input and/or download job output. + workspace. Starting with version 2026-01-15-preview, when used for a container the container is + also created if it does not already exist. The SAS URL can be used to upload job input and/or + download job output. :param subscription_id: The Azure subscription ID. Required. :type subscription_id: str @@ -1950,8 +1918,8 @@ def get_sas_uri( :raises ~azure.core.exceptions.HttpResponseError: """ - @distributed_trace - def get_sas_uri( + @distributed_trace_async + async def get_sas_uri( self, subscription_id: str, resource_group_name: str, @@ -1960,7 +1928,9 @@ def get_sas_uri( **kwargs: Any ) -> _models.SasUriResponse: """Gets a URL with SAS token for a container/blob in the storage account associated with the - workspace. The SAS URL can be used to upload job input and/or download job output. + workspace. 
Starting with version 2026-01-15-preview, when used for a container the container is + also created if it does not already exist. The SAS URL can be used to upload job input and/or + download job output. :param subscription_id: The Azure subscription ID. Required. :type subscription_id: str @@ -1996,7 +1966,7 @@ def get_sas_uri( else: _content = json.dumps(blob_details, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_storage_get_sas_uri_request( + _request = build_services_storage_get_sas_uri_request( subscription_id=subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, @@ -2007,15 +1977,12 @@ def get_sas_uri( params=_params, ) path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" - ), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } _request.url = self._client.format_url(_request.url, **path_format_arguments) _stream = kwargs.pop("stream", False) - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -2024,7 +1991,7 @@ def get_sas_uri( if response.status_code not in [200]: if _stream: try: - response.read() # Load the body in memory and close the socket + await response.read() # Load the body in memory and close the socket except (StreamConsumedError, StreamClosedError): pass map_error(status_code=response.status_code, response=response, error_map=error_map) @@ -2039,250 +2006,3 @@ def get_sas_uri( return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - - -class QuotasOperations: - """ - .. 
warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.quantum.ServicesClient`'s - :attr:`quotas` attribute. - """ - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any - ) -> Iterable["_models.Quota"]: - """List quotas for the given workspace. - - :param subscription_id: The Azure subscription ID. Required. - :type subscription_id: str - :param resource_group_name: Name of the Azure resource group. Required. - :type resource_group_name: str - :param workspace_name: Name of the Azure Quantum workspace. Required. 
- :type workspace_name: str - :return: An iterator like instance of Quota - :rtype: ~azure.core.paging.ItemPaged[~azure.quantum.models.Quota] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[List[_models.Quota]] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_quotas_list_request( - subscription_id=subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - return 
_request - - def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.Quota], deserialized["value"]) - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.get("nextLink") or None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - return pipeline_response - - return ItemPaged(get_next, extract_data) - - -class TopLevelItemsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.quantum.ServicesClient`'s - :attr:`top_level_items` attribute. - """ - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @distributed_trace - def list( - self, - subscription_id: str, - resource_group_name: str, - workspace_name: str, - *, - filter: Optional[str] = None, - skip: Optional[int] = None, - top: Optional[int] = None, - orderby: Optional[str] = None, - **kwargs: Any - ) -> Iterable["_models.ItemDetails"]: - """List top-level items. - - :param subscription_id: The Azure subscription ID. Required. - :type subscription_id: str - :param resource_group_name: Name of the Azure resource group. Required. 
- :type resource_group_name: str - :param workspace_name: Name of the Azure Quantum workspace. Required. - :type workspace_name: str - :keyword filter: Filter the result list using the given expression. Default value is None. - :paramtype filter: str - :keyword skip: The number of result items to skip. Default value is None. - :paramtype skip: int - :keyword top: The number of jobs taken. Default value is None. - :paramtype top: int - :keyword orderby: The order of returned items. Default value is None. - :paramtype orderby: str - :return: An iterator like instance of ItemDetails - :rtype: ~azure.core.paging.ItemPaged[~azure.quantum.models.ItemDetails] - :raises ~azure.core.exceptions.HttpResponseError: - """ - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[List[_models.ItemDetails]] = kwargs.pop("cls", None) - - error_map: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - def prepare_request(next_link=None): - if not next_link: - - _request = build_top_level_items_list_request( - subscription_id=subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - filter=filter, - skip=skip, - top=top, - orderby=orderby, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - else: - # make call to next link with the client's api-version - _parsed_next_link = urllib.parse.urlparse(next_link) - _next_request_params = case_insensitive_dict( - { - key: [urllib.parse.quote(v) for v in value] - for 
key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() - } - ) - _next_request_params["api-version"] = self._config.api_version - _request = HttpRequest( - "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params - ) - path_format_arguments = { - "region": self._serialize.url("self._config.region", self._config.region, "str"), - "serviceBaseUrl": self._serialize.url( - "self._config.service_base_url", self._config.service_base_url, "str" - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - return _request - - def extract_data(pipeline_response): - deserialized = pipeline_response.http_response.json() - list_of_elem = _deserialize(List[_models.ItemDetails], deserialized["value"]) - if cls: - list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.get("nextLink") or None, iter(list_of_elem) - - def get_next(next_link=None): - _request = prepare_request(next_link) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - response = pipeline_response.http_response - - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - - return pipeline_response - - return ItemPaged(get_next, extract_data) diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/_patch.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/operations/_patch.py similarity index 52% rename from src/quantum/azext_quantum/vendored_sdks/azure_quantum/_patch.py rename to src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/operations/_patch.py index f7dd3251033..87676c65a8f 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum/_patch.py +++ 
b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/aio/operations/_patch.py @@ -1,14 +1,15 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/models/__init__.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/models/__init__.py index 65301085fbc..219a54d2c34 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/models/__init__.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/models/__init__.py @@ -2,64 +2,79 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._models import BlobDetails -from ._models import CostEstimate -from ._models import ErrorData -from ._models import ItemDetails -from ._models import JobDetails -from ._models import JsonPatchDocument -from ._models import ProviderStatus -from ._models import QuantumComputingData -from ._models import Quota -from ._models import RestError -from ._models import SasUriResponse -from ._models import SessionDetails -from ._models import TargetStatus -from ._models import UsageEvent +from typing import TYPE_CHECKING -from ._enums import DimensionScope -from ._enums import ItemType -from ._enums import JobStatus -from ._enums import JobType -from ._enums import JsonPatchOperation -from ._enums import MeterPeriod -from ._enums import ProviderAvailability -from ._enums import SessionJobFailurePolicy -from ._enums import SessionStatus -from ._enums import TargetAvailability +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._models import ( # type: ignore + BlobDetails, + CostEstimate, + InnerError, + ItemDetails, + JobDetails, + JobUpdateOptions, + ProviderStatus, + QuantumComputingData, + Quota, + SasUriResponse, + SessionDetails, + TargetStatus, + Usage, + UsageEvent, + WorkspaceItemError, +) + +from ._enums import ( # type: ignore + CreatedByType, + DimensionScope, + ItemType, + JobStatus, + JobType, + MeterPeriod, + Priority, + ProviderAvailability, + SessionJobFailurePolicy, + SessionStatus, + TargetAvailability, +) from ._patch import __all__ as _patch_all -from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ "BlobDetails", "CostEstimate", - "ErrorData", + "InnerError", "ItemDetails", "JobDetails", - "JsonPatchDocument", + "JobUpdateOptions", "ProviderStatus", "QuantumComputingData", "Quota", - "RestError", 
"SasUriResponse", "SessionDetails", "TargetStatus", + "Usage", "UsageEvent", + "WorkspaceItemError", + "CreatedByType", "DimensionScope", "ItemType", "JobStatus", "JobType", - "JsonPatchOperation", "MeterPeriod", + "Priority", "ProviderAvailability", "SessionJobFailurePolicy", "SessionStatus", "TargetAvailability", ] -__all__.extend([p for p in _patch_all if p not in __all__]) +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/models/_enums.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/models/_enums.py index 2bd23c3f825..7cd9eff3413 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/models/_enums.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/models/_enums.py @@ -2,7 +2,7 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -10,15 +10,30 @@ from azure.core import CaseInsensitiveEnumMeta +class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of identity that created the item.""" + + USER = "User" + """The item is created by user.""" + APPLICATION = "Application" + """The item is created by application.""" + MANAGED_IDENTITY = "ManagedIdentity" + """The item is created using managed identity.""" + KEY = "Key" + """The item is created using key.""" + + class DimensionScope(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The scope at which the quota is applied.""" + """The scope at which the quota is applied to.""" WORKSPACE = "Workspace" + """The quota is applied to the Quantum Workspace.""" SUBSCRIPTION = "Subscription" + """The quota is applied to the Azure Subscription.""" class ItemType(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The type of item.""" + """The type of the workspace item.""" JOB = "Job" """A program, problem, or application submitted for processing.""" @@ -29,30 +44,37 @@ class ItemType(str, Enum, metaclass=CaseInsensitiveEnumMeta): class JobStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The status of the job.""" + QUEUED = "Queued" + """The job has been queued.""" WAITING = "Waiting" + """The job is waiting in the queue to be executed.""" EXECUTING = "Executing" + """The job is being executed.""" + CANCELLATION_REQUESTED = "CancellationRequested" + """Cancellation of the job has been requested.""" + CANCELLING = "Cancelling" + """The job is in the process of being cancelled.""" + FINISHING = "Finishing" + """The job is in the process of being finished.""" + COMPLETED = "Completed" + """The job completed.""" SUCCEEDED = "Succeeded" + """The job completed with success.""" FAILED = "Failed" + """The job completed with failure.""" CANCELLED = "Cancelled" + """The job was cancelled.""" class JobType(str, Enum, metaclass=CaseInsensitiveEnumMeta): 
- """The type of job.""" + """The type of the job.""" UNKNOWN = "Unknown" + """Unknown job type.""" QUANTUM_COMPUTING = "QuantumComputing" + """Quantum Computing job type.""" OPTIMIZATION = "Optimization" - - -class JsonPatchOperation(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """The operation to be performed.""" - - ADD = "add" - REMOVE = "remove" - REPLACE = "replace" - MOVE = "move" - COPY = "copy" - TEST = "test" + """Optimization job type.""" class MeterPeriod(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -61,15 +83,29 @@ class MeterPeriod(str, Enum, metaclass=CaseInsensitiveEnumMeta): """ NONE = "None" + """The meter period is instantaneous. Used for concurrent quotas.""" MONTHLY = "Monthly" + """The meter period is per month.""" + + +class Priority(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Job priority levels.""" + + STANDARD = "Standard" + """The job's base priority.""" + HIGH = "High" + """The job's priority is elevated.""" class ProviderAvailability(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Provider availability.""" AVAILABLE = "Available" + """Provider is available.""" DEGRADED = "Degraded" + """Provider is available with degraded experience.""" UNAVAILABLE = "Unavailable" + """Provider is unavailable.""" class SessionJobFailurePolicy(str, Enum, metaclass=CaseInsensitiveEnumMeta): @@ -85,16 +121,25 @@ class SessionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The status of the session.""" WAITING = "Waiting" + """The session is waiting in the queue to be executed.""" EXECUTING = "Executing" + """The session is being executed.""" SUCCEEDED = "Succeeded" + """The session completed with success.""" FAILED = "Failed" + """The session completed with failure.""" FAILURE_S_ = "Failure(s)" + """The session completed with some failures.""" TIMED_OUT = "TimedOut" + """The session timed out.""" class TargetAvailability(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Target availability.""" AVAILABLE = "Available" + """Target 
is available.""" DEGRADED = "Degraded" + """Target is available with degraded experience.""" UNAVAILABLE = "Unavailable" + """Target is unavailable.""" diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/models/_models.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/models/_models.py index 5579fb8e9e3..b1ae5481cd0 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/models/_models.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/models/_models.py @@ -1,32 +1,27 @@ +# pylint: disable=line-too-long,useless-suppression # coding=utf-8 -# pylint: disable=too-many-lines # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +# pylint: disable=useless-super-delegation -import sys -from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union +import datetime +from typing import Any, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload -from .. import _serialization +from azure.core.exceptions import ODataV4Format -if sys.version_info >= (3, 9): - from collections.abc import MutableMapping -else: - from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +from .._utils.model_base import Model as _Model, rest_discriminator, rest_field +from ._enums import ItemType if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports from .. 
import models as _models -JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object -class BlobDetails(_serialization.Model): - """Blob details. - - All required parameters must be populated in order to send to server. +class BlobDetails(_Model): + """The details (name and container) of the blob to store or download data. :ivar container_name: The container name. Required. :vartype container_name: str @@ -34,115 +29,115 @@ class BlobDetails(_serialization.Model): :vartype blob_name: str """ - _validation = { - "container_name": {"required": True}, - } + container_name: str = rest_field(name="containerName", visibility=["read", "create"]) + """The container name. Required.""" + blob_name: Optional[str] = rest_field(name="blobName", visibility=["read", "create"]) + """The blob name.""" - _attribute_map = { - "container_name": {"key": "containerName", "type": "str"}, - "blob_name": {"key": "blobName", "type": "str"}, - } + @overload + def __init__( + self, + *, + container_name: str, + blob_name: Optional[str] = None, + ) -> None: ... - def __init__(self, *, container_name: str, blob_name: Optional[str] = None, **kwargs: Any) -> None: + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword container_name: The container name. Required. - :paramtype container_name: str - :keyword blob_name: The blob name. - :paramtype blob_name: str + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.container_name = container_name - self.blob_name = blob_name + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) -class CostEstimate(_serialization.Model): +class CostEstimate(_Model): """The job cost billed by the provider. The final cost on your bill might be slightly different due to added taxes and currency conversion rates. - :ivar currency_code: The currency code. + :ivar currency_code: The currency code. Required. 
:vartype currency_code: str :ivar events: List of usage events. - :vartype events: list[~azure.quantum._client.models.UsageEvent] - :ivar estimated_total: The estimated total. + :vartype events: list[~azure.quantum.models.UsageEvent] + :ivar estimated_total: The estimated total. Required. :vartype estimated_total: float """ - _attribute_map = { - "currency_code": {"key": "currencyCode", "type": "str"}, - "events": {"key": "events", "type": "[UsageEvent]"}, - "estimated_total": {"key": "estimatedTotal", "type": "float"}, - } - + currency_code: str = rest_field(name="currencyCode", visibility=["read", "create", "update", "delete", "query"]) + """The currency code. Required.""" + events: Optional[list["_models.UsageEvent"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """List of usage events.""" + estimated_total: float = rest_field( + name="estimatedTotal", visibility=["read", "create", "update", "delete", "query"] + ) + """The estimated total. Required.""" + + @overload def __init__( self, *, - currency_code: Optional[str] = None, - events: Optional[List["_models.UsageEvent"]] = None, - estimated_total: Optional[float] = None, - **kwargs: Any - ) -> None: + currency_code: str, + estimated_total: float, + events: Optional[list["_models.UsageEvent"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword currency_code: The currency code. - :paramtype currency_code: str - :keyword events: List of usage events. - :paramtype events: list[~azure.quantum._client.models.UsageEvent] - :keyword estimated_total: The estimated total. - :paramtype estimated_total: float + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.currency_code = currency_code - self.events = events - self.estimated_total = estimated_total + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) -class ErrorData(_serialization.Model): - """An error response from Azure. - All required parameters must be populated in order to send to server. +class InnerError(_Model): + """An object containing more specific information about the error. As per Azure REST API + guidelines - `https://aka.ms/AzureRestApiGuidelines#handling-errors + `_. - :ivar code: An identifier for the error. Codes are invariant and are intended to be consumed - programmatically. Required. + :ivar code: One of a server-defined set of error codes. :vartype code: str - :ivar message: A message describing the error, intended to be suitable for displaying in a user - interface. Required. - :vartype message: str + :ivar innererror: Inner error. + :vartype innererror: ~azure.quantum.models.InnerError """ - _validation = { - "code": {"required": True}, - "message": {"required": True}, - } + code: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """One of a server-defined set of error codes.""" + innererror: Optional["_models.InnerError"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Inner error.""" - _attribute_map = { - "code": {"key": "code", "type": "str"}, - "message": {"key": "message", "type": "str"}, - } + @overload + def __init__( + self, + *, + code: Optional[str] = None, + innererror: Optional["_models.InnerError"] = None, + ) -> None: ... - def __init__(self, *, code: str, message: str, **kwargs: Any) -> None: + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword code: An identifier for the error. Codes are invariant and are intended to be consumed - programmatically. Required. 
- :paramtype code: str - :keyword message: A message describing the error, intended to be suitable for displaying in a - user interface. Required. - :paramtype message: str + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.code = code - self.message = message + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) -class ItemDetails(_serialization.Model): - """Item details. An item can be a job or a session. + +class ItemDetails(_Model): + """A workspace item. You probably want to use the sub-classes and not this class directly. Known sub-classes are: JobDetails, SessionDetails - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to server. - - :ivar id: The id of the item. Required. + :ivar id: Id of the item. Required. :vartype id: str :ivar name: The name of the item. It is not required for the name to be unique and it's only used for display purposes. Required. @@ -151,128 +146,125 @@ class ItemDetails(_serialization.Model): :vartype provider_id: str :ivar target: The target identifier to run the job. Required. :vartype target: str - :ivar item_type: The type of item. Required. Known values are: "Job" and "Session". - :vartype item_type: str or ~azure.quantum._client.models.ItemType + :ivar item_type: Type of the Quantum Workspace item. Required. Known values are: "Job" and + "Session". + :vartype item_type: str or ~azure.quantum.models.ItemType :ivar creation_time: The creation time of the item. :vartype creation_time: ~datetime.datetime + :ivar created_by: The identity that created the item. + :vartype created_by: str + :ivar created_by_type: The type of identity that created the item. Known values are: "User", + "Application", "ManagedIdentity", and "Key". 
+ :vartype created_by_type: str or ~azure.quantum.models.CreatedByType + :ivar last_modified_time: The timestamp of the item last modification initiated by the + customer. + :vartype last_modified_time: ~datetime.datetime + :ivar last_modified_by: The identity that last modified the item. + :vartype last_modified_by: str + :ivar last_modified_by_type: The type of identity that last modified the item. Known values + are: "User", "Application", "ManagedIdentity", and "Key". + :vartype last_modified_by_type: str or ~azure.quantum.models.CreatedByType + :ivar last_updated_time: The last time the item was updated by the system. + :vartype last_updated_time: ~datetime.datetime :ivar begin_execution_time: The time when the item began execution. :vartype begin_execution_time: ~datetime.datetime :ivar end_execution_time: The time when the item finished execution. :vartype end_execution_time: ~datetime.datetime - :ivar cost_estimate: The job cost billed by the provider. The final cost on your bill might be - slightly different due to added taxes and currency conversion rates. - :vartype cost_estimate: ~azure.quantum._client.models.CostEstimate - :ivar error_data: An error response from Azure. - :vartype error_data: ~azure.quantum._client.models.ErrorData + :ivar cost_estimate: Cost estimate. + :vartype cost_estimate: ~azure.quantum.models.CostEstimate + :ivar error_data: Error information. + :vartype error_data: ~azure.quantum.models.WorkspaceItemError + :ivar priority: Priority of job or session. Known values are: "Standard" and "High". + :vartype priority: str or ~azure.quantum.models.Priority + :ivar tags: List of user-supplied tags associated with the job. + :vartype tags: list[str] + :ivar usage: Resource consumption metrics containing provider-specific usage data such as + execution time, quantum shots consumed etc. 
+ :vartype usage: ~azure.quantum.models.Usage """ - _validation = { - "id": {"required": True}, - "name": {"required": True}, - "provider_id": {"required": True}, - "target": {"required": True}, - "item_type": {"required": True}, - "creation_time": {"readonly": True}, - "begin_execution_time": {"readonly": True}, - "end_execution_time": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "provider_id": {"key": "providerId", "type": "str"}, - "target": {"key": "target", "type": "str"}, - "item_type": {"key": "itemType", "type": "str"}, - "creation_time": {"key": "creationTime", "type": "iso-8601"}, - "begin_execution_time": {"key": "beginExecutionTime", "type": "iso-8601"}, - "end_execution_time": {"key": "endExecutionTime", "type": "iso-8601"}, - "cost_estimate": {"key": "costEstimate", "type": "CostEstimate"}, - "error_data": {"key": "errorData", "type": "ErrorData"}, - } - - _subtype_map = {"item_type": {"Job": "JobDetails", "Session": "SessionDetails"}} - + __mapping__: dict[str, _Model] = {} + id: str = rest_field(visibility=["read"]) + """Id of the item. Required.""" + name: str = rest_field(visibility=["read", "create", "update"]) + """The name of the item. It is not required for the name to be unique and it's only used for + display purposes. Required.""" + provider_id: str = rest_field(name="providerId", visibility=["read", "create"]) + """The unique identifier for the provider. Required.""" + target: str = rest_field(visibility=["read", "create"]) + """The target identifier to run the job. Required.""" + item_type: str = rest_discriminator(name="itemType", visibility=["read", "create"]) + """Type of the Quantum Workspace item. Required. 
Known values are: \"Job\" and \"Session\".""" + creation_time: Optional[datetime.datetime] = rest_field(name="creationTime", visibility=["read"], format="rfc3339") + """The creation time of the item.""" + created_by: Optional[str] = rest_field(name="createdBy", visibility=["read"]) + """The identity that created the item.""" + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = rest_field( + name="createdByType", visibility=["read"] + ) + """The type of identity that created the item. Known values are: \"User\", \"Application\", + \"ManagedIdentity\", and \"Key\".""" + last_modified_time: Optional[datetime.datetime] = rest_field( + name="lastModifiedTime", visibility=["read"], format="rfc3339" + ) + """The timestamp of the item last modification initiated by the customer.""" + last_modified_by: Optional[str] = rest_field(name="lastModifiedBy", visibility=["read"]) + """The identity that last modified the item.""" + last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = rest_field( + name="lastModifiedByType", visibility=["read"] + ) + """The type of identity that last modified the item. 
Known values are: \"User\", \"Application\", + \"ManagedIdentity\", and \"Key\".""" + last_updated_time: Optional[datetime.datetime] = rest_field( + name="lastUpdatedTime", visibility=["read"], format="rfc3339" + ) + """The last time the item was updated by the system.""" + begin_execution_time: Optional[datetime.datetime] = rest_field( + name="beginExecutionTime", visibility=["read"], format="rfc3339" + ) + """The time when the item began execution.""" + end_execution_time: Optional[datetime.datetime] = rest_field( + name="endExecutionTime", visibility=["read"], format="rfc3339" + ) + """The time when the item finished execution.""" + cost_estimate: Optional["_models.CostEstimate"] = rest_field(name="costEstimate", visibility=["read"]) + """Cost estimate.""" + error_data: Optional["_models.WorkspaceItemError"] = rest_field(name="errorData", visibility=["read"]) + """Error information.""" + priority: Optional[Union[str, "_models.Priority"]] = rest_field(visibility=["read", "create", "update"]) + """Priority of job or session. Known values are: \"Standard\" and \"High\".""" + tags: Optional[list[str]] = rest_field(visibility=["read", "create", "update"]) + """List of user-supplied tags associated with the job.""" + usage: Optional["_models.Usage"] = rest_field(visibility=["read"]) + """Resource consumption metrics containing provider-specific usage data such as execution time, + quantum shots consumed etc.""" + + @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin name: str, provider_id: str, target: str, - cost_estimate: Optional["_models.CostEstimate"] = None, - error_data: Optional["_models.ErrorData"] = None, - **kwargs: Any - ) -> None: - """ - :keyword id: The id of the item. Required. - :paramtype id: str - :keyword name: The name of the item. It is not required for the name to be unique and it's only - used for display purposes. Required. - :paramtype name: str - :keyword provider_id: The unique identifier for the provider. 
Required. - :paramtype provider_id: str - :keyword target: The target identifier to run the job. Required. - :paramtype target: str - :keyword cost_estimate: The job cost billed by the provider. The final cost on your bill might - be slightly different due to added taxes and currency conversion rates. - :paramtype cost_estimate: ~azure.quantum._client.models.CostEstimate - :keyword error_data: An error response from Azure. - :paramtype error_data: ~azure.quantum._client.models.ErrorData - """ - super().__init__(**kwargs) - self.id = id - self.name = name - self.provider_id = provider_id - self.target = target - self.item_type: Optional[str] = None - self.creation_time = None - self.begin_execution_time = None - self.end_execution_time = None - self.cost_estimate = cost_estimate - self.error_data = error_data - - -class ItemDetailsList(_serialization.Model): - """List of item details. - - All required parameters must be populated in order to send to server. - - :ivar value: Required. - :vartype value: list[~azure.quantum._client.models.ItemDetails] - :ivar next_link: Link to the next page of results. - :vartype next_link: str - """ - - _validation = { - "value": {"required": True}, - } - - _attribute_map = { - "value": {"key": "value", "type": "[ItemDetails]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } + item_type: str, + priority: Optional[Union[str, "_models.Priority"]] = None, + tags: Optional[list[str]] = None, + ) -> None: ... - def __init__(self, *, value: List["_models.ItemDetails"], next_link: Optional[str] = None, **kwargs: Any) -> None: + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword value: Required. - :paramtype value: list[~azure.quantum._client.models.ItemDetails] - :keyword next_link: Link to the next page of results. - :paramtype next_link: str + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.value = value - self.next_link = next_link + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) -class JobDetails(ItemDetails): # pylint: disable=too-many-instance-attributes - """Job details. - Variables are only populated by the server, and will be ignored when sending a request. +class JobDetails(ItemDetails, discriminator="Job"): + """A job to be run in the workspace. - All required parameters must be populated in order to send to server. - - :ivar id: The id of the item. Required. - :vartype id: str :ivar name: The name of the item. It is not required for the name to be unique and it's only used for display purposes. Required. :vartype name: str @@ -280,511 +272,280 @@ class JobDetails(ItemDetails): # pylint: disable=too-many-instance-attributes :vartype provider_id: str :ivar target: The target identifier to run the job. Required. :vartype target: str - :ivar item_type: The type of item. Required. Known values are: "Job" and "Session". - :vartype item_type: str or ~azure.quantum._client.models.ItemType :ivar creation_time: The creation time of the item. :vartype creation_time: ~datetime.datetime + :ivar created_by: The identity that created the item. + :vartype created_by: str + :ivar created_by_type: The type of identity that created the item. Known values are: "User", + "Application", "ManagedIdentity", and "Key". + :vartype created_by_type: str or ~azure.quantum.models.CreatedByType + :ivar last_modified_time: The timestamp of the item last modification initiated by the + customer. + :vartype last_modified_time: ~datetime.datetime + :ivar last_modified_by: The identity that last modified the item. + :vartype last_modified_by: str + :ivar last_modified_by_type: The type of identity that last modified the item. Known values + are: "User", "Application", "ManagedIdentity", and "Key". 
+ :vartype last_modified_by_type: str or ~azure.quantum.models.CreatedByType + :ivar last_updated_time: The last time the item was updated by the system. + :vartype last_updated_time: ~datetime.datetime :ivar begin_execution_time: The time when the item began execution. :vartype begin_execution_time: ~datetime.datetime :ivar end_execution_time: The time when the item finished execution. :vartype end_execution_time: ~datetime.datetime - :ivar cost_estimate: The job cost billed by the provider. The final cost on your bill might be - slightly different due to added taxes and currency conversion rates. - :vartype cost_estimate: ~azure.quantum._client.models.CostEstimate - :ivar error_data: An error response from Azure. - :vartype error_data: ~azure.quantum._client.models.ErrorData + :ivar cost_estimate: Cost estimate. + :vartype cost_estimate: ~azure.quantum.models.CostEstimate + :ivar error_data: Error information. + :vartype error_data: ~azure.quantum.models.WorkspaceItemError + :ivar priority: Priority of job or session. Known values are: "Standard" and "High". + :vartype priority: str or ~azure.quantum.models.Priority + :ivar tags: List of user-supplied tags associated with the job. + :vartype tags: list[str] + :ivar usage: Resource consumption metrics containing provider-specific usage data such as + execution time, quantum shots consumed etc. + :vartype usage: ~azure.quantum.models.Usage + :ivar id: Id of the job. Required. + :vartype id: str + :ivar item_type: Type of the Quantum Workspace item is Job. Required. A program, problem, or + application submitted for processing. + :vartype item_type: str or ~azure.quantum.models.JOB :ivar job_type: The type of job. Known values are: "Unknown", "QuantumComputing", and "Optimization". - :vartype job_type: str or ~azure.quantum._client.models.JobType + :vartype job_type: str or ~azure.quantum.models.JobType :ivar session_id: The ID of the session that the job is part of. 
:vartype session_id: str :ivar container_uri: The blob container SAS uri, the container is used to host job data. Required. :vartype container_uri: str - :ivar input_data_uri: The input blob SAS uri, if specified, it will override the default input - blob in the container. + :ivar input_data_uri: The input blob URI, if specified, it will override the default input blob + in the container. :vartype input_data_uri: str - :ivar input_data_format: The format of the input data. Required. + :ivar input_data_format: The format of the input data. :vartype input_data_format: str + :ivar status: The status of the job. Known values are: "Queued", "Waiting", "Executing", + "CancellationRequested", "Cancelling", "Finishing", "Completed", "Succeeded", "Failed", and + "Cancelled". + :vartype status: str or ~azure.quantum.models.JobStatus + :ivar metadata: The job metadata. Metadata provides client the ability to store client-specific + information. + :vartype metadata: any + :ivar cancellation_time: The time when a job was successfully cancelled. + :vartype cancellation_time: ~datetime.datetime + :ivar quantum_computing_data: Quantum computing data. + :vartype quantum_computing_data: ~azure.quantum.models.QuantumComputingData :ivar input_params: The input parameters for the job. JSON object used by the target solver. It is expected that the size of this object is small and only used to specify parameters for the execution target, not the input data. - :vartype input_params: JSON - :ivar status: The status of the job. Known values are: "Waiting", "Executing", "Succeeded", - "Failed", and "Cancelled". - :vartype status: str or ~azure.quantum._client.models.JobStatus - :ivar metadata: The job metadata. Metadata provides client the ability to store client-specific - information. - :vartype metadata: dict[str, str] - :ivar output_data_uri: The output blob SAS uri. When a job finishes successfully, results will - be uploaded to this blob. 
+ :vartype input_params: any + :ivar output_data_uri: The output blob uri. When a job finishes successfully, results will be + uploaded to this blob. :vartype output_data_uri: str :ivar output_data_format: The format of the output data. :vartype output_data_format: str - :ivar cancellation_time: The time when a job was successfully cancelled. - :vartype cancellation_time: ~datetime.datetime - :ivar quantum_computing_data: Quantum computing data. - :vartype quantum_computing_data: ~azure.quantum._client.models.QuantumComputingData - :ivar tags: List of user-supplied tags associated with the job. - :vartype tags: list[str] """ - _validation = { - "id": {"required": True}, - "name": {"required": True}, - "provider_id": {"required": True}, - "target": {"required": True}, - "item_type": {"required": True}, - "creation_time": {"readonly": True}, - "begin_execution_time": {"readonly": True}, - "end_execution_time": {"readonly": True}, - "job_type": {"readonly": True}, - "container_uri": {"required": True}, - "input_data_format": {"required": True}, - "status": {"readonly": True}, - "cancellation_time": {"readonly": True}, - "quantum_computing_data": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "provider_id": {"key": "providerId", "type": "str"}, - "target": {"key": "target", "type": "str"}, - "item_type": {"key": "itemType", "type": "str"}, - "creation_time": {"key": "creationTime", "type": "iso-8601"}, - "begin_execution_time": {"key": "beginExecutionTime", "type": "iso-8601"}, - "end_execution_time": {"key": "endExecutionTime", "type": "iso-8601"}, - "cost_estimate": {"key": "costEstimate", "type": "CostEstimate"}, - "error_data": {"key": "errorData", "type": "ErrorData"}, - "job_type": {"key": "jobType", "type": "str"}, - "session_id": {"key": "sessionId", "type": "str"}, - "container_uri": {"key": "containerUri", "type": "str"}, - "input_data_uri": {"key": "inputDataUri", "type": 
"str"}, - "input_data_format": {"key": "inputDataFormat", "type": "str"}, - "input_params": {"key": "inputParams", "type": "object"}, - "status": {"key": "status", "type": "str"}, - "metadata": {"key": "metadata", "type": "{str}"}, - "output_data_uri": {"key": "outputDataUri", "type": "str"}, - "output_data_format": {"key": "outputDataFormat", "type": "str"}, - "cancellation_time": {"key": "cancellationTime", "type": "iso-8601"}, - "quantum_computing_data": {"key": "quantumComputingData", "type": "QuantumComputingData"}, - "tags": {"key": "tags", "type": "[str]"}, - } - - def __init__( + item_type: Literal[ItemType.JOB] = rest_discriminator(name="itemType", visibility=["read", "create"]) # type: ignore + """Type of the Quantum Workspace item is Job. Required. A program, problem, or application + submitted for processing.""" + job_type: Optional[Union[str, "_models.JobType"]] = rest_field(name="jobType", visibility=["read", "create"]) + """The type of job. Known values are: \"Unknown\", \"QuantumComputing\", and \"Optimization\".""" + session_id: Optional[str] = rest_field(name="sessionId", visibility=["read", "create"]) + """The ID of the session that the job is part of.""" + container_uri: str = rest_field(name="containerUri", visibility=["read", "create"]) + """The blob container SAS uri, the container is used to host job data. Required.""" + input_data_uri: Optional[str] = rest_field(name="inputDataUri", visibility=["read", "create"]) + """The input blob URI, if specified, it will override the default input blob in the container.""" + input_data_format: Optional[str] = rest_field(name="inputDataFormat", visibility=["read", "create"]) + """The format of the input data.""" + status: Optional[Union[str, "_models.JobStatus"]] = rest_field(visibility=["read"]) + """The status of the job. 
Known values are: \"Queued\", \"Waiting\", \"Executing\", + \"CancellationRequested\", \"Cancelling\", \"Finishing\", \"Completed\", \"Succeeded\", + \"Failed\", and \"Cancelled\".""" + metadata: Optional[Any] = rest_field(visibility=["read", "create", "update"]) + """The job metadata. Metadata provides client the ability to store client-specific information.""" + cancellation_time: Optional[datetime.datetime] = rest_field( + name="cancellationTime", visibility=["read"], format="rfc3339" + ) + """The time when a job was successfully cancelled.""" + quantum_computing_data: Optional["_models.QuantumComputingData"] = rest_field( + name="quantumComputingData", visibility=["read"] + ) + """Quantum computing data.""" + input_params: Optional[Any] = rest_field(name="inputParams", visibility=["read", "create"]) + """The input parameters for the job. JSON object used by the target solver. It is expected that + the size of this object is small and only used to specify parameters for the execution target, + not the input data.""" + output_data_uri: Optional[str] = rest_field(name="outputDataUri", visibility=["read", "create"]) + """The output blob uri. 
When a job finishes successfully, results will be uploaded to this blob.""" + output_data_format: Optional[str] = rest_field(name="outputDataFormat", visibility=["read", "create"]) + """The format of the output data.""" + + @overload + def __init__( # pylint: disable=too-many-locals self, *, - id: str, # pylint: disable=redefined-builtin name: str, provider_id: str, target: str, container_uri: str, - input_data_format: str, - cost_estimate: Optional["_models.CostEstimate"] = None, - error_data: Optional["_models.ErrorData"] = None, + priority: Optional[Union[str, "_models.Priority"]] = None, + tags: Optional[list[str]] = None, + job_type: Optional[Union[str, "_models.JobType"]] = None, session_id: Optional[str] = None, input_data_uri: Optional[str] = None, - input_params: Optional[JSON] = None, - metadata: Optional[Dict[str, str]] = None, + input_data_format: Optional[str] = None, + metadata: Optional[Any] = None, + input_params: Optional[Any] = None, output_data_uri: Optional[str] = None, output_data_format: Optional[str] = None, - tags: Optional[List[str]] = None, - **kwargs: Any - ) -> None: - """ - :keyword id: The id of the item. Required. - :paramtype id: str - :keyword name: The name of the item. It is not required for the name to be unique and it's only - used for display purposes. Required. - :paramtype name: str - :keyword provider_id: The unique identifier for the provider. Required. - :paramtype provider_id: str - :keyword target: The target identifier to run the job. Required. - :paramtype target: str - :keyword cost_estimate: The job cost billed by the provider. The final cost on your bill might - be slightly different due to added taxes and currency conversion rates. - :paramtype cost_estimate: ~azure.quantum._client.models.CostEstimate - :keyword error_data: An error response from Azure. - :paramtype error_data: ~azure.quantum._client.models.ErrorData - :keyword session_id: The ID of the session that the job is part of. 
- :paramtype session_id: str - :keyword container_uri: The blob container SAS uri, the container is used to host job data. - Required. - :paramtype container_uri: str - :keyword input_data_uri: The input blob SAS uri, if specified, it will override the default - input blob in the container. - :paramtype input_data_uri: str - :keyword input_data_format: The format of the input data. Required. - :paramtype input_data_format: str - :keyword input_params: The input parameters for the job. JSON object used by the target solver. - It is expected that the size of this object is small and only used to specify parameters for - the execution target, not the input data. - :paramtype input_params: JSON - :keyword metadata: The job metadata. Metadata provides client the ability to store - client-specific information. - :paramtype metadata: dict[str, str] - :keyword output_data_uri: The output blob SAS uri. When a job finishes successfully, results - will be uploaded to this blob. - :paramtype output_data_uri: str - :keyword output_data_format: The format of the output data. - :paramtype output_data_format: str - :keyword tags: List of user-supplied tags associated with the job. - :paramtype tags: list[str] - """ - super().__init__( - id=id, - name=name, - provider_id=provider_id, - target=target, - cost_estimate=cost_estimate, - error_data=error_data, - **kwargs - ) - self.item_type: str = "Job" - self.job_type = None - self.session_id = session_id - self.container_uri = container_uri - self.input_data_uri = input_data_uri - self.input_data_format = input_data_format - self.input_params = input_params - self.status = None - self.metadata = metadata - self.output_data_uri = output_data_uri - self.output_data_format = output_data_format - self.cancellation_time = None - self.quantum_computing_data = None - self.tags = tags - - -class JobDetailsList(_serialization.Model): - """List of job details. 
- - :ivar value: - :vartype value: list[~azure.quantum._client.models.JobDetails] - :ivar next_link: Link to the next page of results. - :vartype next_link: str - """ - - _attribute_map = { - "value": {"key": "value", "type": "[JobDetails]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } + ) -> None: ... - def __init__( - self, *, value: Optional[List["_models.JobDetails"]] = None, next_link: Optional[str] = None, **kwargs: Any - ) -> None: + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword value: - :paramtype value: list[~azure.quantum._client.models.JobDetails] - :keyword next_link: Link to the next page of results. - :paramtype next_link: str + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.value = value - self.next_link = next_link - - -class JsonPatchDocument(_serialization.Model): - """A JSONPatch document as defined by RFC 6902. - - All required parameters must be populated in order to send to server. - - :ivar op: The operation to be performed. Required. Known values are: "add", "remove", - "replace", "move", "copy", and "test". - :vartype op: str or ~azure.quantum._client.models.JsonPatchOperation - :ivar path: A JSON-Pointer. Required. - :vartype path: str - :ivar value: A value to be used in the operation on the path. - :vartype value: JSON - :ivar from_property: Optional field used in copy and move operations. 
- :vartype from_property: str - """ - _validation = { - "op": {"required": True}, - "path": {"required": True}, - } + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.item_type = ItemType.JOB # type: ignore - _attribute_map = { - "op": {"key": "op", "type": "str"}, - "path": {"key": "path", "type": "str"}, - "value": {"key": "value", "type": "object"}, - "from_property": {"key": "from", "type": "str"}, - } - def __init__( - self, - *, - op: Union[str, "_models.JsonPatchOperation"], - path: str, - value: Optional[JSON] = None, - from_property: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword op: The operation to be performed. Required. Known values are: "add", "remove", - "replace", "move", "copy", and "test". - :paramtype op: str or ~azure.quantum._client.models.JsonPatchOperation - :keyword path: A JSON-Pointer. Required. - :paramtype path: str - :keyword value: A value to be used in the operation on the path. - :paramtype value: JSON - :keyword from_property: Optional field used in copy and move operations. - :paramtype from_property: str - """ - super().__init__(**kwargs) - self.op = op - self.path = path - self.value = value - self.from_property = from_property +class JobUpdateOptions(_Model): + """Options for updating a job. - -class ProviderStatus(_serialization.Model): - """Providers status. - - :ivar id: Provider id. + :ivar id: Id of the job. Required. :vartype id: str - :ivar current_availability: Provider availability. Known values are: "Available", "Degraded", - and "Unavailable". - :vartype current_availability: str or ~azure.quantum._client.models.ProviderAvailability - :ivar targets: - :vartype targets: list[~azure.quantum._client.models.TargetStatus] + :ivar priority: Priority of job. Known values are: "Standard" and "High". + :vartype priority: str or ~azure.quantum.models.Priority + :ivar name: The name of the job. 
+ :vartype name: str + :ivar tags: List of user-supplied tags associated with the job. + :vartype tags: list[str] """ - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "current_availability": {"key": "currentAvailability", "type": "str"}, - "targets": {"key": "targets", "type": "[TargetStatus]"}, - } + id: str = rest_field(visibility=["read"]) + """Id of the job. Required.""" + priority: Optional[Union[str, "_models.Priority"]] = rest_field(visibility=["update"]) + """Priority of job. Known values are: \"Standard\" and \"High\".""" + name: Optional[str] = rest_field(visibility=["read", "update"]) + """The name of the job.""" + tags: Optional[list[str]] = rest_field(visibility=["read", "create", "update"]) + """List of user-supplied tags associated with the job.""" + @overload def __init__( self, *, - id: Optional[str] = None, # pylint: disable=redefined-builtin - current_availability: Optional[Union[str, "_models.ProviderAvailability"]] = None, - targets: Optional[List["_models.TargetStatus"]] = None, - **kwargs: Any - ) -> None: + priority: Optional[Union[str, "_models.Priority"]] = None, + name: Optional[str] = None, + tags: Optional[list[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword id: Provider id. - :paramtype id: str - :keyword current_availability: Provider availability. Known values are: "Available", - "Degraded", and "Unavailable". - :paramtype current_availability: str or ~azure.quantum._client.models.ProviderAvailability - :keyword targets: - :paramtype targets: list[~azure.quantum._client.models.TargetStatus] + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.id = id - self.current_availability = current_availability - self.targets = targets + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) -class ProviderStatusList(_serialization.Model): - """Providers status. - :ivar value: - :vartype value: list[~azure.quantum._client.models.ProviderStatus] - :ivar next_link: Link to the next page of results. - :vartype next_link: str - """ +class ProviderStatus(_Model): + """Provider status. - _attribute_map = { - "value": {"key": "value", "type": "[ProviderStatus]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } + :ivar id: Provider id. Required. + :vartype id: str + :ivar current_availability: Current provider availability. Required. Known values are: + "Available", "Degraded", and "Unavailable". + :vartype current_availability: str or ~azure.quantum.models.ProviderAvailability + :ivar targets: Current target statuses. Required. + :vartype targets: list[~azure.quantum.models.TargetStatus] + """ - def __init__( - self, *, value: Optional[List["_models.ProviderStatus"]] = None, next_link: Optional[str] = None, **kwargs: Any - ) -> None: - """ - :keyword value: - :paramtype value: list[~azure.quantum._client.models.ProviderStatus] - :keyword next_link: Link to the next page of results. - :paramtype next_link: str - """ - super().__init__(**kwargs) - self.value = value - self.next_link = next_link + id: str = rest_field(visibility=["read"]) + """Provider id. Required.""" + current_availability: Union[str, "_models.ProviderAvailability"] = rest_field( + name="currentAvailability", visibility=["read"] + ) + """Current provider availability. Required. Known values are: \"Available\", \"Degraded\", and + \"Unavailable\".""" + targets: list["_models.TargetStatus"] = rest_field(visibility=["read"]) + """Current target statuses. 
Required.""" -class QuantumComputingData(_serialization.Model): +class QuantumComputingData(_Model): """Quantum computing data. - :ivar count: The number of quantum computing items in the job. + :ivar count: The number of quantum computing items in the job. Required. :vartype count: int """ - _attribute_map = { - "count": {"key": "count", "type": "int"}, - } - - def __init__(self, *, count: Optional[int] = None, **kwargs: Any) -> None: - """ - :keyword count: The number of quantum computing items in the job. - :paramtype count: int - """ - super().__init__(**kwargs) - self.count = count + count: int = rest_field(visibility=["read"]) + """The number of quantum computing items in the job. Required.""" -class Quota(_serialization.Model): +class Quota(_Model): """Quota information. - :ivar dimension: The name of the dimension associated with the quota. + :ivar dimension: The name of the dimension associated with the quota. Required. :vartype dimension: str - :ivar scope: The scope at which the quota is applied. Known values are: "Workspace" and - "Subscription". - :vartype scope: str or ~azure.quantum._client.models.DimensionScope - :ivar provider_id: The unique identifier for the provider. + :ivar scope: The scope at which the quota is applied. Required. Known values are: "Workspace" + and "Subscription". + :vartype scope: str or ~azure.quantum.models.DimensionScope + :ivar provider_id: The unique identifier for the provider. Required. :vartype provider_id: str :ivar utilization: The amount of the usage that has been applied for the current period. + Required. :vartype utilization: float :ivar holds: The amount of the usage that has been reserved but not applied for the current - period. + period. Required. :vartype holds: float - :ivar limit: The maximum amount of usage allowed for the current period. + :ivar limit: The maximum amount of usage allowed for the current period. Required. 
:vartype limit: float :ivar period: The time period in which the quota's underlying meter is accumulated. Based on - calendar year. 'None' is used for concurrent quotas. Known values are: "None" and "Monthly". - :vartype period: str or ~azure.quantum._client.models.MeterPeriod - """ - - _attribute_map = { - "dimension": {"key": "dimension", "type": "str"}, - "scope": {"key": "scope", "type": "str"}, - "provider_id": {"key": "providerId", "type": "str"}, - "utilization": {"key": "utilization", "type": "float"}, - "holds": {"key": "holds", "type": "float"}, - "limit": {"key": "limit", "type": "float"}, - "period": {"key": "period", "type": "str"}, - } - - def __init__( - self, - *, - dimension: Optional[str] = None, - scope: Optional[Union[str, "_models.DimensionScope"]] = None, - provider_id: Optional[str] = None, - utilization: Optional[float] = None, - holds: Optional[float] = None, - limit: Optional[float] = None, - period: Optional[Union[str, "_models.MeterPeriod"]] = None, - **kwargs: Any - ) -> None: - """ - :keyword dimension: The name of the dimension associated with the quota. - :paramtype dimension: str - :keyword scope: The scope at which the quota is applied. Known values are: "Workspace" and - "Subscription". - :paramtype scope: str or ~azure.quantum._client.models.DimensionScope - :keyword provider_id: The unique identifier for the provider. - :paramtype provider_id: str - :keyword utilization: The amount of the usage that has been applied for the current period. - :paramtype utilization: float - :keyword holds: The amount of the usage that has been reserved but not applied for the current - period. - :paramtype holds: float - :keyword limit: The maximum amount of usage allowed for the current period. - :paramtype limit: float - :keyword period: The time period in which the quota's underlying meter is accumulated. Based on - calendar year. 'None' is used for concurrent quotas. Known values are: "None" and "Monthly". 
- :paramtype period: str or ~azure.quantum._client.models.MeterPeriod - """ - super().__init__(**kwargs) - self.dimension = dimension - self.scope = scope - self.provider_id = provider_id - self.utilization = utilization - self.holds = holds - self.limit = limit - self.period = period - - -class QuotaList(_serialization.Model): - """List of quotas. - - :ivar value: - :vartype value: list[~azure.quantum._client.models.Quota] - :ivar next_link: Link to the next page of results. - :vartype next_link: str - """ - - _attribute_map = { - "value": {"key": "value", "type": "[Quota]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } - - def __init__( - self, *, value: Optional[List["_models.Quota"]] = None, next_link: Optional[str] = None, **kwargs: Any - ) -> None: - """ - :keyword value: - :paramtype value: list[~azure.quantum._client.models.Quota] - :keyword next_link: Link to the next page of results. - :paramtype next_link: str - """ - super().__init__(**kwargs) - self.value = value - self.next_link = next_link - - -class RestError(_serialization.Model): - """Error information returned by the API. - - All required parameters must be populated in order to send to server. - - :ivar error: An error response from Azure. Required. - :vartype error: ~azure.quantum._client.models.ErrorData + calendar year. 'None' is used for concurrent quotas. Required. Known values are: "None" and + "Monthly". + :vartype period: str or ~azure.quantum.models.MeterPeriod """ - _validation = { - "error": {"required": True}, - } - - _attribute_map = { - "error": {"key": "error", "type": "ErrorData"}, - } - - def __init__(self, *, error: "_models.ErrorData", **kwargs: Any) -> None: - """ - :keyword error: An error response from Azure. Required. - :paramtype error: ~azure.quantum._client.models.ErrorData - """ - super().__init__(**kwargs) - self.error = error - - -class SasUriResponse(_serialization.Model): - """Get SAS URL operation response. 
+ dimension: str = rest_field(visibility=["read"]) + """The name of the dimension associated with the quota. Required.""" + scope: Union[str, "_models.DimensionScope"] = rest_field(visibility=["read"]) + """The scope at which the quota is applied. Required. Known values are: \"Workspace\" and + \"Subscription\".""" + provider_id: str = rest_field(name="providerId", visibility=["read"]) + """The unique identifier for the provider. Required.""" + utilization: float = rest_field(visibility=["read"]) + """The amount of the usage that has been applied for the current period. Required.""" + holds: float = rest_field(visibility=["read"]) + """The amount of the usage that has been reserved but not applied for the current period. + Required.""" + limit: float = rest_field(visibility=["read"]) + """The maximum amount of usage allowed for the current period. Required.""" + period: Union[str, "_models.MeterPeriod"] = rest_field(visibility=["read"]) + """The time period in which the quota's underlying meter is accumulated. Based on calendar year. + 'None' is used for concurrent quotas. Required. Known values are: \"None\" and \"Monthly\".""" + + +class SasUriResponse(_Model): + """SAS URI operation response. :ivar sas_uri: A URL with a SAS token to upload a blob for execution in the given workspace. + Required. :vartype sas_uri: str """ - _attribute_map = { - "sas_uri": {"key": "sasUri", "type": "str"}, - } - - def __init__(self, *, sas_uri: Optional[str] = None, **kwargs: Any) -> None: - """ - :keyword sas_uri: A URL with a SAS token to upload a blob for execution in the given workspace. - :paramtype sas_uri: str - """ - super().__init__(**kwargs) - self.sas_uri = sas_uri - + sas_uri: str = rest_field(name="sasUri", visibility=["read"]) + """A URL with a SAS token to upload a blob for execution in the given workspace. Required.""" -class SessionDetails(ItemDetails): # pylint: disable=too-many-instance-attributes - """Session details. 
- Variables are only populated by the server, and will be ignored when sending a request. +class SessionDetails(ItemDetails, discriminator="Session"): + """Session, a logical grouping of jobs. - All required parameters must be populated in order to send to server. - - :ivar id: The id of the item. Required. - :vartype id: str :ivar name: The name of the item. It is not required for the name to be unique and it's only used for display purposes. Required. :vartype name: str @@ -792,230 +553,232 @@ class SessionDetails(ItemDetails): # pylint: disable=too-many-instance-attribut :vartype provider_id: str :ivar target: The target identifier to run the job. Required. :vartype target: str - :ivar item_type: The type of item. Required. Known values are: "Job" and "Session". - :vartype item_type: str or ~azure.quantum._client.models.ItemType :ivar creation_time: The creation time of the item. :vartype creation_time: ~datetime.datetime + :ivar created_by: The identity that created the item. + :vartype created_by: str + :ivar created_by_type: The type of identity that created the item. Known values are: "User", + "Application", "ManagedIdentity", and "Key". + :vartype created_by_type: str or ~azure.quantum.models.CreatedByType + :ivar last_modified_time: The timestamp of the item last modification initiated by the + customer. + :vartype last_modified_time: ~datetime.datetime + :ivar last_modified_by: The identity that last modified the item. + :vartype last_modified_by: str + :ivar last_modified_by_type: The type of identity that last modified the item. Known values + are: "User", "Application", "ManagedIdentity", and "Key". + :vartype last_modified_by_type: str or ~azure.quantum.models.CreatedByType + :ivar last_updated_time: The last time the item was updated by the system. + :vartype last_updated_time: ~datetime.datetime :ivar begin_execution_time: The time when the item began execution. 
:vartype begin_execution_time: ~datetime.datetime :ivar end_execution_time: The time when the item finished execution. :vartype end_execution_time: ~datetime.datetime - :ivar cost_estimate: The job cost billed by the provider. The final cost on your bill might be - slightly different due to added taxes and currency conversion rates. - :vartype cost_estimate: ~azure.quantum._client.models.CostEstimate - :ivar error_data: An error response from Azure. - :vartype error_data: ~azure.quantum._client.models.ErrorData + :ivar cost_estimate: Cost estimate. + :vartype cost_estimate: ~azure.quantum.models.CostEstimate + :ivar error_data: Error information. + :vartype error_data: ~azure.quantum.models.WorkspaceItemError + :ivar priority: Priority of job or session. Known values are: "Standard" and "High". + :vartype priority: str or ~azure.quantum.models.Priority + :ivar tags: List of user-supplied tags associated with the job. + :vartype tags: list[str] + :ivar usage: Resource consumption metrics containing provider-specific usage data such as + execution time, quantum shots consumed etc. + :vartype usage: ~azure.quantum.models.Usage + :ivar id: Id of the session. Required. + :vartype id: str + :ivar item_type: Type of the Quantum Workspace item is Session. Required. A logical grouping of + jobs. + :vartype item_type: str or ~azure.quantum.models.SESSION :ivar job_failure_policy: Policy controlling the behavior of the Session when a job in the - session fails. Known values are: "Abort" and "Continue". - :vartype job_failure_policy: str or ~azure.quantum._client.models.SessionJobFailurePolicy + session fails. Required. Known values are: "Abort" and "Continue". + :vartype job_failure_policy: str or ~azure.quantum.models.SessionJobFailurePolicy :ivar status: The status of the session. Known values are: "Waiting", "Executing", "Succeeded", "Failed", "Failure(s)", and "TimedOut". 
- :vartype status: str or ~azure.quantum._client.models.SessionStatus + :vartype status: str or ~azure.quantum.models.SessionStatus """ - _validation = { - "id": {"required": True}, - "name": {"required": True}, - "provider_id": {"required": True}, - "target": {"required": True}, - "item_type": {"required": True}, - "creation_time": {"readonly": True}, - "begin_execution_time": {"readonly": True}, - "end_execution_time": {"readonly": True}, - "status": {"readonly": True}, - } - - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "name": {"key": "name", "type": "str"}, - "provider_id": {"key": "providerId", "type": "str"}, - "target": {"key": "target", "type": "str"}, - "item_type": {"key": "itemType", "type": "str"}, - "creation_time": {"key": "creationTime", "type": "iso-8601"}, - "begin_execution_time": {"key": "beginExecutionTime", "type": "iso-8601"}, - "end_execution_time": {"key": "endExecutionTime", "type": "iso-8601"}, - "cost_estimate": {"key": "costEstimate", "type": "CostEstimate"}, - "error_data": {"key": "errorData", "type": "ErrorData"}, - "job_failure_policy": {"key": "jobFailurePolicy", "type": "str"}, - "status": {"key": "status", "type": "str"}, - } - + item_type: Literal[ItemType.SESSION] = rest_discriminator(name="itemType", visibility=["read", "create"]) # type: ignore + """Type of the Quantum Workspace item is Session. Required. A logical grouping of jobs.""" + job_failure_policy: Union[str, "_models.SessionJobFailurePolicy"] = rest_field( + name="jobFailurePolicy", visibility=["read", "create"] + ) + """Policy controlling the behavior of the Session when a job in the session fails. Required. Known + values are: \"Abort\" and \"Continue\".""" + status: Optional[Union[str, "_models.SessionStatus"]] = rest_field(visibility=["read"]) + """The status of the session. 
Known values are: \"Waiting\", \"Executing\", \"Succeeded\", + \"Failed\", \"Failure(s)\", and \"TimedOut\".""" + + @overload def __init__( self, *, - id: str, # pylint: disable=redefined-builtin name: str, provider_id: str, target: str, - cost_estimate: Optional["_models.CostEstimate"] = None, - error_data: Optional["_models.ErrorData"] = None, - job_failure_policy: Union[str, "_models.SessionJobFailurePolicy"] = "Abort", - **kwargs: Any - ) -> None: - """ - :keyword id: The id of the item. Required. - :paramtype id: str - :keyword name: The name of the item. It is not required for the name to be unique and it's only - used for display purposes. Required. - :paramtype name: str - :keyword provider_id: The unique identifier for the provider. Required. - :paramtype provider_id: str - :keyword target: The target identifier to run the job. Required. - :paramtype target: str - :keyword cost_estimate: The job cost billed by the provider. The final cost on your bill might - be slightly different due to added taxes and currency conversion rates. - :paramtype cost_estimate: ~azure.quantum._client.models.CostEstimate - :keyword error_data: An error response from Azure. - :paramtype error_data: ~azure.quantum._client.models.ErrorData - :keyword job_failure_policy: Policy controlling the behavior of the Session when a job in the - session fails. Known values are: "Abort" and "Continue". - :paramtype job_failure_policy: str or ~azure.quantum._client.models.SessionJobFailurePolicy - """ - super().__init__( - id=id, - name=name, - provider_id=provider_id, - target=target, - cost_estimate=cost_estimate, - error_data=error_data, - **kwargs - ) - self.item_type: str = "Session" - self.job_failure_policy = job_failure_policy - self.status = None - - -class SessionDetailsList(_serialization.Model): - """List of session details. - - :ivar value: - :vartype value: list[~azure.quantum._client.models.SessionDetails] - :ivar next_link: Link to the next page of results. 
- :vartype next_link: str - """ + job_failure_policy: Union[str, "_models.SessionJobFailurePolicy"], + priority: Optional[Union[str, "_models.Priority"]] = None, + tags: Optional[list[str]] = None, + ) -> None: ... - _attribute_map = { - "value": {"key": "value", "type": "[SessionDetails]"}, - "next_link": {"key": "nextLink", "type": "str"}, - } - - def __init__( - self, *, value: Optional[List["_models.SessionDetails"]] = None, next_link: Optional[str] = None, **kwargs: Any - ) -> None: + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword value: - :paramtype value: list[~azure.quantum._client.models.SessionDetails] - :keyword next_link: Link to the next page of results. - :paramtype next_link: str + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.value = value - self.next_link = next_link + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.item_type = ItemType.SESSION # type: ignore -class TargetStatus(_serialization.Model): + +class TargetStatus(_Model): """Target status. - :ivar id: Target id. + :ivar id: Target id. Required. :vartype id: str - :ivar current_availability: Target availability. Known values are: "Available", "Degraded", and - "Unavailable". - :vartype current_availability: str or ~azure.quantum._client.models.TargetAvailability - :ivar average_queue_time: Average queue time in seconds. + :ivar current_availability: Current target availability. Required. Known values are: + "Available", "Degraded", and "Unavailable". + :vartype current_availability: str or ~azure.quantum.models.TargetAvailability + :ivar average_queue_time: Average queue time in seconds. Required. :vartype average_queue_time: int :ivar status_page: A page with detailed status of the provider. :vartype status_page: str + :ivar num_qubits: The qubit number. + :vartype num_qubits: int + :ivar target_profile: Target QIR profile. 
+ :vartype target_profile: str + :ivar metadata: The metadata of this target. + :vartype metadata: any """ - _attribute_map = { - "id": {"key": "id", "type": "str"}, - "current_availability": {"key": "currentAvailability", "type": "str"}, - "average_queue_time": {"key": "averageQueueTime", "type": "int"}, - "status_page": {"key": "statusPage", "type": "str"}, - } + id: str = rest_field(visibility=["read"]) + """Target id. Required.""" + current_availability: Union[str, "_models.TargetAvailability"] = rest_field( + name="currentAvailability", visibility=["read"] + ) + """Current target availability. Required. Known values are: \"Available\", \"Degraded\", and + \"Unavailable\".""" + average_queue_time: int = rest_field(name="averageQueueTime", visibility=["read"]) + """Average queue time in seconds. Required.""" + status_page: Optional[str] = rest_field(name="statusPage", visibility=["read"]) + """A page with detailed status of the provider.""" + num_qubits: Optional[int] = rest_field(name="numQubits", visibility=["read"]) + """The qubit number.""" + target_profile: Optional[str] = rest_field(name="targetProfile", visibility=["read"]) + """Target QIR profile.""" + metadata: Optional[Any] = rest_field(visibility=["read"]) + """The metadata of this target.""" + + +class Usage(_Model): + """Resource usage metrics represented as key-value pairs. Keys are provider-defined metric names + (e.g. "standardMinutes", "shots") and values are the corresponding consumption amounts. The + specific metrics available depend on the quantum provider and target used. - def __init__( - self, - *, - id: Optional[str] = None, # pylint: disable=redefined-builtin - current_availability: Optional[Union[str, "_models.TargetAvailability"]] = None, - average_queue_time: Optional[int] = None, - status_page: Optional[str] = None, - **kwargs: Any - ) -> None: - """ - :keyword id: Target id. - :paramtype id: str - :keyword current_availability: Target availability. 
Known values are: "Available", "Degraded", - and "Unavailable". - :paramtype current_availability: str or ~azure.quantum._client.models.TargetAvailability - :keyword average_queue_time: Average queue time in seconds. - :paramtype average_queue_time: int - :keyword status_page: A page with detailed status of the provider. - :paramtype status_page: str - """ - super().__init__(**kwargs) - self.id = id - self.current_availability = current_availability - self.average_queue_time = average_queue_time - self.status_page = status_page + """ -class UsageEvent(_serialization.Model): +class UsageEvent(_Model): """Usage event details. - :ivar dimension_id: The dimension id. + :ivar dimension_id: The dimension id. Required. :vartype dimension_id: str - :ivar dimension_name: The dimension name. + :ivar dimension_name: The dimension name. Required. :vartype dimension_name: str - :ivar measure_unit: The unit of measure. + :ivar measure_unit: The unit of measure. Required. :vartype measure_unit: str - :ivar amount_billed: The amount billed. + :ivar amount_billed: The amount billed. Required. :vartype amount_billed: float - :ivar amount_consumed: The amount consumed. + :ivar amount_consumed: The amount consumed. Required. :vartype amount_consumed: float - :ivar unit_price: The unit price. + :ivar unit_price: The unit price. Required. :vartype unit_price: float """ - _attribute_map = { - "dimension_id": {"key": "dimensionId", "type": "str"}, - "dimension_name": {"key": "dimensionName", "type": "str"}, - "measure_unit": {"key": "measureUnit", "type": "str"}, - "amount_billed": {"key": "amountBilled", "type": "float"}, - "amount_consumed": {"key": "amountConsumed", "type": "float"}, - "unit_price": {"key": "unitPrice", "type": "float"}, - } + dimension_id: str = rest_field(name="dimensionId", visibility=["read", "create", "update", "delete", "query"]) + """The dimension id. 
Required.""" + dimension_name: str = rest_field(name="dimensionName", visibility=["read", "create", "update", "delete", "query"]) + """The dimension name. Required.""" + measure_unit: str = rest_field(name="measureUnit", visibility=["read", "create", "update", "delete", "query"]) + """The unit of measure. Required.""" + amount_billed: float = rest_field(name="amountBilled", visibility=["read", "create", "update", "delete", "query"]) + """The amount billed. Required.""" + amount_consumed: float = rest_field( + name="amountConsumed", visibility=["read", "create", "update", "delete", "query"] + ) + """The amount consumed. Required.""" + unit_price: float = rest_field(name="unitPrice", visibility=["read", "create", "update", "delete", "query"]) + """The unit price. Required.""" + + @overload + def __init__( + self, + *, + dimension_id: str, + dimension_name: str, + measure_unit: str, + amount_billed: float, + amount_consumed: float, + unit_price: float, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + +class WorkspaceItemError(_Model): + """The error object. + + :ivar code: One of a server-defined set of error codes. Required. + :vartype code: str + :ivar message: A human-readable representation of the error. Required. + :vartype message: str + :ivar target: The target of the error. + :vartype target: str + :ivar details: An array of details about specific errors that led to this reported error. + :vartype details: list[~azure.core.ODataV4Format] + :ivar innererror: An object containing more specific information than the current object about + the error. 
+ :vartype innererror: ~azure.quantum.models.InnerError + """ + + code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """One of a server-defined set of error codes. Required.""" + message: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """A human-readable representation of the error. Required.""" + target: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The target of the error.""" + details: Optional[list[ODataV4Format]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An array of details about specific errors that led to this reported error.""" + innererror: Optional["_models.InnerError"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An object containing more specific information than the current object about the error.""" + + @overload def __init__( self, *, - dimension_id: Optional[str] = None, - dimension_name: Optional[str] = None, - measure_unit: Optional[str] = None, - amount_billed: Optional[float] = None, - amount_consumed: Optional[float] = None, - unit_price: Optional[float] = None, - **kwargs: Any - ) -> None: + code: str, + message: str, + target: Optional[str] = None, + details: Optional[list[ODataV4Format]] = None, + innererror: Optional["_models.InnerError"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: """ - :keyword dimension_id: The dimension id. - :paramtype dimension_id: str - :keyword dimension_name: The dimension name. - :paramtype dimension_name: str - :keyword measure_unit: The unit of measure. - :paramtype measure_unit: str - :keyword amount_billed: The amount billed. - :paramtype amount_billed: float - :keyword amount_consumed: The amount consumed. - :paramtype amount_consumed: float - :keyword unit_price: The unit price. - :paramtype unit_price: float + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] """ - super().__init__(**kwargs) - self.dimension_id = dimension_id - self.dimension_name = dimension_name - self.measure_unit = measure_unit - self.amount_billed = amount_billed - self.amount_consumed = amount_consumed - self.unit_price = unit_price + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/models/_patch.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/models/_patch.py index f7dd3251033..87676c65a8f 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/models/_patch.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/models/_patch.py @@ -1,14 +1,15 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. 
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/operations/__init__.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/operations/__init__.py index 6ee87de11fa..18cce5a4e75 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/operations/__init__.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/operations/__init__.py @@ -2,28 +2,24 @@ # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. +# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position -from ._operations import JobsOperations -from ._operations import ProvidersOperations -from ._operations import StorageOperations -from ._operations import QuotasOperations -from ._operations import SessionsOperations -from ._operations import TopLevelItemsOperations +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._operations import ServicesOperations # type: ignore from ._patch import __all__ as _patch_all -from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import * from ._patch import patch_sdk as _patch_sdk __all__ = [ - "JobsOperations", - "ProvidersOperations", - "StorageOperations", - "QuotasOperations", - "SessionsOperations", - "TopLevelItemsOperations", + "ServicesOperations", ] -__all__.extend([p for p in _patch_all if p not in __all__]) +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore _patch_sdk() diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/operations/_operations.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/operations/_operations.py index cf4a97efc9e..4f6b49cc250 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/operations/_operations.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/operations/_operations.py @@ -1,21 +1,26 @@ -# pylint: disable=too-many-lines,too-many-statements +# pylint: disable=line-too-long,useless-suppression,too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +from collections.abc import MutableMapping from io import IOBase -from typing import Any, Callable, Dict, IO, Iterable, List, Optional, TypeVar, Union, overload +import json +from typing import Any, Callable, IO, Optional, TypeVar, Union, overload import urllib.parse +from azure.core import PipelineClient from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, map_error, ) from azure.core.paging import ItemPaged @@ -25,26 +30,38 @@ from azure.core.utils import case_insensitive_dict from .. import models as _models -from .._serialization import Serializer +from .._configuration import WorkspaceClientConfiguration +from .._utils.model_base import SdkJSONEncoder, _deserialize +from .._utils.serialization import Deserializer, Serializer +from .._validation import api_version_validation T = TypeVar("T") -ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +JSON = MutableMapping[str, Any] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -def build_jobs_list_request( - subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any +def build_services_top_level_items_listv2_request( # pylint: disable=name-too-long + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + filter: Optional[str] = None, + skip: Optional[int] = None, + top: Optional[int] = None, + orderby: Optional[str] = None, + **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = 
case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-01-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/topLevelItems" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -55,6 +72,14 @@ def build_jobs_list_request( # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if filter is not None: + _params["filter"] = _SERIALIZER.query("filter", filter, "str") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if orderby is not None: + _params["orderby"] = _SERIALIZER.query("orderby", orderby, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -62,28 +87,23 @@ def build_jobs_list_request( return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_jobs_get_request( - job_id: str, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any +def build_services_jobs_create_request( + subscription_id: str, resource_group_name: str, workspace_name: str, job_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = 
kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-01-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs/{jobId}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs/{jobId}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - "jobId": _SERIALIZER.url( - "job_id", - job_id, - "str", - max_length=36, - pattern=r"^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", - ), + "jobId": _SERIALIZER.url("job_id", job_id, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -92,34 +112,30 @@ def build_jobs_get_request( _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) -def build_jobs_create_request( - job_id: str, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any +def build_services_jobs_update_request( + subscription_id: str, resource_group_name: str, workspace_name: str, job_id: str, **kwargs: Any ) -> 
HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-01-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs/{jobId}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobUpdateOptions/{jobId}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - "jobId": _SERIALIZER.url( - "job_id", - job_id, - "str", - max_length=36, - pattern=r"^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", - ), + "jobId": _SERIALIZER.url("job_id", job_id, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -132,31 +148,48 @@ def build_jobs_create_request( _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_services_jobs_delete_request( + subscription_id: str, resource_group_name: str, workspace_name: str, job_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or 
{}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-01-15-preview")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs/{jobId}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) -def build_jobs_cancel_request( - job_id: str, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any + +def build_services_jobs_cancel_request( + subscription_id: str, resource_group_name: str, workspace_name: str, job_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-01-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs/{jobId}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs/{jobId}:cancel" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": 
_SERIALIZER.url("resource_group_name", resource_group_name, "str"), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - "jobId": _SERIALIZER.url( - "job_id", - job_id, - "str", - max_length=36, - pattern=r"^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", - ), + "jobId": _SERIALIZER.url("job_id", job_id, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -167,32 +200,25 @@ def build_jobs_cancel_request( # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_jobs_patch_request( - job_id: str, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any +def build_services_jobs_get_request( + subscription_id: str, resource_group_name: str, workspace_name: str, job_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-01-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs/{jobId}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs/{jobId}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": 
_SERIALIZER.url("resource_group_name", resource_group_name, "str"), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - "jobId": _SERIALIZER.url( - "job_id", - job_id, - "str", - max_length=36, - pattern=r"^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", - ), + "jobId": _SERIALIZER.url("job_id", job_id, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -201,24 +227,30 @@ def build_jobs_patch_request( _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_providers_get_status_request( - subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any +def build_services_jobs_list_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + filter: Optional[str] = None, + skip: Optional[int] = None, + top: Optional[int] = None, + orderby: Optional[str] = None, + **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-01-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/providerStatus" # pylint: disable=line-too-long + _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/jobs" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -229,6 +261,14 @@ def build_providers_get_status_request( # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if filter is not None: + _params["filter"] = _SERIALIZER.query("filter", filter, "str") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if orderby is not None: + _params["orderby"] = _SERIALIZER.query("orderby", orderby, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -236,18 +276,17 @@ def build_providers_get_status_request( return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_storage_sas_uri_request( +def build_services_providers_list_request( subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-01-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/storage/sasUri" # pylint: disable=line-too-long + _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/providerStatus" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -260,24 +299,22 @@ def build_storage_sas_uri_request( _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_quotas_list_request( +def build_services_quotas_list_request( subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-01-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/quotas" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/quotas" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -295,21 +332,23 @@ def build_quotas_list_request( return 
HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_sessions_list_request( - subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any +def build_services_sessions_open_request( + subscription_id: str, resource_group_name: str, workspace_name: str, session_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-01-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions/{sessionId}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), + "sessionId": _SERIALIZER.url("session_id", session_id, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -318,33 +357,29 @@ def build_sessions_list_request( _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) 
+ return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) -def build_sessions_get_request( - session_id: str, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any +def build_services_sessions_close_request( + subscription_id: str, resource_group_name: str, workspace_name: str, session_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-01-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions/{sessionId}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions/{sessionId}:close" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - "sessionId": _SERIALIZER.url( - "session_id", - session_id, - "str", - max_length=36, - pattern=r"^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", - ), + "sessionId": _SERIALIZER.url("session_id", session_id, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -355,32 +390,25 @@ def build_sessions_get_request( # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, 
headers=_headers, **kwargs) -def build_sessions_open_request( - session_id: str, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any +def build_services_sessions_get_request( + subscription_id: str, resource_group_name: str, workspace_name: str, session_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-01-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions/{sessionId}" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions/{sessionId}" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - "sessionId": _SERIALIZER.url( - "session_id", - session_id, - "str", - max_length=36, - pattern=r"^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", - ), + "sessionId": _SERIALIZER.url("session_id", session_id, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore @@ -389,76 +417,94 @@ def build_sessions_open_request( _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - if content_type is not None: - _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, 
"str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_sessions_close_request( - session_id: str, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any +def build_services_sessions_listv2_request( + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + filter: Optional[str] = None, + skip: Optional[int] = None, + top: Optional[int] = None, + orderby: Optional[str] = None, + **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-01-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions/{sessionId}:close" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - "sessionId": _SERIALIZER.url( - "session_id", - session_id, - "str", - max_length=36, - pattern=r"^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", - ), } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", 
api_version, "str") + if filter is not None: + _params["filter"] = _SERIALIZER.query("filter", filter, "str") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if orderby is not None: + _params["orderby"] = _SERIALIZER.query("orderby", orderby, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_sessions_jobs_list_request( - session_id: str, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any +def build_services_sessions_jobs_list_request( # pylint: disable=name-too-long + subscription_id: str, + resource_group_name: str, + workspace_name: str, + session_id: str, + *, + filter: Optional[str] = None, + skip: Optional[int] = None, + top: Optional[int] = None, + orderby: Optional[str] = None, + **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview")) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-01-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions/{sessionId}/jobs" # pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/sessions/{sessionId}/jobs" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": 
_SERIALIZER.url("resource_group_name", resource_group_name, "str"), "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str"), - "sessionId": _SERIALIZER.url( - "session_id", - session_id, - "str", - max_length=36, - pattern=r"^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", - ), + "sessionId": _SERIALIZER.url("session_id", session_id, "str"), } _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if filter is not None: + _params["filter"] = _SERIALIZER.query("filter", filter, "str") + if skip is not None: + _params["skip"] = _SERIALIZER.query("skip", skip, "int") + if top is not None: + _params["top"] = _SERIALIZER.query("top", top, "int") + if orderby is not None: + _params["orderby"] = _SERIALIZER.query("orderby", orderby, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") @@ -466,17 +512,18 @@ def build_sessions_jobs_list_request( return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_top_level_items_list_request( +def build_services_storage_get_sas_uri_request( # pylint: disable=name-too-long subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-11-13-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-01-15-preview")) accept = _headers.pop("Accept", "application/json") # Construct URL - _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/topLevelItems" # 
pylint: disable=line-too-long + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Quantum/workspaces/{workspaceName}/storage/sasUri" path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), @@ -489,44 +536,113 @@ def build_top_level_items_list_request( _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -class JobsOperations: +class ServicesOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.quantum._client.QuantumClient`'s - :attr:`jobs` attribute. + :class:`~azure.quantum.WorkspaceClient`'s + :attr:`services` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: WorkspaceClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + self.top_level_items = ServicesTopLevelItemsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.jobs = ServicesJobsOperations(self._client, self._config, self._serialize, self._deserialize) + self.providers = ServicesProvidersOperations(self._client, self._config, self._serialize, self._deserialize) + self.quotas = ServicesQuotasOperations(self._client, self._config, self._serialize, self._deserialize) + self.sessions = ServicesSessionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.storage = ServicesStorageOperations(self._client, self._config, self._serialize, self._deserialize) + + +class ServicesTopLevelItemsOperations: """ + .. warning:: + **DO NOT** instantiate this class directly. - models = _models + Instead, you should access the following operations through + :class:`~azure.quantum.WorkspaceClient`'s + :attr:`top_level_items` attribute. 
+ """ - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: WorkspaceClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list(self, **kwargs: Any) -> Iterable["_models.JobDetails"]: - """List jobs. + @api_version_validation( + method_added_on="2024-10-01-preview", + params_added_on={ + "2024-10-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "workspace_name", + "filter", + "skip", + "top", + "orderby", + "accept", + ] + }, + api_versions_list=["2024-10-01-preview", "2025-09-01-preview", "2025-12-01-preview", "2026-01-15-preview"], + ) + def listv2( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + filter: Optional[str] = None, + skip: Optional[int] = None, + top: Optional[int] = None, + orderby: Optional[str] = None, + **kwargs: Any + ) -> ItemPaged["_models.ItemDetails"]: + """List top-level items. - :return: An iterator like instance of JobDetails - :rtype: ~azure.core.paging.ItemPaged[~azure.quantum._client.models.JobDetails] + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. 
+ :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str + :keyword filter: Filter the result list using the given expression. Default value is None. + :paramtype filter: str + :keyword skip: The number of result items to skip. Default value is None. + :paramtype skip: int + :keyword top: The number of result items to return. Default value is None. + :paramtype top: int + :keyword orderby: The order of returned items. Default value is None. + :paramtype orderby: str + :return: An iterator like instance of ItemDetails + :rtype: ~azure.core.paging.ItemPaged[~azure.quantum.models.ItemDetails] + :raises ~azure.core.exceptions.HttpResponseError: + """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models._models.JobDetailsList] = kwargs.pop("cls", None) # pylint: disable=protected-access + cls: ClsType[list[_models.ItemDetails]] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -537,17 +653,21 @@ def list(self, **kwargs: Any) -> Iterable["_models.JobDetails"]: def prepare_request(next_link=None): if not next_link: - _request = build_jobs_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=self._config.resource_group_name, - workspace_name=self._config.workspace_name, + _request = build_services_top_level_items_listv2_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + filter=filter, + skip=skip, + top=top, + orderby=orderby, api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ),
} _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -566,8 +686,8 @@ def prepare_request(next_link=None): "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -575,13 +695,11 @@ def prepare_request(next_link=None): return _request def extract_data(pipeline_response): - deserialized = self._deserialize( - _models._models.JobDetailsList, pipeline_response # pylint: disable=protected-access - ) - list_of_elem = deserialized.value + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.ItemDetails], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) + return deserialized.get("nextLink") or None, iter(list_of_elem) def get_next(next_link=None): _request = prepare_request(next_link) @@ -593,27 +711,155 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) return pipeline_response return ItemPaged(get_next, extract_data) - @distributed_trace - def get(self, job_id: str, **kwargs: Any) -> _models.JobDetails: - """Get job by id. +class ServicesJobsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
+ + Instead, you should access the following operations through + :class:`~azure.quantum.WorkspaceClient`'s + :attr:`jobs` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: WorkspaceClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @overload + def create( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + job_id: str, + resource: _models.JobDetails, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.JobDetails: + """Create a new job. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str + :param job_id: Id of the job. Required. + :type job_id: str + :param resource: The resource instance. Required. + :type resource: ~azure.quantum.models.JobDetails + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: JobDetails. The JobDetails is compatible with MutableMapping + :rtype: ~azure.quantum.models.JobDetails + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + job_id: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.JobDetails: + """Create a new job. + + :param subscription_id: The Azure subscription ID. Required. 
+ :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str + :param job_id: Id of the job. Required. + :type job_id: str + :param resource: The resource instance. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: JobDetails. The JobDetails is compatible with MutableMapping + :rtype: ~azure.quantum.models.JobDetails + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + job_id: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.JobDetails: + """Create a new job. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str + :param job_id: Id of the job. Required. + :type job_id: str + :param resource: The resource instance. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: JobDetails. 
The JobDetails is compatible with MutableMapping + :rtype: ~azure.quantum.models.JobDetails + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + job_id: str, + resource: Union[_models.JobDetails, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.JobDetails: + """Create a new job. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str :param job_id: Id of the job. Required. :type job_id: str - :return: JobDetails - :rtype: ~azure.quantum._client.models.JobDetails + :param resource: The resource instance. Is one of the following types: JobDetails, JSON, + IO[bytes] Required. + :type resource: ~azure.quantum.models.JobDetails or JSON or IO[bytes] + :return: JobDetails. 
The JobDetails is compatible with MutableMapping + :rtype: ~azure.quantum.models.JobDetails :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -621,42 +867,55 @@ def get(self, job_id: str, **kwargs: Any) -> _models.JobDetails: } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = kwargs.pop("headers", {}) or {} + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = kwargs.pop("params", {}) or {} + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) cls: ClsType[_models.JobDetails] = kwargs.pop("cls", None) - _request = build_jobs_get_request( + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_services_jobs_create_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, job_id=job_id, - subscription_id=self._config.subscription_id, - resource_group_name=self._config.resource_group_name, - workspace_name=self._config.workspace_name, + content_type=content_type, api_version=self._config.api_version, + content=_content, headers=_headers, params=_params, ) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True - ), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) response = 
pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 201]: if _stream: - response.read() # Load the body in memory and close the socket + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) - deserialized = self._deserialize("JobDetails", pipeline_response) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.JobDetails, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -664,58 +923,144 @@ def get(self, job_id: str, **kwargs: Any) -> _models.JobDetails: return deserialized # type: ignore @overload - def create( - self, job_id: str, job: _models.JobDetails, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.JobDetails: - """Create a job. - + def update( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + job_id: str, + resource: _models.JobUpdateOptions, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _models.JobUpdateOptions: + """Update job properties. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str :param job_id: Id of the job. Required. :type job_id: str - :param job: The complete metadata of the job to submit. Required. - :type job: ~azure.quantum._client.models.JobDetails + :param resource: The resource instance. Required. 
+ :type resource: ~azure.quantum.models.JobUpdateOptions :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". + Default value is "application/merge-patch+json". :paramtype content_type: str - :return: JobDetails - :rtype: ~azure.quantum._client.models.JobDetails + :return: JobUpdateOptions. The JobUpdateOptions is compatible with MutableMapping + :rtype: ~azure.quantum.models.JobUpdateOptions :raises ~azure.core.exceptions.HttpResponseError: """ @overload - def create( - self, job_id: str, job: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.JobDetails: - """Create a job. + def update( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + job_id: str, + resource: JSON, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _models.JobUpdateOptions: + """Update job properties. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str + :param job_id: Id of the job. Required. + :type job_id: str + :param resource: The resource instance. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: JobUpdateOptions. 
The JobUpdateOptions is compatible with MutableMapping + :rtype: ~azure.quantum.models.JobUpdateOptions + :raises ~azure.core.exceptions.HttpResponseError: + """ + @overload + def update( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + job_id: str, + resource: IO[bytes], + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _models.JobUpdateOptions: + """Update job properties. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str :param job_id: Id of the job. Required. :type job_id: str - :param job: The complete metadata of the job to submit. Required. - :type job: IO[bytes] + :param resource: The resource instance. Required. + :type resource: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". + Default value is "application/merge-patch+json". :paramtype content_type: str - :return: JobDetails - :rtype: ~azure.quantum._client.models.JobDetails + :return: JobUpdateOptions. The JobUpdateOptions is compatible with MutableMapping + :rtype: ~azure.quantum.models.JobUpdateOptions :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace - def create(self, job_id: str, job: Union[_models.JobDetails, IO[bytes]], **kwargs: Any) -> _models.JobDetails: - """Create a job. 
- + @api_version_validation( + method_added_on="2025-09-01-preview", + params_added_on={ + "2025-09-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "workspace_name", + "job_id", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-09-01-preview", "2025-12-01-preview", "2026-01-15-preview"], + ) + def update( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + job_id: str, + resource: Union[_models.JobUpdateOptions, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.JobUpdateOptions: + """Update job properties. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str :param job_id: Id of the job. Required. :type job_id: str - :param job: The complete metadata of the job to submit. Is either a JobDetails type or a - IO[bytes] type. Required. - :type job: ~azure.quantum._client.models.JobDetails or IO[bytes] - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :return: JobDetails - :rtype: ~azure.quantum._client.models.JobDetails + :param resource: The resource instance. Is one of the following types: JobUpdateOptions, JSON, + IO[bytes] Required. + :type resource: ~azure.quantum.models.JobUpdateOptions or JSON or IO[bytes] + :return: JobUpdateOptions. 
The JobUpdateOptions is compatible with MutableMapping + :rtype: ~azure.quantum.models.JobUpdateOptions :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -727,54 +1072,51 @@ def create(self, job_id: str, job: Union[_models.JobDetails, IO[bytes]], **kwarg _params = kwargs.pop("params", {}) or {} content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.JobDetails] = kwargs.pop("cls", None) + cls: ClsType[_models.JobUpdateOptions] = kwargs.pop("cls", None) - content_type = content_type or "application/json" - _json = None + content_type = content_type or "application/merge-patch+json" _content = None - if isinstance(job, (IOBase, bytes)): - _content = job + if isinstance(resource, (IOBase, bytes)): + _content = resource else: - _json = self._serialize.body(job, "JobDetails") + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_jobs_create_request( + _request = build_services_jobs_update_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, job_id=job_id, - subscription_id=self._config.subscription_id, - resource_group_name=self._config.resource_group_name, - workspace_name=self._config.workspace_name, content_type=content_type, api_version=self._config.api_version, - json=_json, content=_content, headers=_headers, params=_params, ) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True - ), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: 
PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) response = pipeline_response.http_response - if response.status_code not in [200, 201]: + if response.status_code not in [200]: if _stream: - response.read() # Load the body in memory and close the socket + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) - if response.status_code == 200: - deserialized = self._deserialize("JobDetails", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("JobDetails", pipeline_response) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.JobUpdateOptions, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -782,16 +1124,24 @@ def create(self, job_id: str, job: Union[_models.JobDetails, IO[bytes]], **kwarg return deserialized # type: ignore @distributed_trace - def cancel(self, job_id: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements - """Cancel a job. - + def delete( # pylint: disable=inconsistent-return-statements + self, subscription_id: str, resource_group_name: str, workspace_name: str, job_id: str, **kwargs: Any + ) -> None: + """Delete a job by its id. Use for cancellation in versions before 2025-12-01-preview. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. 
+ :type workspace_name: str :param job_id: Id of the job. Required. :type job_id: str :return: None :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -804,19 +1154,17 @@ def cancel(self, job_id: str, **kwargs: Any) -> None: # pylint: disable=inconsi cls: ClsType[None] = kwargs.pop("cls", None) - _request = build_jobs_cancel_request( + _request = build_services_jobs_delete_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, job_id=job_id, - subscription_id=self._config.subscription_id, - resource_group_name=self._config.resource_group_name, - workspace_name=self._config.workspace_name, api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True - ), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -828,75 +1176,45 @@ def cancel(self, job_id: str, **kwargs: Any) -> None: # pylint: disable=inconsi response = pipeline_response.http_response if response.status_code not in [204]: - if _stream: - response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) if cls: return cls(pipeline_response, None, {}) # type: ignore - @overload - def patch( - self, - job_id: str, - patch_job: List[_models.JsonPatchDocument], - *, - content_type: str = "application/json", - **kwargs: Any - ) -> 
Optional[_models.JobDetails]: - """Patch a job. - - :param job_id: Id of the job. Required. - :type job_id: str - :param patch_job: The json patch document containing the patch operations. Required. - :type patch_job: list[~azure.quantum._client.models.JsonPatchDocument] - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: JobDetails or None - :rtype: ~azure.quantum._client.models.JobDetails or None - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def patch( - self, job_id: str, patch_job: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> Optional[_models.JobDetails]: - """Patch a job. - - :param job_id: Id of the job. Required. - :type job_id: str - :param patch_job: The json patch document containing the patch operations. Required. - :type patch_job: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". - :paramtype content_type: str - :return: JobDetails or None - :rtype: ~azure.quantum._client.models.JobDetails or None - :raises ~azure.core.exceptions.HttpResponseError: - """ - @distributed_trace - def patch( - self, job_id: str, patch_job: Union[List[_models.JsonPatchDocument], IO[bytes]], **kwargs: Any - ) -> Optional[_models.JobDetails]: - """Patch a job. - + @api_version_validation( + method_added_on="2025-12-01-preview", + params_added_on={ + "2025-12-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "workspace_name", + "job_id", + "accept", + ] + }, + api_versions_list=["2025-12-01-preview", "2026-01-15-preview"], + ) + def cancel( + self, subscription_id: str, resource_group_name: str, workspace_name: str, job_id: str, **kwargs: Any + ) -> _models.JobDetails: + """Request the cancellation of an existing job. + + :param subscription_id: The Azure subscription ID. 
Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str :param job_id: Id of the job. Required. :type job_id: str - :param patch_job: The json patch document containing the patch operations. Is either a - [JsonPatchDocument] type or a IO[bytes] type. Required. - :type patch_job: list[~azure.quantum._client.models.JsonPatchDocument] or IO[bytes] - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :return: JobDetails or None - :rtype: ~azure.quantum._client.models.JobDetails or None + :return: JobDetails. The JobDetails is compatible with MutableMapping + :rtype: ~azure.quantum.models.JobDetails :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -904,96 +1222,171 @@ def patch( } error_map.update(kwargs.pop("error_map", {}) or {}) - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[Optional[_models.JobDetails]] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(patch_job, (IOBase, bytes)): - _content = patch_job - else: - _json = self._serialize.body(patch_job, "[JsonPatchDocument]") + cls: ClsType[_models.JobDetails] = kwargs.pop("cls", None) - _request = build_jobs_patch_request( + _request = build_services_jobs_cancel_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, job_id=job_id, - 
subscription_id=self._config.subscription_id, - resource_group_name=self._config.resource_group_name, - workspace_name=self._config.workspace_name, - content_type=content_type, api_version=self._config.api_version, - json=_json, - content=_content, headers=_headers, params=_params, ) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True - ), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) response = pipeline_response.http_response - if response.status_code not in [200, 204]: + if response.status_code not in [200]: if _stream: - response.read() # Load the body in memory and close the socket + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize("JobDetails", pipeline_response) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.JobDetails, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore return deserialized # type: ignore - -class ProvidersOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. 
- - Instead, you should access the following operations through - :class:`~azure.quantum._client.QuantumClient`'s - :attr:`providers` attribute. - """ - - models = _models - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace - def get_status(self, **kwargs: Any) -> Iterable["_models.ProviderStatus"]: - """Get provider status. - - :return: An iterator like instance of ProviderStatus - :rtype: ~azure.core.paging.ItemPaged[~azure.quantum._client.models.ProviderStatus] - :raises ~azure.core.exceptions.HttpResponseError: - """ + def get( + self, subscription_id: str, resource_group_name: str, workspace_name: str, job_id: str, **kwargs: Any + ) -> _models.JobDetails: + """Get job by its id. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str + :param job_id: Id of the job. Required. + :type job_id: str + :return: JobDetails. 
The JobDetails is compatible with MutableMapping + :rtype: ~azure.quantum.models.JobDetails + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models._models.ProviderStatusList] = kwargs.pop("cls", None) # pylint: disable=protected-access + cls: ClsType[_models.JobDetails] = kwargs.pop("cls", None) - error_map = { + _request = build_services_jobs_get_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + job_id=job_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.JobDetails, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + 
params_added_on={"2024-10-01-preview": ["filter", "skip", "top", "orderby"]}, + api_versions_list=[ + "2024-03-01-preview", + "2024-10-01-preview", + "2025-09-01-preview", + "2025-12-01-preview", + "2026-01-15-preview", + ], + ) + def list( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + filter: Optional[str] = None, + skip: Optional[int] = None, + top: Optional[int] = None, + orderby: Optional[str] = None, + **kwargs: Any + ) -> ItemPaged["_models.JobDetails"]: + """List all jobs. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str + :keyword filter: Filter the result list using the given expression. Default value is None. + :paramtype filter: str + :keyword skip: The number of result items to skip. Default value is None. + :paramtype skip: int + :keyword top: The number of jobs taken. Default value is None. + :paramtype top: int + :keyword orderby: The order of returned items. Default value is None. 
+ :paramtype orderby: str + :return: An iterator like instance of JobDetails + :rtype: ~azure.core.paging.ItemPaged[~azure.quantum.models.JobDetails] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[list[_models.JobDetails]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1004,17 +1397,21 @@ def get_status(self, **kwargs: Any) -> Iterable["_models.ProviderStatus"]: def prepare_request(next_link=None): if not next_link: - _request = build_providers_get_status_request( - subscription_id=self._config.subscription_id, - resource_group_name=self._config.resource_group_name, - workspace_name=self._config.workspace_name, + _request = build_services_jobs_list_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + filter=filter, + skip=skip, + top=top, + orderby=orderby, api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1033,8 +1430,8 @@ def prepare_request(next_link=None): "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1042,13 +1439,11 @@ def 
prepare_request(next_link=None): return _request def extract_data(pipeline_response): - deserialized = self._deserialize( - _models._models.ProviderStatusList, pipeline_response # pylint: disable=protected-access - ) - list_of_elem = deserialized.value + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.JobDetails], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) + return deserialized.get("nextLink") or None, iter(list_of_elem) def get_next(next_link=None): _request = prepare_request(next_link) @@ -1060,182 +1455,53 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) return pipeline_response return ItemPaged(get_next, extract_data) -class StorageOperations: +class ServicesProvidersOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.quantum._client.QuantumClient`'s - :attr:`storage` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - - @overload - def sas_uri( - self, blob_details: _models.BlobDetails, *, content_type: str = "application/json", **kwargs: Any - ) -> _models.SasUriResponse: - """Gets a URL with SAS token for a container/blob in the storage account associated with the - workspace. The SAS URL can be used to upload job input and/or download job output. - - :param blob_details: The details (name and container) of the blob to store or download data. - Required. - :type blob_details: ~azure.quantum._client.models.BlobDetails - :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. - Default value is "application/json". - :paramtype content_type: str - :return: SasUriResponse - :rtype: ~azure.quantum._client.models.SasUriResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @overload - def sas_uri( - self, blob_details: IO[bytes], *, content_type: str = "application/json", **kwargs: Any - ) -> _models.SasUriResponse: - """Gets a URL with SAS token for a container/blob in the storage account associated with the - workspace. The SAS URL can be used to upload job input and/or download job output. - - :param blob_details: The details (name and container) of the blob to store or download data. - Required. - :type blob_details: IO[bytes] - :keyword content_type: Body Parameter content-type. Content type parameter for binary body. - Default value is "application/json". 
- :paramtype content_type: str - :return: SasUriResponse - :rtype: ~azure.quantum._client.models.SasUriResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - - @distributed_trace - def sas_uri(self, blob_details: Union[_models.BlobDetails, IO[bytes]], **kwargs: Any) -> _models.SasUriResponse: - """Gets a URL with SAS token for a container/blob in the storage account associated with the - workspace. The SAS URL can be used to upload job input and/or download job output. - - :param blob_details: The details (name and container) of the blob to store or download data. Is - either a BlobDetails type or a IO[bytes] type. Required. - :type blob_details: ~azure.quantum._client.models.BlobDetails or IO[bytes] - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :return: SasUriResponse - :rtype: ~azure.quantum._client.models.SasUriResponse - :raises ~azure.core.exceptions.HttpResponseError: - """ - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) - _params = kwargs.pop("params", {}) or {} - - content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) - cls: ClsType[_models.SasUriResponse] = kwargs.pop("cls", None) - - content_type = content_type or "application/json" - _json = None - _content = None - if isinstance(blob_details, (IOBase, bytes)): - _content = blob_details - else: - _json = self._serialize.body(blob_details, "BlobDetails") - - _request = build_storage_sas_uri_request( - subscription_id=self._config.subscription_id, - resource_group_name=self._config.resource_group_name, - workspace_name=self._config.workspace_name, - content_type=content_type, - api_version=self._config.api_version, - json=_json, - 
content=_content, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - deserialized = self._deserialize("SasUriResponse", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore - - -class QuotasOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.quantum._client.QuantumClient`'s - :attr:`quotas` attribute. + :class:`~azure.quantum.WorkspaceClient`'s + :attr:`providers` attribute. 
""" - models = _models - - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: WorkspaceClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list(self, **kwargs: Any) -> Iterable["_models.Quota"]: - """List quotas for the given workspace. - - :return: An iterator like instance of Quota - :rtype: ~azure.core.paging.ItemPaged[~azure.quantum._client.models.Quota] + def list( + self, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> ItemPaged["_models.ProviderStatus"]: + """List all providers in the workspace with their respective status. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. 
+ :type workspace_name: str + :return: An iterator like instance of ProviderStatus + :rtype: ~azure.core.paging.ItemPaged[~azure.quantum.models.ProviderStatus] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models._models.QuotaList] = kwargs.pop("cls", None) # pylint: disable=protected-access + cls: ClsType[list[_models.ProviderStatus]] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1246,17 +1512,17 @@ def list(self, **kwargs: Any) -> Iterable["_models.Quota"]: def prepare_request(next_link=None): if not next_link: - _request = build_quotas_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=self._config.resource_group_name, - workspace_name=self._config.workspace_name, + _request = build_services_providers_list_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1275,8 +1541,8 @@ def prepare_request(next_link=None): "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ 
-1284,13 +1550,11 @@ def prepare_request(next_link=None): return _request def extract_data(pipeline_response): - deserialized = self._deserialize( - _models._models.QuotaList, pipeline_response # pylint: disable=protected-access - ) - list_of_elem = deserialized.value + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.ProviderStatus], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) + return deserialized.get("nextLink") or None, iter(list_of_elem) def get_next(next_link=None): _request = prepare_request(next_link) @@ -1302,50 +1566,53 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) return pipeline_response return ItemPaged(get_next, extract_data) -class SessionsOperations: +class ServicesQuotasOperations: """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through - :class:`~azure.quantum._client.QuantumClient`'s - :attr:`sessions` attribute. + :class:`~azure.quantum.WorkspaceClient`'s + :attr:`quotas` attribute. 
""" - models = _models - - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: WorkspaceClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list(self, **kwargs: Any) -> Iterable["_models.SessionDetails"]: - """List sessions. + def list( + self, subscription_id: str, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> ItemPaged["_models.Quota"]: + """List quotas for the given workspace. - :return: An iterator like instance of SessionDetails - :rtype: ~azure.core.paging.ItemPaged[~azure.quantum._client.models.SessionDetails] + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. 
+ :type workspace_name: str + :return: An iterator like instance of Quota + :rtype: ~azure.core.paging.ItemPaged[~azure.quantum.models.Quota] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models._models.SessionDetailsList] = kwargs.pop("cls", None) # pylint: disable=protected-access + cls: ClsType[list[_models.Quota]] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1356,17 +1623,17 @@ def list(self, **kwargs: Any) -> Iterable["_models.SessionDetails"]: def prepare_request(next_link=None): if not next_link: - _request = build_sessions_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=self._config.resource_group_name, - workspace_name=self._config.workspace_name, + _request = build_services_quotas_list_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1385,8 +1652,8 @@ def prepare_request(next_link=None): "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1394,13 
+1661,11 @@ def prepare_request(next_link=None): return _request def extract_data(pipeline_response): - deserialized = self._deserialize( - _models._models.SessionDetailsList, pipeline_response # pylint: disable=protected-access - ) - list_of_elem = deserialized.value + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.Quota], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) + return deserialized.get("nextLink") or None, iter(list_of_elem) def get_next(next_link=None): _request = prepare_request(next_link) @@ -1412,131 +1677,155 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) return pipeline_response return ItemPaged(get_next, extract_data) - @distributed_trace - def get(self, session_id: str, **kwargs: Any) -> _models.SessionDetails: - """Get session by id. +class ServicesSessionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.quantum.WorkspaceClient`'s + :attr:`sessions` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: WorkspaceClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @overload + def open( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + session_id: str, + resource: _models.SessionDetails, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.SessionDetails: + """Open a new session. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str :param session_id: Id of the session. Required. :type session_id: str - :return: SessionDetails - :rtype: ~azure.quantum._client.models.SessionDetails + :param resource: The resource instance. Required. + :type resource: ~azure.quantum.models.SessionDetails + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: SessionDetails. 
The SessionDetails is compatible with MutableMapping + :rtype: ~azure.quantum.models.SessionDetails :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, - } - error_map.update(kwargs.pop("error_map", {}) or {}) - - _headers = kwargs.pop("headers", {}) or {} - _params = kwargs.pop("params", {}) or {} - - cls: ClsType[_models.SessionDetails] = kwargs.pop("cls", None) - - _request = build_sessions_get_request( - session_id=session_id, - subscription_id=self._config.subscription_id, - resource_group_name=self._config.resource_group_name, - workspace_name=self._config.workspace_name, - api_version=self._config.api_version, - headers=_headers, - params=_params, - ) - path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True - ), - } - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - _stream = False - pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access - _request, stream=_stream, **kwargs - ) - - response = pipeline_response.http_response - - if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket - map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response) - raise HttpResponseError(response=response, model=error) - - deserialized = self._deserialize("SessionDetails", pipeline_response) - - if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore - - return deserialized # type: ignore @overload def open( - self, session_id: str, session: _models.SessionDetails, *, content_type: str = "application/json", **kwargs: Any + self, + subscription_id: str, + resource_group_name: str, + 
workspace_name: str, + session_id: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any ) -> _models.SessionDetails: - """Open a session. - + """Open a new session. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str :param session_id: Id of the session. Required. :type session_id: str - :param session: The complete metadata of the session to be opened. Required. - :type session: ~azure.quantum._client.models.SessionDetails + :param resource: The resource instance. Required. + :type resource: JSON :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. Default value is "application/json". :paramtype content_type: str - :return: SessionDetails - :rtype: ~azure.quantum._client.models.SessionDetails + :return: SessionDetails. The SessionDetails is compatible with MutableMapping + :rtype: ~azure.quantum.models.SessionDetails :raises ~azure.core.exceptions.HttpResponseError: """ @overload def open( - self, session_id: str, session: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + session_id: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any ) -> _models.SessionDetails: - """Open a session. - + """Open a new session. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str :param session_id: Id of the session. Required. 
:type session_id: str - :param session: The complete metadata of the session to be opened. Required. - :type session: IO[bytes] + :param resource: The resource instance. Required. + :type resource: IO[bytes] :keyword content_type: Body Parameter content-type. Content type parameter for binary body. Default value is "application/json". :paramtype content_type: str - :return: SessionDetails - :rtype: ~azure.quantum._client.models.SessionDetails + :return: SessionDetails. The SessionDetails is compatible with MutableMapping + :rtype: ~azure.quantum.models.SessionDetails :raises ~azure.core.exceptions.HttpResponseError: """ @distributed_trace def open( - self, session_id: str, session: Union[_models.SessionDetails, IO[bytes]], **kwargs: Any + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + session_id: str, + resource: Union[_models.SessionDetails, JSON, IO[bytes]], + **kwargs: Any ) -> _models.SessionDetails: - """Open a session. - + """Open a new session. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str :param session_id: Id of the session. Required. :type session_id: str - :param session: The complete metadata of the session to be opened. Is either a SessionDetails - type or a IO[bytes] type. Required. - :type session: ~azure.quantum._client.models.SessionDetails or IO[bytes] - :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. - Default value is None. - :paramtype content_type: str - :return: SessionDetails - :rtype: ~azure.quantum._client.models.SessionDetails + :param resource: The resource instance. Is one of the following types: SessionDetails, JSON, + IO[bytes] Required. 
+ :type resource: ~azure.quantum.models.SessionDetails or JSON or IO[bytes] + :return: SessionDetails. The SessionDetails is compatible with MutableMapping + :rtype: ~azure.quantum.models.SessionDetails :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1551,33 +1840,29 @@ def open( cls: ClsType[_models.SessionDetails] = kwargs.pop("cls", None) content_type = content_type or "application/json" - _json = None _content = None - if isinstance(session, (IOBase, bytes)): - _content = session + if isinstance(resource, (IOBase, bytes)): + _content = resource else: - _json = self._serialize.body(session, "SessionDetails") + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore - _request = build_sessions_open_request( + _request = build_services_sessions_open_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, session_id=session_id, - subscription_id=self._config.subscription_id, - resource_group_name=self._config.resource_group_name, - workspace_name=self._config.workspace_name, content_type=content_type, api_version=self._config.api_version, - json=_json, content=_content, headers=_headers, params=_params, ) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True - ), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1586,16 +1871,17 @@ def open( if response.status_code not in [200, 201]: if _stream: - 
response.read() # Load the body in memory and close the socket + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) - if response.status_code == 200: - deserialized = self._deserialize("SessionDetails", pipeline_response) - - if response.status_code == 201: - deserialized = self._deserialize("SessionDetails", pipeline_response) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.SessionDetails, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1603,16 +1889,24 @@ def open( return deserialized # type: ignore @distributed_trace - def close(self, session_id: str, **kwargs: Any) -> _models.SessionDetails: - """Close a session. - + def close( + self, subscription_id: str, resource_group_name: str, workspace_name: str, session_id: str, **kwargs: Any + ) -> _models.SessionDetails: + """Close an existing session. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str :param session_id: Id of the session. Required. :type session_id: str - :return: SessionDetails - :rtype: ~azure.quantum._client.models.SessionDetails + :return: SessionDetails. 
The SessionDetails is compatible with MutableMapping + :rtype: ~azure.quantum.models.SessionDetails :raises ~azure.core.exceptions.HttpResponseError: """ - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1625,23 +1919,21 @@ def close(self, session_id: str, **kwargs: Any) -> _models.SessionDetails: cls: ClsType[_models.SessionDetails] = kwargs.pop("cls", None) - _request = build_sessions_close_request( + _request = build_services_sessions_close_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, session_id=session_id, - subscription_id=self._config.subscription_id, - resource_group_name=self._config.resource_group_name, - workspace_name=self._config.workspace_name, api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True - ), + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), } _request.url = self._client.format_url(_request.url, **path_format_arguments) - _stream = False + _stream = kwargs.pop("stream", False) pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access _request, stream=_stream, **kwargs ) @@ -1650,12 +1942,17 @@ def close(self, session_id: str, **kwargs: Any) -> _models.SessionDetails: if response.status_code not in [200]: if _stream: - response.read() # Load the body in memory and close the socket + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response) - raise HttpResponseError(response=response, 
model=error) + raise HttpResponseError(response=response) - deserialized = self._deserialize("SessionDetails", pipeline_response) + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.SessionDetails, response.json()) if cls: return cls(pipeline_response, deserialized, {}) # type: ignore @@ -1663,21 +1960,132 @@ def close(self, session_id: str, **kwargs: Any) -> _models.SessionDetails: return deserialized # type: ignore @distributed_trace - def jobs_list(self, session_id: str, **kwargs: Any) -> Iterable["_models.JobDetails"]: - """List jobs in a session. - + def get( + self, subscription_id: str, resource_group_name: str, workspace_name: str, session_id: str, **kwargs: Any + ) -> _models.SessionDetails: + """Get Session by its id. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str :param session_id: Id of the session. Required. :type session_id: str - :return: An iterator like instance of JobDetails - :rtype: ~azure.core.paging.ItemPaged[~azure.quantum._client.models.JobDetails] + :return: SessionDetails. 
The SessionDetails is compatible with MutableMapping + :rtype: ~azure.quantum.models.SessionDetails + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.SessionDetails] = kwargs.pop("cls", None) + + _request = build_services_sessions_get_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + session_id=session_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_models.SessionDetails, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2024-10-01-preview", + params_added_on={ + "2024-10-01-preview": [ + "api_version", + "subscription_id", + 
"resource_group_name", + "workspace_name", + "filter", + "skip", + "top", + "orderby", + "accept", + ] + }, + api_versions_list=["2024-10-01-preview", "2025-09-01-preview", "2025-12-01-preview", "2026-01-15-preview"], + ) + def listv2( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + *, + filter: Optional[str] = None, + skip: Optional[int] = None, + top: Optional[int] = None, + orderby: Optional[str] = None, + **kwargs: Any + ) -> ItemPaged["_models.SessionDetails"]: + """List all Sessions. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str + :keyword filter: Filter the result list using the given expression. Default value is None. + :paramtype filter: str + :keyword skip: The number of result items to skip. Default value is None. + :paramtype skip: int + :keyword top: The number of jobs taken. Default value is None. + :paramtype top: int + :keyword orderby: The order of returned items. Default value is None. 
+ :paramtype orderby: str + :return: An iterator like instance of SessionDetails + :rtype: ~azure.core.paging.ItemPaged[~azure.quantum.models.SessionDetails] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models._models.JobDetailsList] = kwargs.pop("cls", None) # pylint: disable=protected-access + cls: ClsType[list[_models.SessionDetails]] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1688,18 +2096,21 @@ def jobs_list(self, session_id: str, **kwargs: Any) -> Iterable["_models.JobDeta def prepare_request(next_link=None): if not next_link: - _request = build_sessions_jobs_list_request( - session_id=session_id, - subscription_id=self._config.subscription_id, - resource_group_name=self._config.resource_group_name, - workspace_name=self._config.workspace_name, + _request = build_services_sessions_listv2_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + filter=filter, + skip=skip, + top=top, + orderby=orderby, api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1718,8 +2129,8 @@ def prepare_request(next_link=None): "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True + "endpoint": self._serialize.url( + "self._config.endpoint", 
self._config.endpoint, "str", skip_quote=True ), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1727,13 +2138,11 @@ def prepare_request(next_link=None): return _request def extract_data(pipeline_response): - deserialized = self._deserialize( - _models._models.JobDetailsList, pipeline_response # pylint: disable=protected-access - ) - list_of_elem = deserialized.value + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.SessionDetails], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) + return deserialized.get("nextLink") or None, iter(list_of_elem) def get_next(next_link=None): _request = prepare_request(next_link) @@ -1745,50 +2154,65 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) return pipeline_response return ItemPaged(get_next, extract_data) - -class TopLevelItemsOperations: - """ - .. warning:: - **DO NOT** instantiate this class directly. - - Instead, you should access the following operations through - :class:`~azure.quantum._client.QuantumClient`'s - :attr:`top_level_items` attribute. 
- """ - - models = _models - - def __init__(self, *args, **kwargs): - input_args = list(args) - self._client = input_args.pop(0) if input_args else kwargs.pop("client") - self._config = input_args.pop(0) if input_args else kwargs.pop("config") - self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") - self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace - def list(self, **kwargs: Any) -> Iterable["_models.ItemDetails"]: - """List top-level items. + @api_version_validation( + params_added_on={"2024-10-01-preview": ["filter", "skip", "top", "orderby"]}, + api_versions_list=[ + "2024-03-01-preview", + "2024-10-01-preview", + "2025-09-01-preview", + "2025-12-01-preview", + "2026-01-15-preview", + ], + ) + def jobs_list( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + session_id: str, + *, + filter: Optional[str] = None, + skip: Optional[int] = None, + top: Optional[int] = None, + orderby: Optional[str] = None, + **kwargs: Any + ) -> ItemPaged["_models.JobDetails"]: + """List jobs in a session. - :return: An iterator like instance of ItemDetails - :rtype: ~azure.core.paging.ItemPaged[~azure.quantum._client.models.ItemDetails] + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str + :param session_id: Id of the session to list jobs from. Required. + :type session_id: str + :keyword filter: Filter the result list using the given expression. Default value is None. + :paramtype filter: str + :keyword skip: The number of result items to skip. Default value is None. + :paramtype skip: int + :keyword top: The number of jobs taken. Default value is None. + :paramtype top: int + :keyword orderby: The order of returned items. 
Default value is None. + :paramtype orderby: str + :return: An iterator like instance of JobDetails + :rtype: ~azure.core.paging.ItemPaged[~azure.quantum.models.JobDetails] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = kwargs.pop("params", {}) or {} - cls: ClsType[_models._models.ItemDetailsList] = kwargs.pop("cls", None) # pylint: disable=protected-access + cls: ClsType[list[_models.JobDetails]] = kwargs.pop("cls", None) - error_map = { + error_map: MutableMapping = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, @@ -1799,17 +2223,22 @@ def list(self, **kwargs: Any) -> Iterable["_models.ItemDetails"]: def prepare_request(next_link=None): if not next_link: - _request = build_top_level_items_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=self._config.resource_group_name, - workspace_name=self._config.workspace_name, + _request = build_services_sessions_jobs_list_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + session_id=session_id, + filter=filter, + skip=skip, + top=top, + orderby=orderby, api_version=self._config.api_version, headers=_headers, params=_params, ) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True ), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1828,8 +2257,8 @@ def prepare_request(next_link=None): "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) path_format_arguments = { - "azureRegion": self._serialize.url( - "self._config.azure_region", self._config.azure_region, "str", skip_quote=True + "endpoint": self._serialize.url( + "self._config.endpoint", 
self._config.endpoint, "str", skip_quote=True ), } _request.url = self._client.format_url(_request.url, **path_format_arguments) @@ -1837,13 +2266,11 @@ def prepare_request(next_link=None): return _request def extract_data(pipeline_response): - deserialized = self._deserialize( - _models._models.ItemDetailsList, pipeline_response # pylint: disable=protected-access - ) - list_of_elem = deserialized.value + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(list[_models.JobDetails], deserialized.get("value", [])) if cls: list_of_elem = cls(list_of_elem) # type: ignore - return deserialized.next_link or None, iter(list_of_elem) + return deserialized.get("nextLink") or None, iter(list_of_elem) def get_next(next_link=None): _request = prepare_request(next_link) @@ -1855,12 +2282,212 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - if _stream: - response.read() # Load the body in memory and close the socket map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.RestError, pipeline_response) - raise HttpResponseError(response=response, model=error) + raise HttpResponseError(response=response) return pipeline_response return ItemPaged(get_next, extract_data) + + +class ServicesStorageOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.quantum.WorkspaceClient`'s + :attr:`storage` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: WorkspaceClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @overload + def get_sas_uri( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + blob_details: _models.BlobDetails, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.SasUriResponse: + """Gets a URL with SAS token for a container/blob in the storage account associated with the + workspace. Starting with version 2026-01-15-preview, when used for a container the container is + also created if it does not already exist. The SAS URL can be used to upload job input and/or + download job output. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str + :param blob_details: The details (name and container) of the blob. Required. + :type blob_details: ~azure.quantum.models.BlobDetails + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: SasUriResponse. 
The SasUriResponse is compatible with MutableMapping + :rtype: ~azure.quantum.models.SasUriResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def get_sas_uri( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + blob_details: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.SasUriResponse: + """Gets a URL with SAS token for a container/blob in the storage account associated with the + workspace. Starting with version 2026-01-15-preview, when used for a container the container is + also created if it does not already exist. The SAS URL can be used to upload job input and/or + download job output. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str + :param blob_details: The details (name and container) of the blob. Required. + :type blob_details: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: SasUriResponse. The SasUriResponse is compatible with MutableMapping + :rtype: ~azure.quantum.models.SasUriResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def get_sas_uri( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + blob_details: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.SasUriResponse: + """Gets a URL with SAS token for a container/blob in the storage account associated with the + workspace. Starting with version 2026-01-15-preview, when used for a container the container is + also created if it does not already exist. 
The SAS URL can be used to upload job input and/or + download job output. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str + :param blob_details: The details (name and container) of the blob. Required. + :type blob_details: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: SasUriResponse. The SasUriResponse is compatible with MutableMapping + :rtype: ~azure.quantum.models.SasUriResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def get_sas_uri( + self, + subscription_id: str, + resource_group_name: str, + workspace_name: str, + blob_details: Union[_models.BlobDetails, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.SasUriResponse: + """Gets a URL with SAS token for a container/blob in the storage account associated with the + workspace. Starting with version 2026-01-15-preview, when used for a container the container is + also created if it does not already exist. The SAS URL can be used to upload job input and/or + download job output. + + :param subscription_id: The Azure subscription ID. Required. + :type subscription_id: str + :param resource_group_name: Name of the Azure resource group. Required. + :type resource_group_name: str + :param workspace_name: Name of the Azure Quantum workspace. Required. + :type workspace_name: str + :param blob_details: The details (name and container) of the blob. Is one of the following + types: BlobDetails, JSON, IO[bytes] Required. + :type blob_details: ~azure.quantum.models.BlobDetails or JSON or IO[bytes] + :return: SasUriResponse. 
The SasUriResponse is compatible with MutableMapping + :rtype: ~azure.quantum.models.SasUriResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.SasUriResponse] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(blob_details, (IOBase, bytes)): + _content = blob_details + else: + _content = json.dumps(blob_details, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_services_storage_get_sas_uri_request( + subscription_id=subscription_id, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized 
= response.iter_bytes() + else: + deserialized = _deserialize(_models.SasUriResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/operations/_patch.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/operations/_patch.py index f7dd3251033..87676c65a8f 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/operations/_patch.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_client/operations/_patch.py @@ -1,14 +1,15 @@ -# ------------------------------------ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. -# ------------------------------------ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- """Customize generated code here. 
Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize """ -from typing import List -__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level def patch_sdk(): diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_constants.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_constants.py index a545480bc4d..c4574722b54 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_constants.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_constants.py @@ -53,10 +53,18 @@ class EnvironmentKind(Enum): DOGFOOD = 3 +class WorkspaceKind(Enum): + V1 = "V1" + V2 = "V2" + + class ConnectionConstants: DATA_PLANE_CREDENTIAL_SCOPE = "https://quantum.microsoft.com/.default" ARM_CREDENTIAL_SCOPE = "https://management.azure.com/.default" + DEFAULT_ARG_API_VERSION = "2021-03-01" + DEFAULT_WORKSPACE_API_VERSION = "2025-11-01-preview" + MSA_TENANT_ID = "9188040d-6c67-4c5b-b112-36a304b66dad" AUTHORITY = AzureIdentityInternals.get_default_authority() @@ -65,10 +73,14 @@ class ConnectionConstants: # pylint: disable=unnecessary-lambda-assignment GET_QUANTUM_PRODUCTION_ENDPOINT = \ lambda location: f"https://{location}.quantum.azure.com/" + GET_QUANTUM_PRODUCTION_ENDPOINT_v2 = \ + lambda location: f"https://{location}-v2.quantum.azure.com/" GET_QUANTUM_CANARY_ENDPOINT = \ lambda location: f"https://{location or 'eastus2euap'}.quantum.azure.com/" GET_QUANTUM_DOGFOOD_ENDPOINT = \ lambda location: f"https://{location}.quantum-test.azure.com/" + GET_QUANTUM_DOGFOOD_ENDPOINT_v2 = \ + lambda location: f"https://{location}-v2.quantum-test.azure.com/" ARM_PRODUCTION_ENDPOINT = "https://management.azure.com/" ARM_DOGFOOD_ENDPOINT = "https://api-dogfood.resources.windows-int.net/" @@ -95,3 +107,65 @@ class ConnectionConstants: GUID_REGEX_PATTERN = ( 
r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}" ) + +VALID_WORKSPACE_NAME_PATTERN = r"^[a-zA-Z0-9]+(-*[a-zA-Z0-9])*$" + +VALID_AZURE_REGIONS = { + "australiacentral", + "australiacentral2", + "australiaeast", + "australiasoutheast", + "austriaeast", + "belgiumcentral", + "brazilsouth", + "brazilsoutheast", + "canadacentral", + "canadaeast", + "centralindia", + "centralus", + "centraluseuap", + "chilecentral", + "eastasia", + "eastus", + "eastus2", + "eastus2euap", + "francecentral", + "francesouth", + "germanynorth", + "germanywestcentral", + "indonesiacentral", + "israelcentral", + "italynorth", + "japaneast", + "japanwest", + "koreacentral", + "koreasouth", + "malaysiawest", + "mexicocentral", + "newzealandnorth", + "northcentralus", + "northeurope", + "norwayeast", + "norwaywest", + "polandcentral", + "qatarcentral", + "southafricanorth", + "southafricawest", + "southcentralus", + "southindia", + "southeastasia", + "spaincentral", + "swedencentral", + "switzerlandnorth", + "switzerlandwest", + "uaecentral", + "uaenorth", + "uksouth", + "ukwest", + "westcentralus", + "westeurope", + "westindia", + "westus", + "westus2", + "westus3", +} diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_mgmt_client.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_mgmt_client.py new file mode 100644 index 00000000000..7c78907c45d --- /dev/null +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_mgmt_client.py @@ -0,0 +1,252 @@ +## +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +## +""" +Module providing the WorkspaceMgmtClient class for managing workspace operations. +Created to do not add additional azure-mgmt-* dependencies that can conflict with existing ones. 
+""" + +import logging +from http import HTTPStatus +from typing import Any, Dict, Optional, cast +from azure.core import PipelineClient +from azure.core.credentials import TokenProvider +from azure.core.pipeline import policies +from azure.core.rest import HttpRequest +from azure.core.exceptions import HttpResponseError +# from azure.quantum._workspace_connection_params import WorkspaceConnectionParams +from ._workspace_connection_params import WorkspaceConnectionParams +# from azure.quantum._constants import ConnectionConstants +from ._constants import ConnectionConstants +# from azure.quantum._client._configuration import VERSION +from ._client._configuration import VERSION + +logger = logging.getLogger(__name__) + +__all__ = ["WorkspaceMgmtClient"] + + +class WorkspaceMgmtClient(): + """ + Client for Azure Quantum Workspace related ARM/ARG operations. + Uses PipelineClient under the hood which is standard for all Azure SDK clients, + see https://learn.microsoft.com/en-us/azure/developer/python/sdk/fundamentals/http-pipeline-retries. + + :param credential: + The credential to use to connect to Azure services. + + :param base_url: + The base URL for the ARM endpoint. + + :param user_agent: + Add the specified value as a prefix to the HTTP User-Agent header. + """ + + # Constants + DEFAULT_RETRY_TOTAL = 3 + CONTENT_TYPE_JSON = "application/json" + CONNECT_DOC_LINK = "https://learn.microsoft.com/en-us/azure/quantum/how-to-connect-workspace" + CONNECT_DOC_MESSAGE = f"To find details on how to connect to your workspace, please see {CONNECT_DOC_LINK}." + + def __init__(self, credential: TokenProvider, base_url: str, user_agent: Optional[str] = None) -> None: + """ + Initialize the WorkspaceMgmtClient. + + :param credential: + The credential to use to connect to Azure services. + + :param base_url: + The base URL for the ARM endpoint. 
+ """ + self._credential = credential + self._base_url = base_url + self._policies = [ + policies.RequestIdPolicy(), + policies.HeadersPolicy({ + "Content-Type": self.CONTENT_TYPE_JSON, + "Accept": self.CONTENT_TYPE_JSON, + }), + policies.UserAgentPolicy(user_agent=user_agent, sdk_moniker="quantum/{}".format(VERSION)), + policies.RetryPolicy(retry_total=self.DEFAULT_RETRY_TOTAL), + policies.BearerTokenCredentialPolicy(self._credential, ConnectionConstants.ARM_CREDENTIAL_SCOPE), + ] + self._client: PipelineClient = PipelineClient(base_url=cast(str, base_url), policies=self._policies) + + def close(self) -> None: + self._client.close() + + def __enter__(self) -> 'WorkspaceMgmtClient': + self._client.__enter__() + return self + + def __exit__(self, *exc_details: Any) -> None: + self._client.__exit__(*exc_details) + + def load_workspace_from_arg(self, connection_params: WorkspaceConnectionParams) -> None: + """ + Queries Azure Resource Graph to find a workspace by name and optionally location, resource group, subscription. + Provided workspace name, location, resource group, and subscription in connection params must be validated beforehand. + + :param connection_params: + The workspace connection parameters to use and update. 
+ """ + if not connection_params.workspace_name: + raise ValueError("Workspace name must be specified to try to load workspace details from ARG.") + + query = f""" + Resources + | where type =~ 'microsoft.quantum/workspaces' + | where name =~ '{connection_params.workspace_name}' + """ + + if connection_params.resource_group: + query += f"\n | where resourceGroup =~ '{connection_params.resource_group}'" + + if connection_params.location: + query += f"\n | where location =~ '{connection_params.location}'" + + query += """ + | extend endpointUri = tostring(properties.endpointUri), workspaceKind = tostring(properties.workspaceKind) + | project name, subscriptionId, resourceGroup, location, endpointUri, workspaceKind + """ + + request_body = { + "query": query + } + + if connection_params.subscription_id: + request_body["subscriptions"] = [connection_params.subscription_id] + + # Create request to Azure Resource Graph API + request = HttpRequest( + method="POST", + url=self._client.format_url("/providers/Microsoft.ResourceGraph/resources"), + params={"api-version": ConnectionConstants.DEFAULT_ARG_API_VERSION}, + json=request_body + ) + + try: + response = self._client.send_request(request) + response.raise_for_status() + result = response.json() + except Exception as e: + raise RuntimeError( + f"Could not load workspace details from Azure Resource Graph: {str(e)}.\n{self.CONNECT_DOC_MESSAGE}" + ) from e + + data = result.get('data', []) + + if not data: + raise ValueError(f"No matching workspace found with name '{connection_params.workspace_name}'. {self.CONNECT_DOC_MESSAGE}") + + if len(data) > 1: + raise ValueError( + f"Multiple Azure Quantum workspaces found with name '{connection_params.workspace_name}'. " + f"Please specify additional connection parameters. 
{self.CONNECT_DOC_MESSAGE}" + ) + + workspace_data: Dict[str, Any] = data[0] + + connection_params.subscription_id = workspace_data.get('subscriptionId') + connection_params.resource_group = workspace_data.get('resourceGroup') + connection_params.location = workspace_data.get('location') + connection_params.quantum_endpoint = workspace_data.get('endpointUri') + connection_params.workspace_kind = workspace_data.get('workspaceKind') + + logger.debug( + "Found workspace '%s' in subscription '%s', resource group '%s', location '%s', endpoint '%s', kind '%s'.", + connection_params.workspace_name, + connection_params.subscription_id, + connection_params.resource_group, + connection_params.location, + connection_params.quantum_endpoint, + connection_params.workspace_kind + ) + + # If one of the required parameters is missing, probably workspace in failed provisioning state + if not connection_params.is_complete(): + raise ValueError( + f"Failed to retrieve complete workspace details for workspace '{connection_params.workspace_name}'. " + "Please check that workspace is in valid state." + ) + + def load_workspace_from_arm(self, connection_params: WorkspaceConnectionParams) -> None: + """ + Fetches the workspace resource from ARM and sets location and endpoint URI params. + Provided workspace name, resource group, and subscription in connection params must be validated beforehand. + + :param connection_params: + The workspace connection parameters to use and update. 
+ """ + if not all([connection_params.subscription_id, connection_params.resource_group, connection_params.workspace_name]): + raise ValueError("Missing required connection parameters to load workspace details from ARM.") + + api_version = connection_params.api_version or ConnectionConstants.DEFAULT_WORKSPACE_API_VERSION + + url = ( + f"/subscriptions/{connection_params.subscription_id}" + f"/resourceGroups/{connection_params.resource_group}" + f"/providers/Microsoft.Quantum/workspaces/{connection_params.workspace_name}" + ) + + request = HttpRequest( + method="GET", + url=self._client.format_url(url), + params={"api-version": api_version}, + ) + + try: + response = self._client.send_request(request) + response.raise_for_status() + workspace_data: Dict[str, Any] = response.json() + except HttpResponseError as e: + if e.status_code == HTTPStatus.NOT_FOUND: + raise ValueError( + f"Azure Quantum workspace '{connection_params.workspace_name}' " + f"not found in resource group '{connection_params.resource_group}' " + f"and subscription '{connection_params.subscription_id}'. " + f"{self.CONNECT_DOC_MESSAGE}" + ) from e + # Re-raise for other HTTP errors + raise + except Exception as e: + raise RuntimeError( + f"Could not load workspace details from ARM: {str(e)}.\n{self.CONNECT_DOC_MESSAGE}" + ) from e + + # Extract and apply location + location = workspace_data.get("location") + if location: + connection_params.location = location + logger.debug( + "Updated workspace location from ARM: %s", + location + ) + else: + raise ValueError( + f"Failed to retrieve location for workspace '{connection_params.workspace_name}'. " + f"Please check that workspace is in valid state." 
+ ) + + # Extract and apply endpoint URI from properties + properties: Dict[str, Any] = workspace_data.get("properties", {}) + endpoint_uri = properties.get("endpointUri") + if endpoint_uri: + connection_params.quantum_endpoint = endpoint_uri + logger.debug( + "Updated workspace endpoint from ARM: %s", connection_params.quantum_endpoint + ) + else: + raise ValueError( + f"Failed to retrieve endpoint uri for workspace '{connection_params.workspace_name}'. " + f"Please check that workspace is in valid state." + ) + + # Set workspaceKind if available + workspace_kind = properties.get("workspaceKind") + if workspace_kind: + connection_params.workspace_kind = workspace_kind + logger.debug( + "Updated workspace kind from ARM: %s", connection_params.workspace_kind + ) diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_workspace_connection_params.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_workspace_connection_params.py index 3d25ed9ec3a..20e352a90c6 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_workspace_connection_params.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/_workspace_connection_params.py @@ -14,14 +14,17 @@ ) from azure.core.credentials import AzureKeyCredential from azure.core.pipeline.policies import AzureKeyCredentialPolicy -# from azure.quantum._authentication import _DefaultAzureCredential -from ._authentication import _DefaultAzureCredential +# from azure.identity import DefaultAzureCredential +from ..azure_identity import DefaultAzureCredential # from azure.quantum._constants import ( from ._constants import ( EnvironmentKind, + WorkspaceKind, EnvironmentVariables, ConnectionConstants, GUID_REGEX_PATTERN, + VALID_WORKSPACE_NAME_PATTERN, + VALID_AZURE_REGIONS, ) class WorkspaceConnectionParams: @@ -48,9 +51,19 @@ class WorkspaceConnectionParams: ResourceGroupName=(?P[^\s;]+); WorkspaceName=(?P[^\s;]+); ApiKey=(?P[^\s;]+); - 
QuantumEndpoint=(?Phttps://(?P[^\s\.]+).quantum(?:-test)?.azure.com/); + QuantumEndpoint=(?Phttps://(?P[a-zA-Z0-9]+)(?:-(?Pv2))?.quantum(?:-test)?.azure.com/); """, re.VERBOSE | re.IGNORECASE) + + WORKSPACE_NOT_FULLY_SPECIFIED_MSG = """ + Azure Quantum workspace not fully specified. + Please specify one of the following: + 1) A valid resource ID. + 2) A valid combination of subscription ID, + resource group name, and workspace name. + 3) A valid connection string (via Workspace.from_connection_string()). + 4) A valid workspace name. + """ def __init__( self, @@ -70,6 +83,7 @@ def __init__( api_version: Optional[str] = None, connection_string: Optional[str] = None, on_new_client_request: Optional[Callable] = None, + workspace_kind: Optional[str] = None, ): # fields are used for these properties since # they have special getters/setters @@ -77,6 +91,7 @@ def __init__( self._environment = None self._quantum_endpoint = None self._arm_endpoint = None + self._workspace_kind = None # regular connection properties self.subscription_id = None self.resource_group = None @@ -87,6 +102,8 @@ def __init__( self.client_id = None self.tenant_id = None self.api_version = None + # Track if connection string was used + self._used_connection_string = False # callback to create a new client if needed # for example, when changing the user agent self.on_new_client_request = on_new_client_request @@ -108,8 +125,84 @@ def __init__( user_agent=user_agent, user_agent_app_id=user_agent_app_id, workspace_name=workspace_name, + workspace_kind=workspace_kind, ) self.apply_resource_id(resource_id=resource_id) + # Validate connection parameters if they are set + self._validate_connection_params() + + def _validate_connection_params(self): + self._validate_subscription_id() + self._validate_resource_group() + self._validate_workspace_name() + self._validate_location() + + def _validate_subscription_id(self): + # Validate that subscription id is a valid GUID + if self.subscription_id is not None: + 
if not isinstance(self.subscription_id, str): + raise ValueError("Subscription ID must be a string.") + if not re.match(f"^{GUID_REGEX_PATTERN}$", self.subscription_id, re.IGNORECASE): + raise ValueError("Subscription ID must be a valid GUID.") + + def _validate_resource_group(self): + # Validate resource group, see https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/resource-name-rules#microsoftresources + # Length 1-90, valid characters: alphanumeric, underscore, parentheses, hyphen, period (except at end), and Unicode characters: + # Uppercase Letter - Signified by the Unicode designation "Lu" (letter, uppercase); + # Lowercase Letter - Signified by the Unicode designation "Ll" (letter, lowercase); + # Titlecase Letter - Signified by the Unicode designation "Lt" (letter, titlecase); + # Modifier Letter - Signified by the Unicode designation "Lm" (letter, modifier); + # Other Letter - Signified by the Unicode designation "Lo" (letter, other); + # Decimal Digit Number - Signified by the Unicode designation "Nd" (number, decimal digit). + if self.resource_group is not None: + if not isinstance(self.resource_group, str): + raise ValueError("Resource group name must be a string.") + + if len(self.resource_group) < 1 or len(self.resource_group) > 90: + raise ValueError( + "Resource group name must be between 1 and 90 characters long." + ) + + err_msg = "Resource group name can only include alphanumeric, underscore, parentheses, hyphen, period (except at end), and Unicode characters that match the allowed characters." 
+ if self.resource_group.endswith('.'): + raise ValueError(err_msg) + + import unicodedata + for i, char in enumerate(self.resource_group): + category = unicodedata.category(char) + if not ( + char in ('_', '(', ')', '-', '.') or + category in ('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nd') + ): + raise ValueError(err_msg) + + def _validate_workspace_name(self): + # Validate workspace name, see https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/resource-name-rules#microsoftquantum + # Length 2-54, valid characters: alphanumerics (a-zA-Z0-9) and hyphens, can't start or end with hyphen + if self.workspace_name is not None: + if not isinstance(self.workspace_name, str): + raise ValueError("Workspace name must be a string.") + + if len(self.workspace_name) < 2 or len(self.workspace_name) > 54: + raise ValueError( + "Workspace name must be between 2 and 54 characters long." + ) + + err_msg = "Workspace name can only include alphanumerics (a-zA-Z0-9) and hyphens, and cannot start or end with hyphen." + + if self.workspace_name.startswith('-') or self.workspace_name.endswith('-'): + raise ValueError(err_msg) + + if not re.match(VALID_WORKSPACE_NAME_PATTERN, self.workspace_name): + raise ValueError(err_msg) + + def _validate_location(self): + # Validate that location is one of the Azure regions https://learn.microsoft.com/en-us/azure/reliability/regions-list + if self.location is not None: + if not isinstance(self.location, str): + raise ValueError("Location must be a string.") + if self.location not in VALID_AZURE_REGIONS: + raise ValueError(f"Location must be one of the Azure regions listed in https://learn.microsoft.com/en-us/azure/reliability/regions-list.") @property def location(self): @@ -144,19 +237,8 @@ def environment(self, value: Union[str, EnvironmentKind]): def quantum_endpoint(self): """ The Azure Quantum data plane endpoint. - Defaults to well-known endpoint based on the environment. 
- """ - if self._quantum_endpoint: - return self._quantum_endpoint - if not self.location: - raise ValueError("Location not specified") - if self.environment is EnvironmentKind.PRODUCTION: - return ConnectionConstants.GET_QUANTUM_PRODUCTION_ENDPOINT(self.location) - if self.environment is EnvironmentKind.CANARY: - return ConnectionConstants.GET_QUANTUM_CANARY_ENDPOINT(self.location) - if self.environment is EnvironmentKind.DOGFOOD: - return ConnectionConstants.GET_QUANTUM_DOGFOOD_ENDPOINT(self.location) - raise ValueError(f"Unknown environment `{self.environment}`.") + """ + return self._quantum_endpoint @quantum_endpoint.setter def quantum_endpoint(self, value: str): @@ -196,6 +278,19 @@ def api_key(self, value: str): self.credential = AzureKeyCredential(value) self._api_key = value + @property + def workspace_kind(self) -> WorkspaceKind: + """ + The workspace kind, such as V1 or V2. + Defaults to WorkspaceKind.V1 + """ + return self._workspace_kind or WorkspaceKind.V1 + + @workspace_kind.setter + def workspace_kind(self, value: str): + if isinstance(value, str): + self._workspace_kind = WorkspaceKind[value.upper()] + def __repr__(self): """ Print all fields and properties. 
@@ -237,6 +332,7 @@ def apply_connection_string(self, connection_string: str): if not match: raise ValueError("Invalid connection string") self._merge_re_match(match) + self._used_connection_string = True def merge( self, @@ -254,6 +350,7 @@ def merge( client_id: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, + workspace_kind: Optional[str] = None, ): """ Set all fields/properties with `not None` values @@ -275,6 +372,7 @@ def merge( user_agent_app_id=user_agent_app_id, workspace_name=workspace_name, api_key=api_key, + workspace_kind=workspace_kind, merge_default_mode=False, ) return self @@ -295,6 +393,7 @@ def apply_defaults( client_id: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, + workspace_kind: Optional[str] = None, ) -> WorkspaceConnectionParams: """ Set all fields/properties with `not None` values @@ -317,6 +416,7 @@ def apply_defaults( user_agent_app_id=user_agent_app_id, workspace_name=workspace_name, api_key=api_key, + workspace_kind=workspace_kind, merge_default_mode=True, ) return self @@ -338,6 +438,7 @@ def _merge( client_id: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, + workspace_kind: Optional[str] = None, ): """ Set all fields/properties with `not None` values @@ -370,6 +471,7 @@ def _get_value_or_default(old_value, new_value): # the private field as the old_value self.quantum_endpoint = _get_value_or_default(self._quantum_endpoint, quantum_endpoint) self.arm_endpoint = _get_value_or_default(self._arm_endpoint, arm_endpoint) + self.workspace_kind = _get_value_or_default(self._workspace_kind, workspace_kind) return self def _merge_connection_params( @@ -399,19 +501,17 @@ def _merge_connection_params( # pylint: disable=protected-access arm_endpoint=connection_params._arm_endpoint, quantum_endpoint=connection_params._quantum_endpoint, + workspace_kind=connection_params._workspace_kind, ) return self def 
get_credential_or_default(self) -> Any: """ Get the credential if one was set, - or defaults to a new _DefaultAzureCredential. + or defaults to a new DefaultAzureCredential. """ return (self.credential - or _DefaultAzureCredential( - subscription_id=self.subscription_id, - arm_endpoint=self.arm_endpoint, - tenant_id=self.tenant_id)) + or DefaultAzureCredential(exclude_interactive_browser_credential=False)) def get_auth_policy(self) -> Any: """ @@ -426,7 +526,7 @@ def get_auth_policy(self) -> Any: def append_user_agent(self, value: str): """ Append a new value to the Workspace's UserAgent and re-initialize the - QuantumClient. The values are appended using a dash. + WorkspaceClient. The values are appended using a dash. :param value: UserAgent value to add, e.g. "azure-quantum-" """ @@ -455,6 +555,32 @@ def get_full_user_agent(self): full_user_agent = (f"{app_id} {full_user_agent}" if full_user_agent else app_id) return full_user_agent + + def have_enough_for_discovery(self) -> bool: + """ + Returns true if we have enough parameters + to try to find the Azure Quantum Workspace. + """ + return (self.workspace_name + and self.get_credential_or_default()) + + def assert_have_enough_for_discovery(self): + """ + Raises ValueError if we don't have enough parameters + to try to find the Azure Quantum Workspace. + """ + if not self.have_enough_for_discovery(): + raise ValueError(self.WORKSPACE_NOT_FULLY_SPECIFIED_MSG) + + def can_build_resource_id(self) -> bool: + """ + Returns true if we have all necessary parameters + to identify the Azure Quantum Workspace resource. 
+ """ + return (self.subscription_id + and self.resource_group + and self.workspace_name + and self.get_credential_or_default()) def is_complete(self) -> bool: """ @@ -465,6 +591,7 @@ def is_complete(self) -> bool: and self.subscription_id and self.resource_group and self.workspace_name + and self.quantum_endpoint and self.get_credential_or_default()) def assert_complete(self): @@ -473,15 +600,7 @@ def assert_complete(self): to connect to the Azure Quantum Workspace. """ if not self.is_complete(): - raise ValueError( - """ - Azure Quantum workspace not fully specified. - Please specify one of the following: - 1) A valid combination of location and resource ID. - 2) A valid combination of location, subscription ID, - resource group name, and workspace name. - 3) A valid connection string (via Workspace.from_connection_string()). - """) + raise ValueError(self.WORKSPACE_NOT_FULLY_SPECIFIED_MSG) def default_from_env_vars(self) -> WorkspaceConnectionParams: """ @@ -517,10 +636,13 @@ def default_from_env_vars(self) -> WorkspaceConnectionParams: or not self.workspace_name or not self.credential ): - self._merge_connection_params( - connection_params=WorkspaceConnectionParams( - connection_string=os.environ.get(EnvironmentVariables.CONNECTION_STRING)), - merge_default_mode=True) + env_connection_string = os.environ.get(EnvironmentVariables.CONNECTION_STRING) + if env_connection_string: + self._merge_connection_params( + connection_params=WorkspaceConnectionParams( + connection_string=env_connection_string), + merge_default_mode=True) + self._used_connection_string = True return self @classmethod @@ -544,4 +666,5 @@ def get_value(group_name): quantum_endpoint=get_value('quantum_endpoint'), api_key=get_value('api_key'), arm_endpoint=get_value('arm_endpoint'), + workspace_kind=get_value('workspace_kind'), ) diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/storage.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/storage.py index 
e3675502841..2b8dee60cca 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/storage.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/storage.py @@ -91,10 +91,10 @@ def upload_blob( """ Uploads the given data to a blob record. If a blob with the given name already exist, it throws an error. + Container must already exist in a storage. Returns a uri with a SAS token to access the newly created blob. """ - create_container_using_client(container) logger.info( f"Uploading blob '{blob_name}'" + f"to container '{container.container_name}'" @@ -131,10 +131,10 @@ def append_blob( """ Uploads the given data to a blob record. If a blob with the given name already exist, it throws an error. + Container must already exist in a storage. Returns a uri with a SAS token to access the newly created blob. """ - create_container_using_client(container) logger.info( f"Appending data to blob '{blob_name}'" + f"in container '{container.container_name}'" @@ -250,10 +250,10 @@ def init_blob_for_streaming_upload( """ Uploads the given data to a blob record. If a blob with the given name already exist, it throws an error. + Container must already exist in a storage. Returns a uri with a SAS token to access the newly created blob. """ - create_container_using_client(container) logger.info( f"Streaming blob '{blob_name}'" + f"to container '{container.container_name}' on account:" @@ -305,7 +305,8 @@ class StreamedBlob: Once all blocks have been added, call `commit()` to commit the blocks and make the blob available/readable. - :param container: The container client that the blob will be uploaded to + :param container: The container client that the blob will be uploaded to. + Container must already exist in a storage. 
:param blob_name: The name of the blob (including optional path) within the blob container :param content_type: The HTTP content type to apply to the blob metadata @@ -335,14 +336,11 @@ def upload_data(self, data): :param data: The data to be uploaded as a block. :type data: Union[Iterable[AnyStr], IO[AnyStr]] """ - if self.state == StreamedBlobState.not_initialized: - create_container_using_client(self.container) - logger.info( - f"Streaming blob '{self.blob_name}' to container" - + f"'{self.container.container_name}'" - + f"on account: '{self.container.account_name}'" - ) - self.initialized = True + logger.info( + f"Streaming blob '{self.blob_name}' to container" + + f"'{self.container.container_name}'" + + f"on account: '{self.container.account_name}'" + ) self.state = StreamedBlobState.uploading id = self._get_next_block_id() @@ -382,4 +380,4 @@ def getUri(self, with_sas_token: bool = False): return remove_sas_token(self.blob.url) def _get_next_block_id(self): - return f"{len(self.blocks):10}" + return f"{len(self.blocks):10}" \ No newline at end of file diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/version.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/version.py index 808b6836058..695361d0ee8 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/version.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/version.py @@ -5,4 +5,4 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
## -__version__ = "2.2.0" +__version__ = "3.6.0" diff --git a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/workspace.py b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/workspace.py index f43961acd58..a8f1277ed66 100644 --- a/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/workspace.py +++ b/src/quantum/azext_quantum/vendored_sdks/azure_quantum_python/workspace.py @@ -10,6 +10,7 @@ from __future__ import annotations from datetime import datetime import logging +# from urllib.parse import quote from typing import ( Any, Dict, @@ -20,15 +21,19 @@ Tuple, Union, ) -# from azure.quantum._client import QuantumClient -from ._client import QuantumClient -# from azure.quantum._client.operations import ( -# JobsOperations, -# StorageOperations, -# QuotasOperations, -# SessionsOperations, -# TopLevelItemsOperations -# ) +from typing_extensions import Self +# from azure.core.paging import ItemPaged +# from azure.quantum._client import WorkspaceClient +from ._client import WorkspaceClient +# from azure.quantum._client.models import JobDetails, ItemDetails, SessionDetails +# from azure.quantum._client.operations._operations import ( +from ._client.operations._operations import ( + # ServicesJobsOperations, + ServicesStorageOperations, + # ServicesQuotasOperations, + # ServicesSessionsOperations, + # ServicesTopLevelItemsOperations +) # from azure.quantum._client.models import ( from ._client.models import ( BlobDetails, @@ -47,14 +52,13 @@ ) # from azure.quantum.storage import ( from .storage import ( - create_container_using_client, get_container_uri, - ContainerClient ) +# from azure.quantum._mgmt_client import WorkspaceMgmtClient +from ._mgmt_client import WorkspaceMgmtClient # if TYPE_CHECKING: # from azure.quantum.target import Target - logger = logging.getLogger(__name__) __all__ = ["Workspace"] @@ -65,10 +69,11 @@ class Workspace: """ Represents an Azure Quantum workspace. 
- When creating a Workspace object, callers have two options for + When creating a Workspace object, callers have several options for identifying the Azure Quantum workspace (in order of precedence): - 1. specify a valid location and resource ID; or - 2. specify a valid location, subscription ID, resource group, and workspace name. + 1. specify a valid resource ID; or + 2. specify a valid subscription ID, resource group, and workspace name; or + 3. specify a valid workspace name. You can also use a connection string to specify the connection parameters to an Azure Quantum Workspace by calling @@ -113,6 +118,13 @@ class Workspace: Add the specified value as a prefix to the HTTP User-Agent header when communicating to the Azure Quantum service. """ + + # Internal parameter names + _FROM_CONNECTION_STRING_PARAM = '_from_connection_string' + _QUANTUM_ENDPOINT_PARAM = '_quantum_endpoint' + _WORKSPACE_KIND_PARAM = '_workspace_kind' + _MGMT_CLIENT_PARAM = '_mgmt_client' + def __init__( self, subscription_id: Optional[str] = None, @@ -125,6 +137,15 @@ def __init__( user_agent: Optional[str] = None, **kwargs: Any, ) -> None: + # Extract internal params before passing kwargs to WorkspaceConnectionParams + # Param to track whether the workspace was created from a connection string + from_connection_string = kwargs.pop(Workspace._FROM_CONNECTION_STRING_PARAM, False) + # In case from connection string, quantum_endpoint must be passed + quantum_endpoint = kwargs.pop(Workspace._QUANTUM_ENDPOINT_PARAM, None) + workspace_kind = kwargs.pop(Workspace._WORKSPACE_KIND_PARAM, None) + # Params to pass a mock in tests + self._mgmt_client = kwargs.pop(Workspace._MGMT_CLIENT_PARAM, None) + connection_params = WorkspaceConnectionParams( location=location, subscription_id=subscription_id, @@ -132,20 +153,48 @@ def __init__( workspace_name=name, credential=credential, resource_id=resource_id, + quantum_endpoint=quantum_endpoint, user_agent=user_agent, + workspace_kind=workspace_kind, **kwargs 
).default_from_env_vars() logger.info("Using %s environment.", connection_params.environment) - connection_params.assert_complete() + connection_params.assert_have_enough_for_discovery() connection_params.on_new_client_request = self._on_new_client_request self._connection_params = connection_params self._storage = storage - # Create QuantumClient + if not self._mgmt_client: + credential = connection_params.get_credential_or_default() + self._mgmt_client = WorkspaceMgmtClient( + credential=credential, + base_url=connection_params.arm_endpoint, + user_agent=connection_params.get_full_user_agent(), + ) + + # pylint: disable=protected-access + using_connection_string = ( + from_connection_string + or connection_params._used_connection_string + ) + + # Populate workspace details from ARG if not using connection string and + # name is provided but missing subscription and/or resource group + if not using_connection_string \ + and not connection_params.can_build_resource_id(): + self._mgmt_client.load_workspace_from_arg(connection_params) + + # Populate workspace details from ARM if not using connection string and not loaded from ARG + if not using_connection_string and not connection_params.is_complete(): + self._mgmt_client.load_workspace_from_arm(connection_params) + + connection_params.assert_complete() + + # Create WorkspaceClient self._client = self._create_client() def _on_new_client_request(self) -> None: @@ -168,35 +217,35 @@ def _on_new_client_request(self) -> None: # """ # return self._connection_params.location - # @property - # def subscription_id(self) -> str: - # """ - # Returns the Azure Subscription ID of the Quantum Workspace. + @property + def subscription_id(self) -> str: + """ + Returns the Azure Subscription ID of the Quantum Workspace. - # :return: Azure Subscription ID. - # :rtype: str - # """ - # return self._connection_params.subscription_id + :return: Azure Subscription ID. 
+ :rtype: str + """ + return self._connection_params.subscription_id - # @property - # def resource_group(self) -> str: - # """ - # Returns the Azure Resource Group of the Quantum Workspace. + @property + def resource_group(self) -> str: + """ + Returns the Azure Resource Group of the Quantum Workspace. - # :return: Azure Resource Group name. - # :rtype: str - # """ - # return self._connection_params.resource_group + :return: Azure Resource Group name. + :rtype: str + """ + return self._connection_params.resource_group - # @property - # def name(self) -> str: - # """ - # Returns the Name of the Quantum Workspace. + @property + def name(self) -> str: + """ + Returns the Name of the Quantum Workspace. - # :return: Azure Quantum Workspace name. - # :rtype: str - # """ - # return self._connection_params.workspace_name + :return: Azure Quantum Workspace name. + :rtype: str + """ + return self._connection_params.workspace_name # @property # def credential(self) -> Any: @@ -218,23 +267,23 @@ def storage(self) -> str: """ return self._storage - def _create_client(self) -> QuantumClient: + def _create_client(self) -> WorkspaceClient: """" An internal method to (re)create the underlying Azure SDK REST API client. :return: Azure SDK REST API client for Azure Quantum. 
- :rtype: QuantumClient + :rtype: WorkspaceClient """ connection_params = self._connection_params kwargs = {} if connection_params.api_version: kwargs["api_version"] = connection_params.api_version - client = QuantumClient( + client = WorkspaceClient( + region=connection_params.location, credential=connection_params.get_credential_or_default(), subscription_id=connection_params.subscription_id, resource_group_name=connection_params.resource_group, workspace_name=connection_params.workspace_name, - azure_region=connection_params.location, user_agent=connection_params.get_full_user_agent(), credential_scopes = [ConnectionConstants.DATA_PLANE_CREDENTIAL_SCOPE], endpoint=connection_params.quantum_endpoint, @@ -277,6 +326,9 @@ def _create_client(self) -> QuantumClient: # :rtype: Workspace # """ # connection_params = WorkspaceConnectionParams(connection_string=connection_string) + # kwargs[cls._FROM_CONNECTION_STRING_PARAM] = True + # kwargs[cls._QUANTUM_ENDPOINT_PARAM] = connection_params.quantum_endpoint + # kwargs[cls._WORKSPACE_KIND_PARAM] = connection_params.workspace_kind.value if connection_params.workspace_kind else None # return cls( # subscription_id=connection_params.subscription_id, # resource_group=connection_params.resource_group, @@ -285,55 +337,55 @@ def _create_client(self) -> QuantumClient: # credential=connection_params.get_credential_or_default(), # **kwargs) - # def _get_top_level_items_client(self) -> TopLevelItemsOperations: + # def _get_top_level_items_client(self) -> ServicesTopLevelItemsOperations: # """ # Returns the internal Azure SDK REST API client # for the `{workspace}/topLevelItems` API. # :return: REST API client for the `topLevelItems` API. 
- # :rtype: TopLevelItemsOperations + # :rtype: ServicesTopLevelItemsOperations # """ - # return self._client.top_level_items + # return self._client.services.top_level_items - # def _get_sessions_client(self) -> SessionsOperations: + # def _get_sessions_client(self) -> ServicesSessionsOperations: # """ # Returns the internal Azure SDK REST API client # for the `{workspace}/sessions` API. # :return: REST API client for the `sessions` API. - # :rtype: SessionsOperations + # :rtype: ServicesSessionsOperations # """ - # return self._client.sessions + # return self._client.services.sessions - # def _get_jobs_client(self) -> JobsOperations: + # def _get_jobs_client(self) -> ServicesJobsOperations: # """ # Returns the internal Azure SDK REST API client # for the `{workspace}/jobs` API. # :return: REST API client for the `jobs` API. - # :rtype: JobsOperations + # :rtype: ServicesJobsOperations # """ - # return self._client.jobs + # return self._client.services.jobs - def _get_workspace_storage_client(self) -> StorageOperations: + def _get_workspace_storage_client(self) -> ServicesStorageOperations: """ Returns the internal Azure SDK REST API client for the `{workspace}/storage` API. :return: REST API client for the `storage` API. - :rtype: StorageOperations + :rtype: ServicesStorageOperations """ - return self._client.storage + return self._client.services.storage - # def _get_quotas_client(self) -> QuotasOperations: + # def _get_quotas_client(self) -> ServicesQuotasOperations: # """ # Returns the internal Azure SDK REST API client # for the `{workspace}/quotas` API. # :return: REST API client for the `quotas` API. 
- # :rtype: QuotasOperations + # :rtype: ServicesQuotasOperations # """ - # return self._client.quotas + # return self._client.services.quotas def _get_linked_storage_sas_uri( self, @@ -357,7 +409,11 @@ def _get_linked_storage_sas_uri( blob_details = BlobDetails( container_name=container_name, blob_name=blob_name ) - container_uri = client.sas_uri(blob_details=blob_details) + container_uri = client.get_sas_uri( + self.subscription_id, + self.resource_group, + self.name, + blob_details=blob_details) logger.debug("Container URI from service: %s", container_uri) return container_uri.sas_uri @@ -374,7 +430,11 @@ def _get_linked_storage_sas_uri( # """ # client = self._get_jobs_client() # details = client.create( - # job.details.id, job.details + # self.subscription_id, + # self.resource_group, + # self.name, + # job.details.id, + # job.details # ) # return Job(self, details) @@ -390,8 +450,16 @@ def _get_linked_storage_sas_uri( # :rtype: Job # """ # client = self._get_jobs_client() - # client.cancel(job.details.id) - # details = client.get(job.id) + # client.delete( + # self.subscription_id, + # self.resource_group, + # self.name, + # job.details.id) + # details = client.get( + # self.subscription_id, + # self.resource_group, + # self.name, + # job.id) # return Job(self, details) # def get_job(self, job_id: str) -> Job: @@ -409,7 +477,11 @@ def _get_linked_storage_sas_uri( # from azure.quantum.target import Target # client = self._get_jobs_client() - # details = client.get(job_id) + # details = client.get( + # self.subscription_id, + # self.resource_group, + # self.name, + # job_id) # target_factory = TargetFactory(base_cls=Target, workspace=self) # # pylint: disable=protected-access # target_cls = target_factory._target_cls( @@ -421,8 +493,14 @@ def _get_linked_storage_sas_uri( # def list_jobs( # self, # name_match: Optional[str] = None, - # status: Optional[JobStatus] = None, - # created_after: Optional[datetime] = None + # job_type: Optional[list[str]]= None, + # 
provider: Optional[list[str]]= None, + # target: Optional[list[str]]= None, + # status: Optional[list[JobStatus]] = None, + # created_after: Optional[datetime] = None, + # created_before: Optional[datetime] = None, + # orderby_property: Optional[str] = None, + # is_asc: Optional[bool] = True # ) -> List[Job]: # """ # Returns list of jobs that meet optional (limited) filter criteria. @@ -439,17 +517,54 @@ def _get_linked_storage_sas_uri( # :return: Jobs that matched the search criteria. # :rtype: typing.List[Job] # """ - # client = self._get_jobs_client() - # jobs = client.list() + # paginator = self.list_jobs_paginated ( + # name_match=name_match, + # job_type=job_type, + # provider=provider, + # target=target, + # status=status, + # created_after=created_after, + # created_before=created_before, + # orderby_property=orderby_property, + # is_asc=is_asc) # result = [] - # for j in jobs: + # for j in paginator: # deserialized_job = Job(self, j) - # if deserialized_job.matches_filter(name_match, status, created_after): - # result.append(deserialized_job) + # result.append(deserialized_job) # return result + # def list_jobs_paginated( + # self, + # *, + # name_match: Optional[str] = None, + # job_type: Optional[str]= None, + # provider: Optional[list[str]]= None, + # target: Optional[list[str]]= None, + # status: Optional[list[JobStatus]] = None, + # created_after: Optional[datetime] = None, + # created_before: Optional[datetime] = None, + # skip: Optional[int] = 0, + # top: Optional[int]=100, + # orderby_property: Optional[str] = None, + # is_asc: Optional[bool] = True + # ) -> ItemPaged[JobDetails]: + # client = self._get_jobs_client() + + # job_filter = self._create_filter( + # job_name=name_match, + # job_type=job_type, + # provider_ids=provider, + # target=target, + # status=status, + # created_after=created_after, + # created_before=created_before + # ) + # orderby = self._create_orderby(orderby_property, is_asc) + + # return 
client.list(subscription_id=self.subscription_id, resource_group_name=self.resource_group, workspace_name=self.name, filter=job_filter, orderby=orderby, top = top, skip = skip) + # def _get_target_status( # self, # name: Optional[str] = None, @@ -470,7 +585,10 @@ def _get_linked_storage_sas_uri( # """ # return [ # (provider.id, target) - # for provider in self._client.providers.get_status() + # for provider in self._client.services.providers.list( + # self.subscription_id, + # self.resource_group, + # self.name) # for target in provider.targets # if (provider_id is None or provider.id.lower() == provider_id.lower()) # and (name is None or target.id.lower() == name.lower()) @@ -526,10 +644,24 @@ def _get_linked_storage_sas_uri( # :rtype: typing.List[typing.Dict[str, typing.Any] # """ # client = self._get_quotas_client() - # return [q.as_dict() for q in client.list()] + # return [q.as_dict() for q in client.list( + # self.subscription_id, + # self.resource_group, + # self.name + # )] # def list_top_level_items( - # self + # self, + # name_match: Optional[str] = None, + # item_type: Optional[list[str]]= None, + # job_type: Optional[list[str]]= None, + # provider: Optional[list[str]]= None, + # target: Optional[list[str]]= None, + # status: Optional[list[JobStatus]] = None, + # created_after: Optional[datetime] = None, + # created_before: Optional[datetime] = None, + # orderby_property: Optional[str] = None, + # is_asc: Optional[bool] = True # ) -> List[Union[Job, Session]]: # """ # Get a list of top level items for the given workspace, @@ -539,14 +671,64 @@ def _get_linked_storage_sas_uri( # :return: List of Workspace top level Jobs or Sessions. 
# :rtype: typing.List[typing.Union[Job, Session]] # """ - # client = self._get_top_level_items_client() - # item_details_list = client.list() + # paginator = self.list_top_level_items_paginated( + # name_match=name_match, + # item_type=item_type, + # job_type=job_type, + # provider=provider, + # target=target, + # status=status, + # created_after=created_after, + # created_before=created_before, + # orderby_property=orderby_property, + # is_asc=is_asc + # ) + # result = [WorkspaceItemFactory.__new__(workspace=self, item_details=item_details) - # for item_details in item_details_list] + # for item_details in paginator] # return result + # def list_top_level_items_paginated( + # self, + # *, + # name_match: Optional[str] = None, + # item_type: Optional[str]= None, + # job_type: Optional[str]= None, + # provider: Optional[list[str]]= None, + # target: Optional[list[str]]= None, + # status: Optional[list[JobStatus]] = None, + # created_after: Optional[datetime] = None, + # created_before: Optional[datetime] = None, + # skip: Optional[int] = 0, + # top: Optional[int]=100, + # orderby_property: Optional[str] = None, + # is_asc: Optional[bool] = True + # ) -> ItemPaged[ItemDetails]: + # client = self._get_top_level_items_client() + + # top_level_item_filter = self._create_filter( + # job_name=name_match, + # item_type=item_type, + # job_type=job_type, + # provider_ids=provider, + # target=target, + # status=status, + # created_after=created_after, + # created_before=created_before + # ) + # orderby = self._create_orderby(orderby_property, is_asc) + + # return client.listv2(subscription_id=self.subscription_id, resource_group_name=self.resource_group, workspace_name=self.name, filter=top_level_item_filter, orderby=orderby, top = top, skip = skip) + # def list_sessions( - # self + # self, + # provider: Optional[list[str]]= None, + # target: Optional[list[str]]= None, + # status: Optional[list[JobStatus]] = None, + # created_after: Optional[datetime] = None, + # 
created_before: Optional[datetime] = None, + # orderby_property: Optional[str] = None, + # is_asc: Optional[bool] = True # ) -> List[Session]: # """ # Get the list of sessions in the given workspace. @@ -554,11 +736,50 @@ def _get_linked_storage_sas_uri( # :return: List of Workspace Sessions. # :rtype: typing.List[Session] # """ - # client = self._get_sessions_client() - # session_details_list = client.list() + # paginator = self.list_sessions_paginated( + # provider=provider, + # target=target, + # status=status, + # created_after=created_after, + # created_before=created_before, + # orderby_property=orderby_property, + # is_asc=is_asc) + # result = [Session(workspace=self,details=session_details) - # for session_details in session_details_list] + # for session_details in paginator] # return result + + # def list_sessions_paginated( + # self, + # *, + # provider: Optional[list[str]]= None, + # target: Optional[list[str]]= None, + # status: Optional[list[JobStatus]] = None, + # created_after: Optional[datetime] = None, + # created_before: Optional[datetime] = None, + # skip: Optional[int] = 0, + # top: Optional[int]=100, + # orderby_property: Optional[str] = None, + # is_asc: Optional[bool] = True + # ) -> ItemPaged[SessionDetails]: + # """ + # Get the list of sessions in the given workspace. + + # :return: List of Workspace Sessions. 
+ # :rtype: typing.List[Session] + # """ + # client = self._get_sessions_client() + # session_filter = self._create_filter( + # provider_ids=provider, + # target=target, + # status=status, + # created_after=created_after, + # created_before=created_before + # ) + + # orderby = self._create_orderby(orderby_property=orderby_property, is_asc=is_asc) + + # return client.listv2(subscription_id=self.subscription_id, resource_group_name=self.resource_group, workspace_name=self.name, filter = session_filter, orderby=orderby, skip=skip, top=top) # def open_session( # self, @@ -575,8 +796,11 @@ def _get_linked_storage_sas_uri( # """ # client = self._get_sessions_client() # session.details = client.open( - # session_id=session.id, - # session=session.details) + # self.subscription_id, + # self.resource_group, + # self.name, + # session.id, + # session.details) # def close_session( # self, @@ -592,9 +816,17 @@ def _get_linked_storage_sas_uri( # """ # client = self._get_sessions_client() # if not session.is_in_terminal_state(): - # session.details = client.close(session_id=session.id) + # session.details = client.close( + # self.subscription_id, + # self.resource_group, + # self.name, + # session_id=session.id) # else: - # session.details = client.get(session_id=session.id) + # session.details = client.get( + # self.subscription_id, + # self.resource_group, + # self.name, + # session_id=session.id) # if session.target: # if (session.target.latest_session @@ -628,13 +860,21 @@ def _get_linked_storage_sas_uri( # :rtype: Session # """ # client = self._get_sessions_client() - # session_details = client.get(session_id=session_id) + # session_details = client.get( + # self.subscription_id, + # self.resource_group, + # self.name, + # session_id=session_id) # result = Session(workspace=self, details=session_details) # return result # def list_session_jobs( # self, - # session_id: str + # session_id: str, + # name_match: Optional[str] = None, + # status: Optional[list[JobStatus]] = 
None, + # orderby_property: Optional[str] = None, + # is_asc: Optional[bool] = True # ) -> List[Job]: # """ # Gets all jobs associated with a session. @@ -645,12 +885,48 @@ def _get_linked_storage_sas_uri( # :return: List of all jobs associated with a session. # :rtype: typing.List[Job] # """ - # client = self._get_sessions_client() - # job_details_list = client.jobs_list(session_id=session_id) + # paginator = self.list_session_jobs_paginated( + # session_id=session_id, + # name_match=name_match, + # status=status, + # orderby_property=orderby_property, + # is_asc=is_asc) + # result = [Job(workspace=self, job_details=job_details) - # for job_details in job_details_list] + # for job_details in paginator] # return result + # def list_session_jobs_paginated( + # self, + # *, + # session_id: str, + # name_match: Optional[str] = None, + # status: Optional[list[JobStatus]] = None, + # skip: Optional[int] = 0, + # top: Optional[int]=100, + # orderby_property: Optional[str] = None, + # is_asc: Optional[bool] = True + # ) -> ItemPaged[JobDetails]: + # """ + # Gets all jobs associated with a session. + + # :param session_id: + # The id of session. + + # :return: List of all jobs associated with a session. 
+ # :rtype: typing.List[Job] + # """ + # client = self._get_sessions_client() + + # session_job_filter = self._create_filter( + # job_name=name_match, + # status=status + # ) + + # orderby = self._create_orderby(orderby_property=orderby_property, is_asc=is_asc) + + # return client.jobs_list(subscription_id=self.subscription_id, resource_group_name=self.resource_group, workspace_name=self.name, session_id=session_id, filter = session_job_filter, orderby=orderby, skip=skip, top=top) + def get_container_uri( self, job_id: Optional[str] = None, @@ -680,15 +956,11 @@ def get_container_uri( container_name = f"{self.name}-data" # Create container URI and get container client if self.storage is None: - # Get linked storage account from the service, create - # a new container if it does not yet exist + # Get linked storage account from the service, a new container + # is created by the service if it does not yet exist container_uri = self._get_linked_storage_sas_uri( container_name ) - container_client = ContainerClient.from_container_url( - container_uri - ) - create_container_using_client(container_client) else: # Use the storage acount specified to generate container URI, # create a new container if it does not yet exist @@ -696,3 +968,119 @@ def get_container_uri( self.storage, container_name ) return container_uri + + # def _create_filter(self, + # job_name: Optional[str] = None, + # item_type: Optional[List[str]] = None, + # job_type: Optional[List[str]] = None, + # provider_ids: Optional[List[str]] = None, + # target: Optional[List[str]] = None, + # status: Optional[List[str]] = None, + # created_after: Optional[datetime] = None, + # created_before: Optional[datetime] = None,) -> str: + # has_filter = False + # filter_string = "" + + # if job_name: + # filter_string += f"startswith(Name, '{job_name}')" + # has_filter = True + + # if (item_type is not None and len(item_type) != 0): + # if has_filter: + # filter_string += " and " + + # filter_string += "(" + + # 
item_type_filter = " or ".join([f"ItemType eq '{iid}'" for iid in item_type]) + + # filter_string += f"{item_type_filter})" + # has_filter = True + + # if (job_type is not None and len(job_type) != 0): + # if has_filter: + # filter_string += " and " + + # filter_string += "(" + + # job_type_filter = " or ".join([f"JobType eq '{jid}'" for jid in job_type]) + + # filter_string += f"{job_type_filter})" + # has_filter = True + + # if (provider_ids is not None and len(provider_ids) != 0): + # if has_filter: + # filter_string += " and " + + # filter_string += "(" + + # provider_filter = " or ".join([f"ProviderId eq '{pid}'" for pid in provider_ids]) + + # filter_string += f"{provider_filter})" + # has_filter = True + + # if (target is not None and len(target) != 0): + # if has_filter: + # filter_string += " and " + + # filter_string += "(" + + # target_filter = " or ".join([f"Target eq '{tid}'" for tid in target]) + + # filter_string += f"{target_filter})" + # has_filter = True + + # if (status is not None and len(status) != 0): + # if has_filter: + # filter_string += " and " + + # filter_string += "(" + + # status_filter = " or ".join([f"State eq '{sid}'" for sid in status]) + + # filter_string += f"{status_filter})" + # has_filter = True + + # if created_after is not None: + # if has_filter: + # filter_string += " and " + + # iso_date_string = created_after.date().isoformat() + # filter_string += f"CreationTime ge {iso_date_string}" + + # if created_before is not None: + # if has_filter: + # filter_string += " and " + + # iso_date_string = created_before.date().isoformat() + # filter_string += f"CreationTime le {iso_date_string}" + + # if filter_string: + # return filter_string + # else: + # return None + + # def _create_orderby(self, orderby_property: str, is_asc: bool) -> str: + # if orderby_property: + # var_names = ["Name", "ItemType", "JobType", "ProviderId", "Target", "State", "CreationTime"] + + # if orderby_property in var_names: + # orderby = 
f"{orderby_property} asc" if is_asc else f"{orderby_property} desc" + # else: + # raise ValueError(f"Invalid orderby property: {orderby_property}") + + # return orderby + # else: + # return None + + def close(self) -> None: + self._mgmt_client.close() + self._client.close() + + def __enter__(self) -> Self: + self._client.__enter__() + self._mgmt_client.__enter__() + return self + + def __exit__(self, *exc_details: Any) -> None: + self._mgmt_client.__exit__(*exc_details) + self._client.__exit__(*exc_details) diff --git a/src/quantum/setup.py b/src/quantum/setup.py index efe8e93d500..fbbfaffc3e7 100644 --- a/src/quantum/setup.py +++ b/src/quantum/setup.py @@ -17,7 +17,7 @@ # This version should match the latest entry in HISTORY.rst # Also, when updating this, please review the version used by the extension to # submit requests, which can be found at './azext_quantum/__init__.py' -VERSION = '1.0.0b11' +VERSION = '1.0.0b12' # The full list of classifiers is available at # https://pypi.python.org/pypi?%3Aaction=list_classifiers