diff --git a/src/loganalytics/HISTORY.rst b/src/loganalytics/HISTORY.rst new file mode 100644 index 00000000000..1c139576ba0 --- /dev/null +++ b/src/loganalytics/HISTORY.rst @@ -0,0 +1,8 @@ +.. :changelog: + +Release History +=============== + +0.1.0 +++++++ +* Initial release. diff --git a/src/loganalytics/README.md b/src/loganalytics/README.md new file mode 100644 index 00000000000..6cbbe3ff037 --- /dev/null +++ b/src/loganalytics/README.md @@ -0,0 +1,273 @@ +# Azure CLI loganalytics Extension # +This is the extension for loganalytics + +### How to use ### +Install this extension using the below CLI command +``` +az extension add --name loganalytics +``` + +### Included Features ### +#### loganalytics data-export #### +##### Create ##### +``` +az loganalytics data-export create --name "export1" \ + --resource-id "/subscriptions/192b9f85-a39a-4276-b96d-d5cd351703f9/resourceGroups/OIAutoRest1234/providers/Microsoft.EventHub/namespaces/test" \ + --table-names "Heartbeat" --resource-group "RgTest1" --workspace-name "DeWnTest1234" +``` +##### Show ##### +``` +az loganalytics data-export show --name "export1" --resource-group "RgTest1" --workspace-name "DeWnTest1234" +``` +##### List ##### +``` +az loganalytics data-export list --resource-group "RgTest1" --workspace-name "DeWnTest1234" +``` +##### Delete ##### +``` +az loganalytics data-export delete --name "export1" --resource-group "RgTest1" --workspace-name "DeWnTest1234" +``` +#### loganalytics data-source #### +##### Create ##### +``` +az loganalytics data-source create --name "AzTestDS774" --kind "AzureActivityLog" \ + --properties "{\\"LinkedResourceId\\":\\"/subscriptions/00000000-0000-0000-0000-00000000000/providers/microsoft.insights/eventtypes/management\\"}" \ + --resource-group "OIAutoRest5123" --workspace-name "AzTest9724" +``` +##### Show ##### +``` +az loganalytics data-source show --name "AzTestDS774" --resource-group "OIAutoRest5123" --workspace-name "AzTest9724" +``` +##### List ##### +``` +az loganalytics data-source list --filter "kind=\'WindowsEvent\'" --resource-group "OIAutoRest5123" \ + --workspace-name "AzTest9724" +``` +##### Delete ##### +``` +az loganalytics data-source delete --name "AzTestDS774" --resource-group "OIAutoRest5123" --workspace-name "AzTest9724" +``` +#### loganalytics intelligence-pack #### +##### List ##### +``` +az loganalytics intelligence-pack list --resource-group "rg1" --workspace-name "TestLinkWS" +``` +##### Disable ##### +``` +az loganalytics intelligence-pack disable --name "ChangeTracking" --resource-group "rg1" --workspace-name "TestLinkWS" +``` +##### Enable ##### +``` +az loganalytics intelligence-pack enable --name "ChangeTracking" --resource-group "rg1" --workspace-name "TestLinkWS" +``` +#### loganalytics linked-service #### +##### Create ##### +``` +az loganalytics linked-service create --name "Cluster" \ + --write-access-resource-id "/subscriptions/00000000-0000-0000-0000-00000000000/resourceGroups/mms-eus/providers/Microsoft.OperationalInsights/clusters/testcluster" \ + --resource-group "mms-eus" --workspace-name "TestLinkWS" + +az loganalytics linked-service wait --created --name "{myLinkedService}" --resource-group "{rg_5}" +``` +##### Show ##### +``` +az loganalytics linked-service show --name "Cluster" --resource-group "mms-eus" --workspace-name "TestLinkWS" +``` +##### List ##### +``` +az loganalytics linked-service list --resource-group "mms-eus" --workspace-name "TestLinkWS" +``` +##### Delete ##### +``` +az loganalytics linked-service delete --name "Cluster" 
--resource-group "rg1" --workspace-name "TestLinkWS" +``` +#### loganalytics linked-storage-account #### +##### Create ##### +``` +az loganalytics linked-storage-account create --data-source-type "CustomLogs" \ + --storage-account-ids "/subscriptions/00000000-0000-0000-0000-00000000000/resourceGroups/mms-eus/providers/Microsoft.Storage/storageAccounts/testStorageA" "/subscriptions/00000000-0000-0000-0000-00000000000/resourceGroups/mms-eus/providers/Microsoft.Storage/storageAccounts/testStorageB" \ + --resource-group "mms-eus" --workspace-name "testLinkStorageAccountsWS" +``` +##### Show ##### +``` +az loganalytics linked-storage-account show --data-source-type "CustomLogs" --resource-group "mms-eus" \ + --workspace-name "testLinkStorageAccountsWS" +``` +##### List ##### +``` +az loganalytics linked-storage-account list --resource-group "mms-eus" --workspace-name "testLinkStorageAccountsWS" +``` +##### Delete ##### +``` +az loganalytics linked-storage-account delete --data-source-type "CustomLogs" --resource-group "mms-eus" \ + --workspace-name "testLinkStorageAccountsWS" +``` +#### loganalytics management-group #### +##### List ##### +``` +az loganalytics management-group list --resource-group "rg1" --workspace-name "TestLinkWS" +``` +#### loganalytics operation-statuses #### +##### Show ##### +``` +az loganalytics operation-statuses show --async-operation-id "713192d7-503f-477a-9cfe-4efc3ee2bd11" \ + --location "West US" +``` +#### loganalytics shared-key #### +##### Get-shared-key ##### +``` +az loganalytics shared-key get-shared-key --resource-group "rg1" --workspace-name "TestLinkWS" +``` +##### Regenerate ##### +``` +az loganalytics shared-key regenerate --resource-group "rg1" --workspace-name "workspace1" +``` +#### loganalytics usage #### +##### List ##### +``` +az loganalytics usage list --resource-group "rg1" --workspace-name "TestLinkWS" +``` +#### loganalytics storage-insight-config #### +##### Create ##### +``` +az loganalytics storage-insight-config create --containers "wad-iis-logfiles" \ + --storage-account id="/subscriptions/00000000-0000-0000-0000-000000000005/resourcegroups/OIAutoRest6987/providers/microsoft.storage/storageaccounts/AzTestFakeSA9945" key="1234" \ + --tables "WADWindowsEventLogsTable" "LinuxSyslogVer2v0" --resource-group "OIAutoRest5123" \ + --storage-insight-name "AzTestSI1110" --workspace-name "aztest5048" +``` +##### Show ##### +``` +az loganalytics storage-insight-config show --resource-group "OIAutoRest5123" --storage-insight-name "AzTestSI1110" \ + --workspace-name "aztest5048" +``` +##### List ##### +``` +az loganalytics storage-insight-config list --resource-group "OIAutoRest5123" --workspace-name "aztest5048" +``` +##### Delete ##### +``` +az loganalytics storage-insight-config delete --resource-group "OIAutoRest5123" --storage-insight-name "AzTestSI1110" \ + --workspace-name "aztest5048" +``` +#### loganalytics saved-search #### +##### Create ##### +``` +az loganalytics saved-search create --category "Saved Search Test Category" \ + --display-name "Create or Update Saved Search Test" --function-alias "heartbeat_func" \ + --function-parameters "a:int=1" --query "Heartbeat | summarize Count() by Computer | take a" \ + --tags name="Group" value="Computer" --version 2 --resource-group "TestRG" \ + --saved-search-id "00000000-0000-0000-0000-00000000000" --workspace-name "TestWS" +``` +##### Show ##### +``` +az loganalytics saved-search show --resource-group "TestRG" --saved-search-id "00000000-0000-0000-0000-00000000000" \ + --workspace-name "TestWS" 
+``` +##### List ##### +``` +az loganalytics saved-search list --resource-group "TestRG" --workspace-name "TestWS" +``` +##### Delete ##### +``` +az loganalytics saved-search delete --resource-group "TestRG" --saved-search-id "00000000-0000-0000-0000-00000000000" \ + --workspace-name "TestWS" +``` +#### loganalytics available-service-tier #### +##### List ##### +``` +az loganalytics available-service-tier list --resource-group "rg1" --workspace-name "workspace1" +``` +#### loganalytics gateway #### +##### Delete ##### +``` +az loganalytics gateway delete --gateway-id "00000000-0000-0000-0000-00000000000" --resource-group "OIAutoRest5123" \ + --workspace-name "aztest5048" +``` +#### loganalytics schema #### +##### Get ##### +``` +az loganalytics schema get --resource-group "mms-eus" --workspace-name "atlantisdemo" +``` +#### loganalytics workspace-purge #### +##### Purge ##### +``` +az loganalytics workspace-purge purge \ + --filters "[{\\"column\\":\\"TimeGenerated\\",\\"operator\\":\\">\\",\\"value\\":\\"2017-09-01T00:00:00\\"}]" \ + --table "Heartbeat" --resource-group "OIAutoRest5123" --workspace-name "aztest5048" +``` +##### Show-purge-status ##### +``` +az loganalytics workspace-purge show-purge-status --purge-id "purge-970318e7-b859-4edb-8903-83b1b54d0b74" \ + --resource-group "OIAutoRest5123" --workspace-name "aztest5048" +``` +#### loganalytics table #### +##### List ##### +``` +az loganalytics table list --resource-group "oiautorest6685" --workspace-name "oiautorest6685" +``` +##### Show ##### +``` +az loganalytics table show --resource-group "oiautorest6685" --name "table1" --workspace-name "oiautorest6685" +``` +##### Update ##### +``` +az loganalytics table update --retention-in-days 30 --resource-group "oiautorest6685" --name "table1" \ + --workspace-name "oiautorest6685" +``` +#### loganalytics cluster #### +##### Create ##### +``` +az loganalytics cluster create --name "oiautorest6685" --location "australiasoutheast" \ + --sku name="CapacityReservation" capacity=1000 --tags tag1="val1" --resource-group "oiautorest6685" + +az loganalytics cluster wait --created --name "{rg_8}" --resource-group "{rg_8}" +``` +##### Show ##### +``` +az loganalytics cluster show --name "oiautorest6685" --resource-group "oiautorest6685" +``` +##### List ##### +``` +az loganalytics cluster list --resource-group "oiautorest6685" +``` +##### Update ##### +``` +az loganalytics cluster update --name "oiautorest6685" --type "UserAssigned" \ + --user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-00000000000/resourcegroups/oiautorest6685/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity\\":{}}" \ + --key-vault-properties key-name="aztest2170cert" key-rsa-size=1024 key-vault-uri="https://aztest2170.vault.azure.net" key-version="654ft6c4e63845cbb50fd6fg51540429" \ + --sku name="CapacityReservation" capacity=1000 --tags tag1="val1" --resource-group "oiautorest6685" +``` +##### Delete ##### +``` +az loganalytics cluster delete --name "oiautorest6685" --resource-group "oiautorest6685" +``` +#### loganalytics workspace #### +##### Create ##### +``` +az loganalytics workspace create --location "australiasoutheast" --retention-in-days 30 --sku name="PerGB2018" \ + --tags tag1="val1" --resource-group "oiautorest6685" --name "oiautorest6685" + +az loganalytics workspace wait --created --resource-group "{rg_8}" --name "{rg_8}" +``` +##### Show ##### +``` +az loganalytics workspace show --resource-group "oiautorest6685" --name "oiautorest6685" +``` +##### List ##### +``` +az 
loganalytics workspace list --resource-group "oiautorest6685" +``` +##### Update ##### +``` +az loganalytics workspace update --retention-in-days 30 --sku name="PerGB2018" --daily-quota-gb -1 \ + --resource-group "oiautorest6685" --name "oiautorest6685" +``` +##### Delete ##### +``` +az loganalytics workspace delete --resource-group "oiautorest6685" --name "oiautorest6685" +``` +#### loganalytics deleted-workspace #### +##### List ##### +``` +az loganalytics deleted-workspace list --resource-group "oiautorest6685" +``` \ No newline at end of file diff --git a/src/loganalytics/azext_loganalytics/__init__.py b/src/loganalytics/azext_loganalytics/__init__.py new file mode 100644 index 00000000000..df0ee02a62a --- /dev/null +++ b/src/loganalytics/azext_loganalytics/__init__.py @@ -0,0 +1,50 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from azure.cli.core import AzCommandsLoader +from azext_loganalytics.generated._help import helps # pylint: disable=unused-import +try: + from azext_loganalytics.manual._help import helps # pylint: disable=reimported +except ImportError: + pass + + +class OperationalInsightsManagementClientCommandsLoader(AzCommandsLoader): + + def __init__(self, cli_ctx=None): + from azure.cli.core.commands import CliCommandType + from azext_loganalytics.generated._client_factory import cf_loganalytics_cl + loganalytics_custom = CliCommandType( + operations_tmpl='azext_loganalytics.custom#{}', + client_factory=cf_loganalytics_cl) + parent = super(OperationalInsightsManagementClientCommandsLoader, self) + parent.__init__(cli_ctx=cli_ctx, custom_command_type=loganalytics_custom) + + def load_command_table(self, args): + from azext_loganalytics.generated.commands import load_command_table + load_command_table(self, args) + try: + from azext_loganalytics.manual.commands import load_command_table as load_command_table_manual + load_command_table_manual(self, args) + except ImportError: + pass + return self.command_table + + def load_arguments(self, command): + from azext_loganalytics.generated._params import load_arguments + load_arguments(self, command) + try: + from azext_loganalytics.manual._params import load_arguments as load_arguments_manual + load_arguments_manual(self, command) + except ImportError: + pass + + +COMMAND_LOADER_CLS = OperationalInsightsManagementClientCommandsLoader diff --git a/src/loganalytics/azext_loganalytics/action.py b/src/loganalytics/azext_loganalytics/action.py new file mode 100644 index 00000000000..d95d53bf711 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/action.py @@ -0,0 +1,17 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wildcard-import +# pylint: disable=unused-wildcard-import + +from .generated.action import * # noqa: F403 +try: + from .manual.action import * # noqa: F403 +except ImportError: + pass diff --git a/src/loganalytics/azext_loganalytics/azext_metadata.json b/src/loganalytics/azext_loganalytics/azext_metadata.json new file mode 100644 index 00000000000..cfc30c747c7 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/azext_metadata.json @@ -0,0 +1,4 @@ +{ + "azext.isExperimental": true, + "azext.minCliCoreVersion": "2.15.0" +} \ No newline at end of file diff --git a/src/loganalytics/azext_loganalytics/custom.py b/src/loganalytics/azext_loganalytics/custom.py new file mode 100644 index 00000000000..dbe9d5f9742 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/custom.py @@ -0,0 +1,17 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wildcard-import +# pylint: disable=unused-wildcard-import + +from .generated.custom import * # noqa: F403 +try: + from .manual.custom import * # noqa: F403 +except ImportError: + pass diff --git a/src/loganalytics/azext_loganalytics/generated/__init__.py b/src/loganalytics/azext_loganalytics/generated/__init__.py new file mode 100644 index 00000000000..c9cfdc73e77 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/generated/__init__.py @@ -0,0 +1,12 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/src/loganalytics/azext_loganalytics/generated/_client_factory.py b/src/loganalytics/azext_loganalytics/generated/_client_factory.py new file mode 100644 index 00000000000..245cc4e01d8 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/generated/_client_factory.py @@ -0,0 +1,92 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + + +def cf_loganalytics_cl(cli_ctx, *_): + from azure.cli.core.commands.client_factory import get_mgmt_service_client + from azext_loganalytics.vendored_sdks.loganalytics import OperationalInsightsManagementClient + return get_mgmt_service_client(cli_ctx, + OperationalInsightsManagementClient) + + +def cf_data_export(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).data_exports + + +def cf_data_source(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).data_sources + + +def cf_intelligence_pack(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).intelligence_packs + + +def cf_linked_service(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).linked_services + + +def cf_linked_storage_account(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).linked_storage_accounts + + +def cf_management_group(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).management_groups + + +def cf_operation_statuses(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).operation_statuses + + +def cf_shared_key(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).shared_keys + + +def cf_usage(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).usages + + +def cf_storage_insight_config(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).storage_insight_configs + + +def cf_saved_search(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).saved_searches + + +def cf_available_service_tier(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).available_service_tiers + + +def cf_gateway(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).gateways + + +def cf_schema(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).schema + + +def cf_workspace_purge(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).workspace_purge + + +def cf_table(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).tables + + +def cf_cluster(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).clusters + + +def cf_workspace(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).workspaces + + +def cf_deleted_workspace(cli_ctx, *_): + return cf_loganalytics_cl(cli_ctx).deleted_workspaces diff --git a/src/loganalytics/azext_loganalytics/generated/_help.py b/src/loganalytics/azext_loganalytics/generated/_help.py new file mode 100644 index 00000000000..9d693e09977 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/generated/_help.py @@ -0,0 +1,801 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines + +from knack.help_files import helps + + +helps['loganalytics data-export'] = """ + type: group + short-summary: Manage data export with loganalytics +""" + +helps['loganalytics data-export list'] = """ + type: command + short-summary: "Lists the data export instances within a workspace." + examples: + - name: DataExportGet + text: |- + az loganalytics data-export list --resource-group "RgTest1" --workspace-name "DeWnTest1234" +""" + +helps['loganalytics data-export show'] = """ + type: command + short-summary: "Gets a data export instance." 
+ examples: + - name: DataExportGet + text: |- + az loganalytics data-export show --name "export1" --resource-group "RgTest1" --workspace-name \ +"DeWnTest1234" +""" + +helps['loganalytics data-export create'] = """ + type: command + short-summary: "Create a data export." + examples: + - name: DataExportCreate + text: |- + az loganalytics data-export create --name "export1" --resource-id "/subscriptions/192b9f85-a39a-4276-b96\ +d-d5cd351703f9/resourceGroups/OIAutoRest1234/providers/Microsoft.EventHub/namespaces/test" --table-names "Heartbeat" \ +--resource-group "RgTest1" --workspace-name "DeWnTest1234" +""" + +helps['loganalytics data-export update'] = """ + type: command + short-summary: "Update a data export." +""" + +helps['loganalytics data-export delete'] = """ + type: command + short-summary: "Deletes the specified data export in a given workspace.." + examples: + - name: DataExportDelete + text: |- + az loganalytics data-export delete --name "export1" --resource-group "RgTest1" --workspace-name \ +"DeWnTest1234" +""" + +helps['loganalytics data-source'] = """ + type: group + short-summary: Manage data source with loganalytics +""" + +helps['loganalytics data-source list'] = """ + type: command + short-summary: "Gets the first page of data source instances in a workspace with the link to the next page." + examples: + - name: DataSourcesListByWorkspace + text: |- + az loganalytics data-source list --filter "kind=\'WindowsEvent\'" --resource-group "OIAutoRest5123" \ +--workspace-name "AzTest9724" +""" + +helps['loganalytics data-source show'] = """ + type: command + short-summary: "Gets a datasource instance." + examples: + - name: DataSourcesGet + text: |- + az loganalytics data-source show --name "AzTestDS774" --resource-group "OIAutoRest5123" \ +--workspace-name "AzTest9724" +""" + +helps['loganalytics data-source create'] = """ + type: command + short-summary: "Create a data source." + examples: + - name: DataSourcesCreate + text: |- + az loganalytics data-source create --name "AzTestDS774" --kind "AzureActivityLog" --properties \ +"{\\"LinkedResourceId\\":\\"/subscriptions/00000000-0000-0000-0000-00000000000/providers/microsoft.insights/eventtypes/\ +management\\"}" --resource-group "OIAutoRest5123" --workspace-name "AzTest9724" +""" + +helps['loganalytics data-source update'] = """ + type: command + short-summary: "Update a data source." +""" + +helps['loganalytics data-source delete'] = """ + type: command + short-summary: "Deletes a data source instance." + examples: + - name: DataSourcesDelete + text: |- + az loganalytics data-source delete --name "AzTestDS774" --resource-group "OIAutoRest5123" \ +--workspace-name "AzTest9724" +""" + +helps['loganalytics intelligence-pack'] = """ + type: group + short-summary: Manage intelligence pack with loganalytics +""" + +helps['loganalytics intelligence-pack list'] = """ + type: command + short-summary: "Lists all the intelligence packs possible and whether they are enabled or disabled for a given \ +workspace." + examples: + - name: IntelligencePacksList + text: |- + az loganalytics intelligence-pack list --resource-group "rg1" --workspace-name "TestLinkWS" +""" + +helps['loganalytics intelligence-pack disable'] = """ + type: command + short-summary: "Disables an intelligence pack for a given workspace." 
+ examples: + - name: IntelligencePacksDisable + text: |- + az loganalytics intelligence-pack disable --name "ChangeTracking" --resource-group "rg1" \ +--workspace-name "TestLinkWS" +""" + +helps['loganalytics intelligence-pack enable'] = """ + type: command + short-summary: "Enables an intelligence pack for a given workspace." + examples: + - name: IntelligencePacksEnable + text: |- + az loganalytics intelligence-pack enable --name "ChangeTracking" --resource-group "rg1" \ +--workspace-name "TestLinkWS" +""" + +helps['loganalytics linked-service'] = """ + type: group + short-summary: Manage linked service with loganalytics +""" + +helps['loganalytics linked-service list'] = """ + type: command + short-summary: "Gets the linked services instances in a workspace." + examples: + - name: LinkedServicesListByWorkspace + text: |- + az loganalytics linked-service list --resource-group "mms-eus" --workspace-name "TestLinkWS" +""" + +helps['loganalytics linked-service show'] = """ + type: command + short-summary: "Gets a linked service instance." + examples: + - name: LinkedServicesGet + text: |- + az loganalytics linked-service show --name "Cluster" --resource-group "mms-eus" --workspace-name \ +"TestLinkWS" +""" + +helps['loganalytics linked-service create'] = """ + type: command + short-summary: "Create a linked service." + examples: + - name: LinkedServicesCreate + text: |- + az loganalytics linked-service create --name "Cluster" --write-access-resource-id \ +"/subscriptions/00000000-0000-0000-0000-00000000000/resourceGroups/mms-eus/providers/Microsoft.OperationalInsights/clus\ +ters/testcluster" --resource-group "mms-eus" --workspace-name "TestLinkWS" +""" + +helps['loganalytics linked-service update'] = """ + type: command + short-summary: "Update a linked service." +""" + +helps['loganalytics linked-service delete'] = """ + type: command + short-summary: "Deletes a linked service instance." + examples: + - name: LinkedServicesDelete + text: |- + az loganalytics linked-service delete --name "Cluster" --resource-group "rg1" --workspace-name \ +"TestLinkWS" +""" + +helps['loganalytics linked-service wait'] = """ + type: command + short-summary: Place the CLI in a waiting state until a condition of the loganalytics linked-service is met. + examples: + - name: Pause executing next line of CLI script until the loganalytics linked-service is successfully created. + text: |- + az loganalytics linked-service wait --name "Cluster" --resource-group "mms-eus" --workspace-name \ +"TestLinkWS" --created + - name: Pause executing next line of CLI script until the loganalytics linked-service is successfully updated. + text: |- + az loganalytics linked-service wait --name "Cluster" --resource-group "mms-eus" --workspace-name \ +"TestLinkWS" --updated + - name: Pause executing next line of CLI script until the loganalytics linked-service is successfully deleted. + text: |- + az loganalytics linked-service wait --name "Cluster" --resource-group "mms-eus" --workspace-name \ +"TestLinkWS" --deleted +""" + +helps['loganalytics linked-storage-account'] = """ + type: group + short-summary: Manage linked storage account with loganalytics +""" + +helps['loganalytics linked-storage-account list'] = """ + type: command + short-summary: "Gets all linked storage accounts associated with the specified workspace, storage accounts will be \ +sorted by their data source type." + examples: + - name: Gets list of linked storage accounts on a workspace. 
+ text: |- + az loganalytics linked-storage-account list --resource-group "mms-eus" --workspace-name \ +"testLinkStorageAccountsWS" +""" + +helps['loganalytics linked-storage-account show'] = """ + type: command + short-summary: "Gets all linked storage account of a specific data source type associated with the specified \ +workspace." + examples: + - name: LinkedStorageAccountsGet + text: |- + az loganalytics linked-storage-account show --data-source-type "CustomLogs" --resource-group "mms-eus" \ +--workspace-name "testLinkStorageAccountsWS" +""" + +helps['loganalytics linked-storage-account create'] = """ + type: command + short-summary: "Create a link relation between current workspace and a group of storage accounts of a specific \ +data source type." + examples: + - name: LinkedStorageAccountsCreate + text: |- + az loganalytics linked-storage-account create --data-source-type "CustomLogs" --storage-account-ids \ +"/subscriptions/00000000-0000-0000-0000-00000000000/resourceGroups/mms-eus/providers/Microsoft.Storage/storageAccounts/\ +testStorageA" "/subscriptions/00000000-0000-0000-0000-00000000000/resourceGroups/mms-eus/providers/Microsoft.Storage/st\ +orageAccounts/testStorageB" --resource-group "mms-eus" --workspace-name "testLinkStorageAccountsWS" +""" + +helps['loganalytics linked-storage-account update'] = """ + type: command + short-summary: "Update a link relation between current workspace and a group of storage accounts of a specific \ +data source type." +""" + +helps['loganalytics linked-storage-account delete'] = """ + type: command + short-summary: "Deletes all linked storage accounts of a specific data source type associated with the specified \ +workspace." + examples: + - name: LinkedStorageAccountsDelete + text: |- + az loganalytics linked-storage-account delete --data-source-type "CustomLogs" --resource-group \ +"mms-eus" --workspace-name "testLinkStorageAccountsWS" +""" + +helps['loganalytics management-group'] = """ + type: group + short-summary: Manage management group with loganalytics +""" + +helps['loganalytics management-group list'] = """ + type: command + short-summary: "Gets a list of management groups connected to a workspace." + examples: + - name: WorkspacesListManagementGroups + text: |- + az loganalytics management-group list --resource-group "rg1" --workspace-name "TestLinkWS" +""" + +helps['loganalytics operation-statuses'] = """ + type: group + short-summary: Manage operation statuses with loganalytics +""" + +helps['loganalytics operation-statuses show'] = """ + type: command + short-summary: "Get the status of a long running azure asynchronous operation." + examples: + - name: Get specific operation status + text: |- + az loganalytics operation-statuses show --async-operation-id "713192d7-503f-477a-9cfe-4efc3ee2bd11" \ +--location "West US" +""" + +helps['loganalytics shared-key'] = """ + type: group + short-summary: Manage shared key with loganalytics +""" + +helps['loganalytics shared-key get-shared-key'] = """ + type: command + short-summary: "Gets the shared keys for a workspace." + examples: + - name: SharedKeysList + text: |- + az loganalytics shared-key get-shared-key --resource-group "rg1" --workspace-name "TestLinkWS" +""" + +helps['loganalytics shared-key regenerate'] = """ + type: command + short-summary: "Regenerates the shared keys for a Log Analytics Workspace. These keys are used to connect \ +Microsoft Operational Insights agents to the workspace." 
+ examples: + - name: RegenerateSharedKeys + text: |- + az loganalytics shared-key regenerate --resource-group "rg1" --workspace-name "workspace1" +""" + +helps['loganalytics usage'] = """ + type: group + short-summary: Manage usage with loganalytics +""" + +helps['loganalytics usage list'] = """ + type: command + short-summary: "Gets a list of usage metrics for a workspace." + examples: + - name: UsagesList + text: |- + az loganalytics usage list --resource-group "rg1" --workspace-name "TestLinkWS" +""" + +helps['loganalytics storage-insight-config'] = """ + type: group + short-summary: Manage storage insight config with loganalytics +""" + +helps['loganalytics storage-insight-config list'] = """ + type: command + short-summary: "Lists the storage insight instances within a workspace." + examples: + - name: StorageInsightsList + text: |- + az loganalytics storage-insight-config list --resource-group "OIAutoRest5123" --workspace-name \ +"aztest5048" +""" + +helps['loganalytics storage-insight-config show'] = """ + type: command + short-summary: "Gets a storage insight instance." + examples: + - name: StorageInsightsGet + text: |- + az loganalytics storage-insight-config show --resource-group "OIAutoRest5123" --storage-insight-name \ +"AzTestSI1110" --workspace-name "aztest5048" +""" + +helps['loganalytics storage-insight-config create'] = """ + type: command + short-summary: "Create a storage insight." + parameters: + - name: --storage-account + short-summary: "The storage account connection details" + long-summary: | + Usage: --storage-account id=XX key=XX + + id: Required. The Azure Resource Manager ID of the storage account resource. + key: Required. The storage account key. + examples: + - name: StorageInsightsCreate + text: |- + az loganalytics storage-insight-config create --containers "wad-iis-logfiles" --storage-account \ +id="/subscriptions/00000000-0000-0000-0000-000000000005/resourcegroups/OIAutoRest6987/providers/microsoft.storage/stora\ +geaccounts/AzTestFakeSA9945" key="1234" --tables "WADWindowsEventLogsTable" "LinuxSyslogVer2v0" --resource-group \ +"OIAutoRest5123" --storage-insight-name "AzTestSI1110" --workspace-name "aztest5048" +""" + +helps['loganalytics storage-insight-config update'] = """ + type: command + short-summary: "Update a storage insight." + parameters: + - name: --storage-account + short-summary: "The storage account connection details" + long-summary: | + Usage: --storage-account id=XX key=XX + + id: Required. The Azure Resource Manager ID of the storage account resource. + key: Required. The storage account key. +""" + +helps['loganalytics storage-insight-config delete'] = """ + type: command + short-summary: "Deletes a storageInsightsConfigs resource." + examples: + - name: StorageInsightsDelete + text: |- + az loganalytics storage-insight-config delete --resource-group "OIAutoRest5123" --storage-insight-name \ +"AzTestSI1110" --workspace-name "aztest5048" +""" + +helps['loganalytics saved-search'] = """ + type: group + short-summary: Manage saved search with loganalytics +""" + +helps['loganalytics saved-search list'] = """ + type: command + short-summary: "Gets the saved searches for a given Log Analytics Workspace." + examples: + - name: SavedSearchesList + text: |- + az loganalytics saved-search list --resource-group "TestRG" --workspace-name "TestWS" +""" + +helps['loganalytics saved-search show'] = """ + type: command + short-summary: "Gets the specified saved search for a given workspace." 
+    examples:
+      - name: SavedSearchesGet
+        text: |-
+               az loganalytics saved-search show --resource-group "TestRG" --saved-search-id \
+"00000000-0000-0000-0000-00000000000" --workspace-name "TestWS"
+"""
+
+helps['loganalytics saved-search create'] = """
+    type: command
+    short-summary: "Create a saved search for a given workspace."
+    examples:
+      - name: SavedSearchCreateOrUpdate
+        text: |-
+               az loganalytics saved-search create --category "Saved Search Test Category" --display-name "Create or \
+Update Saved Search Test" --function-alias "heartbeat_func" --function-parameters "a:int=1" --query "Heartbeat | \
+summarize Count() by Computer | take a" --tags name="Group" value="Computer" --version 2 --resource-group "TestRG" \
+--saved-search-id "00000000-0000-0000-0000-00000000000" --workspace-name "TestWS"
+"""
+
+helps['loganalytics saved-search update'] = """
+    type: command
+    short-summary: "Update a saved search for a given workspace."
+"""
+
+helps['loganalytics saved-search delete'] = """
+    type: command
+    short-summary: "Deletes the specified saved search in a given workspace."
+    examples:
+      - name: SavedSearchesDelete
+        text: |-
+               az loganalytics saved-search delete --resource-group "TestRG" --saved-search-id \
+"00000000-0000-0000-0000-00000000000" --workspace-name "TestWS"
+"""
+
+helps['loganalytics available-service-tier'] = """
+    type: group
+    short-summary: Manage available service tier with loganalytics
+"""
+
+helps['loganalytics available-service-tier list'] = """
+    type: command
+    short-summary: "Gets the available service tiers for the workspace."
+    examples:
+      - name: AvailableServiceTiers
+        text: |-
+               az loganalytics available-service-tier list --resource-group "rg1" --workspace-name "workspace1"
+"""
+
+helps['loganalytics gateway'] = """
+    type: group
+    short-summary: Manage gateway with loganalytics
+"""
+
+helps['loganalytics gateway delete'] = """
+    type: command
+    short-summary: "Delete a Log Analytics gateway."
+    examples:
+      - name: DeleteGateways
+        text: |-
+               az loganalytics gateway delete --gateway-id "00000000-0000-0000-0000-00000000000" --resource-group \
+"OIAutoRest5123" --workspace-name "aztest5048"
+"""
+
+helps['loganalytics schema'] = """
+    type: group
+    short-summary: Manage schema with loganalytics
+"""
+
+helps['loganalytics schema get'] = """
+    type: command
+    short-summary: "Gets the schema for a given workspace."
+    examples:
+      - name: WorkspacesGetSchema
+        text: |-
+               az loganalytics schema get --resource-group "mms-eus" --workspace-name "atlantisdemo"
+"""
+
+helps['loganalytics workspace-purge'] = """
+    type: group
+    short-summary: Manage workspace purge with loganalytics
+"""
+
+helps['loganalytics workspace-purge purge'] = """
+    type: command
+    short-summary: "Purges data in a Log Analytics workspace by a set of user-defined filters. In order to manage \
+system resources, purge requests are throttled at 50 requests per hour. You should batch the execution of purge \
+requests by sending a single command whose predicate includes all user identities that require purging. Use the in \
+operator to specify multiple identities. You should run the query prior to using it in a purge request to verify that \
+the results are expected."
+    examples:
+      - name: WorkspacePurge
+        text: |-
+               az loganalytics workspace-purge purge --filters "[{\\"column\\":\\"TimeGenerated\\",\\"operator\\":\\">\
+\\",\\"value\\":\\"2017-09-01T00:00:00\\"}]" --table "Heartbeat" --resource-group "OIAutoRest5123" --workspace-name \
+"aztest5048"
+"""
+
+helps['loganalytics workspace-purge show-purge-status'] = """
+    type: command
+    short-summary: "Gets the status of an ongoing purge operation."
+    examples:
+      - name: WorkspacePurgeOperation
+        text: |-
+               az loganalytics workspace-purge show-purge-status --purge-id "purge-970318e7-b859-4edb-8903-83b1b54d0b74\
+" --resource-group "OIAutoRest5123" --workspace-name "aztest5048"
+"""
+
+helps['loganalytics table'] = """
+    type: group
+    short-summary: Manage table with loganalytics
+"""
+
+helps['loganalytics table list'] = """
+    type: command
+    short-summary: "Gets all the tables for the specified Log Analytics workspace."
+    examples:
+      - name: TablesListByWorkspace
+        text: |-
+               az loganalytics table list --resource-group "oiautorest6685" --workspace-name "oiautorest6685"
+"""
+
+helps['loganalytics table show'] = """
+    type: command
+    short-summary: "Gets a Log Analytics workspace table."
+    examples:
+      - name: TablesGet
+        text: |-
+               az loganalytics table show --resource-group "oiautorest6685" --name "table1" --workspace-name \
+"oiautorest6685"
+"""
+
+helps['loganalytics table update'] = """
+    type: command
+    short-summary: "Updates the properties of a Log Analytics workspace table."
+    examples:
+      - name: TablesSet
+        text: |-
+               az loganalytics table update --retention-in-days 30 --resource-group "oiautorest6685" --name "table1" \
+--workspace-name "oiautorest6685"
+"""
+
+helps['loganalytics cluster'] = """
+    type: group
+    short-summary: Manage cluster with loganalytics
+"""
+
+helps['loganalytics cluster list'] = """
+    type: command
+    short-summary: "Gets Log Analytics clusters in a resource group. And Gets the Log Analytics clusters in a \
+subscription."
+    examples:
+      - name: ClustersGet
+        text: |-
+               az loganalytics cluster list --resource-group "oiautorest6685"
+      - name: ClustersSubscriptionList
+        text: |-
+               az loganalytics cluster list
+"""
+
+helps['loganalytics cluster show'] = """
+    type: command
+    short-summary: "Gets a Log Analytics cluster instance."
+    examples:
+      - name: ClustersGet
+        text: |-
+               az loganalytics cluster show --name "oiautorest6685" --resource-group "oiautorest6685"
+"""
+
+helps['loganalytics cluster create'] = """
+    type: command
+    short-summary: "Create a Log Analytics cluster."
+    parameters:
+      - name: --sku
+        short-summary: "The SKU properties."
+        long-summary: |
+            Usage: --sku capacity=XX name=XX
+
+            capacity: The capacity value.
+            name: The name of the SKU.
+      - name: --key-vault-properties
+        short-summary: "The associated key properties."
+        long-summary: |
+            Usage: --key-vault-properties key-vault-uri=XX key-name=XX key-version=XX key-rsa-size=XX
+
+            key-vault-uri: The Key Vault URI which holds the key associated with the Log Analytics cluster.
+            key-name: The name of the key associated with the Log Analytics cluster.
+            key-version: The version of the key associated with the Log Analytics cluster.
+            key-rsa-size: Selected key minimum required size.
+    examples:
+      - name: ClustersCreate
+        text: |-
+               az loganalytics cluster create --name "oiautorest6685" --location "australiasoutheast" --sku \
+name="CapacityReservation" capacity=1000 --tags tag1="val1" --resource-group "oiautorest6685"
+"""
+
+helps['loganalytics cluster update'] = """
+    type: command
+    short-summary: "Updates a Log Analytics cluster."
+    parameters:
+      - name: --sku
+        short-summary: "The SKU properties."
+        long-summary: |
+            Usage: --sku capacity=XX name=XX
+
+            capacity: The capacity value.
+            name: The name of the SKU.
+      - name: --key-vault-properties
+        short-summary: "The associated key properties."
+        long-summary: |
+            Usage: --key-vault-properties key-vault-uri=XX key-name=XX key-version=XX key-rsa-size=XX
+
+            key-vault-uri: The Key Vault URI which holds the key associated with the Log Analytics cluster.
+            key-name: The name of the key associated with the Log Analytics cluster.
+            key-version: The version of the key associated with the Log Analytics cluster.
+            key-rsa-size: Selected key minimum required size.
+    examples:
+      - name: ClustersPatch
+        text: |-
+               az loganalytics cluster update --name "oiautorest6685" --type "UserAssigned" --user-assigned-identities \
+"{\\"/subscriptions/00000000-0000-0000-0000-00000000000/resourcegroups/oiautorest6685/providers/Microsoft.ManagedIdenti\
+ty/userAssignedIdentities/myidentity\\":{}}" --key-vault-properties key-name="aztest2170cert" key-rsa-size=1024 \
+key-vault-uri="https://aztest2170.vault.azure.net" key-version="654ft6c4e63845cbb50fd6fg51540429" --sku \
+name="CapacityReservation" capacity=1000 --tags tag1="val1" --resource-group "oiautorest6685"
+"""
+
+helps['loganalytics cluster delete'] = """
+    type: command
+    short-summary: "Deletes a cluster instance."
+    examples:
+      - name: ClustersDelete
+        text: |-
+               az loganalytics cluster delete --name "oiautorest6685" --resource-group "oiautorest6685"
+"""
+
+helps['loganalytics cluster wait'] = """
+    type: command
+    short-summary: Place the CLI in a waiting state until a condition of the loganalytics cluster is met.
+    examples:
+      - name: Pause executing next line of CLI script until the loganalytics cluster is successfully created.
+        text: |-
+               az loganalytics cluster wait --name "oiautorest6685" --resource-group "oiautorest6685" --created
+      - name: Pause executing next line of CLI script until the loganalytics cluster is successfully deleted.
+        text: |-
+               az loganalytics cluster wait --name "oiautorest6685" --resource-group "oiautorest6685" --deleted
+"""
+
+helps['loganalytics workspace'] = """
+    type: group
+    short-summary: Manage workspace with loganalytics
+"""
+
+helps['loganalytics workspace list'] = """
+    type: command
+    short-summary: "Gets workspaces in a resource group. And Gets the workspaces in a subscription."
+    examples:
+      - name: WorkspacesGet
+        text: |-
+               az loganalytics workspace list --resource-group "oiautorest6685"
+      - name: WorkspacesSubscriptionList
+        text: |-
+               az loganalytics workspace list
+"""
+
+helps['loganalytics workspace show'] = """
+    type: command
+    short-summary: "Gets a workspace instance."
+    examples:
+      - name: WorkspaceGet
+        text: |-
+               az loganalytics workspace show --resource-group "oiautorest6685" --name "oiautorest6685"
+"""
+
+helps['loganalytics workspace create'] = """
+    type: command
+    short-summary: "Create a workspace."
+    parameters:
+      - name: --sku
+        short-summary: "The SKU of the workspace."
+        long-summary: |
+            Usage: --sku name=XX capacity-reservation-level=XX
+
+            name: Required. The name of the SKU.
+            capacity-reservation-level: The capacity reservation level for this workspace, when CapacityReservation \
+sku is selected.
+      - name: --features
+        short-summary: "Workspace features."
+        long-summary: |
+            Usage: --features enable-data-export=XX immediate-purge-data-on30-days=XX enable-log-access-using-only-reso\
+urce-permissions=XX cluster-resource-id=XX disable-local-auth=XX
+
+            enable-data-export: Flag that indicates whether data should be exported.
+            immediate-purge-data-on30-days: Flag that describes whether the data should be removed after 30 days.
+            enable-log-access-using-only-resource-permissions: Flag that indicates which permissions to use - resource \
+or workspace or both.
+            cluster-resource-id: Dedicated LA cluster resourceId that is linked to the workspaces.
+            disable-local-auth: Disable Non-AAD based Auth.
+    examples:
+      - name: WorkspacesCreate
+        text: |-
+               az loganalytics workspace create --location "australiasoutheast" --retention-in-days 30 --sku \
+name="PerGB2018" --tags tag1="val1" --resource-group "oiautorest6685" --name "oiautorest6685"
+"""
+
+helps['loganalytics workspace update'] = """
+    type: command
+    short-summary: "Updates a workspace."
+    parameters:
+      - name: --sku
+        short-summary: "The SKU of the workspace."
+        long-summary: |
+            Usage: --sku name=XX capacity-reservation-level=XX
+
+            name: Required. The name of the SKU.
+            capacity-reservation-level: The capacity reservation level for this workspace, when CapacityReservation \
+sku is selected.
+      - name: --features
+        short-summary: "Workspace features."
+        long-summary: |
+            Usage: --features enable-data-export=XX immediate-purge-data-on30-days=XX enable-log-access-using-only-reso\
+urce-permissions=XX cluster-resource-id=XX disable-local-auth=XX
+
+            enable-data-export: Flag that indicates whether data should be exported.
+            immediate-purge-data-on30-days: Flag that describes whether the data should be removed after 30 days.
+            enable-log-access-using-only-resource-permissions: Flag that indicates which permissions to use - resource \
+or workspace or both.
+            cluster-resource-id: Dedicated LA cluster resourceId that is linked to the workspaces.
+            disable-local-auth: Disable Non-AAD based Auth.
+    examples:
+      - name: WorkspacesPatch
+        text: |-
+               az loganalytics workspace update --retention-in-days 30 --sku name="PerGB2018" --daily-quota-gb -1 \
+--resource-group "oiautorest6685" --name "oiautorest6685"
+"""
+
+helps['loganalytics workspace delete'] = """
+    type: command
+    short-summary: "Deletes a workspace resource. To recover the workspace, create it again with the same name, in the \
+same subscription, resource group and location. The name is kept for 14 days and cannot be used for another workspace. \
+To remove the workspace completely and release the name, use the force flag."
+    examples:
+      - name: WorkspacesDelete
+        text: |-
+               az loganalytics workspace delete --resource-group "oiautorest6685" --name "oiautorest6685"
+"""
+
+helps['loganalytics workspace wait'] = """
+    type: command
+    short-summary: Place the CLI in a waiting state until a condition of the loganalytics workspace is met.
+    examples:
+      - name: Pause executing next line of CLI script until the loganalytics workspace is successfully created.
+        text: |-
+               az loganalytics workspace wait --resource-group "oiautorest6685" --name "oiautorest6685" --created
+      - name: Pause executing next line of CLI script until the loganalytics workspace is successfully deleted.
+ text: |- + az loganalytics workspace wait --resource-group "oiautorest6685" --name "oiautorest6685" --deleted +""" + +helps['loganalytics deleted-workspace'] = """ + type: group + short-summary: Manage deleted workspace with loganalytics +""" + +helps['loganalytics deleted-workspace list'] = """ + type: command + short-summary: "Gets recently deleted workspaces in a resource group, available for recovery. And Gets recently \ +deleted workspaces in a subscription, available for recovery." + examples: + - name: WorkspacesGet + text: |- + az loganalytics deleted-workspace list --resource-group "oiautorest6685" + - name: WorkspacesSubscriptionList + text: |- + az loganalytics deleted-workspace list +""" diff --git a/src/loganalytics/azext_loganalytics/generated/_params.py b/src/loganalytics/azext_loganalytics/generated/_params.py new file mode 100644 index 00000000000..2c15f14433b --- /dev/null +++ b/src/loganalytics/azext_loganalytics/generated/_params.py @@ -0,0 +1,538 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines +# pylint: disable=too-many-statements + +from azure.cli.core.commands.parameters import ( + tags_type, + get_three_state_flag, + get_enum_type, + resource_group_name_type, + get_location_type +) +from azure.cli.core.commands.validators import ( + get_default_location_from_resource_group, + validate_file_or_dict +) +from azext_loganalytics.action import ( + AddStorageAccount, + AddClustersSku, + AddKeyVaultProperties, + AddWorkspacesSku, + AddFeatures +) + + +def load_arguments(self, _): + + with self.argument_context('loganalytics data-export list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + + with self.argument_context('loganalytics data-export show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('data_export_name', options_list=['--name', '-n', '--data-export-name'], type=str, help='The data ' + 'export rule name.') + + with self.argument_context('loganalytics data-export create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('data_export_name', options_list=['--name', '-n', '--data-export-name'], type=str, help='The data ' + 'export rule name.') + c.argument('data_export_id', type=str, help='The data export rule ID.') + c.argument('table_names', nargs='+', help='An array of tables to export, for example: [“Heartbeat, ' + 'SecurityEvent”].') + c.argument('enable', arg_type=get_three_state_flag(), help='Active when enabled.') + c.argument('created_date', type=str, help='The latest data export rule modification time.') + c.argument('last_modified_date', type=str, help='Date and time when the export was last modified.') + c.argument('resource_id', type=str, help='The destination resource ID. 
This can be copied from the Properties ' + 'entry of the destination resource in Azure.', arg_group='Destination') + c.argument('event_hub_name', type=str, help='Optional. Allows to define an Event Hub name. Not applicable when ' + 'destination is Storage Account.', arg_group='Destination Meta Data') + + with self.argument_context('loganalytics data-export update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('data_export_name', options_list=['--name', '-n', '--data-export-name'], type=str, help='The data ' + 'export rule name.') + c.argument('data_export_id', type=str, help='The data export rule ID.') + c.argument('table_names', nargs='+', help='An array of tables to export, for example: [“Heartbeat, ' + 'SecurityEvent”].') + c.argument('enable', arg_type=get_three_state_flag(), help='Active when enabled.') + c.argument('created_date', type=str, help='The latest data export rule modification time.') + c.argument('last_modified_date', type=str, help='Date and time when the export was last modified.') + c.argument('resource_id', type=str, help='The destination resource ID. This can be copied from the Properties ' + 'entry of the destination resource in Azure.', arg_group='Destination') + c.argument('event_hub_name', type=str, help='Optional. Allows to define an Event Hub name. Not applicable when ' + 'destination is Storage Account.', arg_group='Destination Meta Data') + c.ignore('parameters') + + with self.argument_context('loganalytics data-export delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('data_export_name', options_list=['--name', '-n', '--data-export-name'], type=str, help='The data ' + 'export rule name.') + + with self.argument_context('loganalytics data-source list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('filter_', options_list=['--filter'], type=str, help='The filter to apply on the operation.') + c.argument('skiptoken', type=str, help='Starting point of the collection of data source instances.') + + with self.argument_context('loganalytics data-source show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('data_source_name', options_list=['--name', '-n', '--data-source-name'], type=str, help='Name of ' + 'the datasource') + + with self.argument_context('loganalytics data-source create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('data_source_name', options_list=['--name', '-n', '--data-source-name'], type=str, help='The name ' + 'of the datasource resource.') + c.argument('properties', type=validate_file_or_dict, help='The data source properties in raw json format, each ' + 'kind of data source have it\'s own schema. 
Expected value: json-string/@json-file.') + c.argument('etag', type=str, help='The ETag of the data source.') + c.argument('kind', arg_type=get_enum_type(['WindowsEvent', 'WindowsPerformanceCounter', 'IISLogs', + 'LinuxSyslog', 'LinuxSyslogCollection', 'LinuxPerformanceObject', + 'LinuxPerformanceCollection', 'CustomLog', 'CustomLogCollection', + 'AzureAuditLog', 'AzureActivityLog', 'GenericDataSource', + 'ChangeTrackingCustomPath', 'ChangeTrackingPath', + 'ChangeTrackingServices', 'ChangeTrackingDataTypeConfiguration', + 'ChangeTrackingDefaultRegistry', 'ChangeTrackingRegistry', + 'ChangeTrackingLinuxPath', 'LinuxChangeTrackingPath', + 'ChangeTrackingContentLocation', 'WindowsTelemetry', 'Office365', + 'SecurityWindowsBaselineConfiguration', + 'SecurityCenterSecurityWindowsBaselineConfiguration', + 'SecurityEventCollectionConfiguration', + 'SecurityInsightsSecurityEventCollectionConfiguration', + 'ImportComputerGroup', 'NetworkMonitoring', 'Itsm', 'DnsAnalytics', + 'ApplicationInsights', 'SqlDataClassification']), help='The kind of ' + 'the DataSource.') + c.argument('tags', tags_type) + + with self.argument_context('loganalytics data-source update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('data_source_name', options_list=['--name', '-n', '--data-source-name'], type=str, help='The name ' + 'of the datasource resource.') + c.argument('properties', type=validate_file_or_dict, help='The data source properties in raw json format, each ' + 'kind of data source have it\'s own schema. Expected value: json-string/@json-file.') + c.argument('etag', type=str, help='The ETag of the data source.') + c.argument('kind', arg_type=get_enum_type(['WindowsEvent', 'WindowsPerformanceCounter', 'IISLogs', + 'LinuxSyslog', 'LinuxSyslogCollection', 'LinuxPerformanceObject', + 'LinuxPerformanceCollection', 'CustomLog', 'CustomLogCollection', + 'AzureAuditLog', 'AzureActivityLog', 'GenericDataSource', + 'ChangeTrackingCustomPath', 'ChangeTrackingPath', + 'ChangeTrackingServices', 'ChangeTrackingDataTypeConfiguration', + 'ChangeTrackingDefaultRegistry', 'ChangeTrackingRegistry', + 'ChangeTrackingLinuxPath', 'LinuxChangeTrackingPath', + 'ChangeTrackingContentLocation', 'WindowsTelemetry', 'Office365', + 'SecurityWindowsBaselineConfiguration', + 'SecurityCenterSecurityWindowsBaselineConfiguration', + 'SecurityEventCollectionConfiguration', + 'SecurityInsightsSecurityEventCollectionConfiguration', + 'ImportComputerGroup', 'NetworkMonitoring', 'Itsm', 'DnsAnalytics', + 'ApplicationInsights', 'SqlDataClassification']), help='The kind of ' + 'the DataSource.') + c.argument('tags', tags_type) + c.ignore('parameters') + + with self.argument_context('loganalytics data-source delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('data_source_name', options_list=['--name', '-n', '--data-source-name'], type=str, help='Name of ' + 'the datasource.') + + with self.argument_context('loganalytics intelligence-pack list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + + with self.argument_context('loganalytics intelligence-pack disable') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + 
c.argument('intelligence_pack_name', options_list=['--name', '-n', '--intelligence-pack-name'], type=str, + help='The name of the intelligence pack to be disabled.') + + with self.argument_context('loganalytics intelligence-pack enable') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('intelligence_pack_name', options_list=['--name', '-n', '--intelligence-pack-name'], type=str, + help='The name of the intelligence pack to be enabled.') + + with self.argument_context('loganalytics linked-service list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + + with self.argument_context('loganalytics linked-service show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('linked_service_name', options_list=['--name', '-n', '--linked-service-name'], type=str, help='Name ' + 'of the linked service.') + + with self.argument_context('loganalytics linked-service create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('linked_service_name', options_list=['--name', '-n', '--linked-service-name'], type=str, help='Name ' + 'of the linkedServices resource') + c.argument('tags', tags_type) + c.argument('resource_id', type=str, help='The resource id of the resource that will be linked to the ' + 'workspace. This should be used for linking resources which require read access') + c.argument('write_access_resource_id', type=str, help='The resource id of the resource that will be linked to ' + 'the workspace. This should be used for linking resources which require write access') + c.argument('provisioning_state', arg_type=get_enum_type(['Succeeded', 'Deleting', 'ProvisioningAccount', + 'Updating']), help='The provisioning state of the ' + 'linked service.') + + with self.argument_context('loganalytics linked-service update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('linked_service_name', options_list=['--name', '-n', '--linked-service-name'], type=str, help='Name ' + 'of the linkedServices resource') + c.argument('tags', tags_type) + c.argument('resource_id', type=str, help='The resource id of the resource that will be linked to the ' + 'workspace. This should be used for linking resources which require read access') + c.argument('write_access_resource_id', type=str, help='The resource id of the resource that will be linked to ' + 'the workspace. 
This should be used for linking resources which require write access') + c.argument('provisioning_state', arg_type=get_enum_type(['Succeeded', 'Deleting', 'ProvisioningAccount', + 'Updating']), help='The provisioning state of the ' + 'linked service.') + c.ignore('parameters') + + with self.argument_context('loganalytics linked-service delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('linked_service_name', options_list=['--name', '-n', '--linked-service-name'], type=str, help='Name ' + 'of the linked service.') + + with self.argument_context('loganalytics linked-service wait') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('linked_service_name', options_list=['--name', '-n', '--linked-service-name'], type=str, help='Name ' + 'of the linked service.') + + with self.argument_context('loganalytics linked-storage-account list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + + with self.argument_context('loganalytics linked-storage-account show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('data_source_type', arg_type=get_enum_type(['CustomLogs', 'AzureWatson', 'Query', 'Alerts']), + help='Linked storage accounts type.') + + with self.argument_context('loganalytics linked-storage-account create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('data_source_type', arg_type=get_enum_type(['CustomLogs', 'AzureWatson', 'Query', 'Alerts']), + help='Linked storage accounts type.') + c.argument('storage_account_ids', nargs='+', help='Linked storage accounts resources ids.') + + with self.argument_context('loganalytics linked-storage-account update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('data_source_type', arg_type=get_enum_type(['CustomLogs', 'AzureWatson', 'Query', 'Alerts']), + help='Linked storage accounts type.') + c.argument('storage_account_ids', nargs='+', help='Linked storage accounts resources ids.') + c.ignore('parameters') + + with self.argument_context('loganalytics linked-storage-account delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('data_source_type', arg_type=get_enum_type(['CustomLogs', 'AzureWatson', 'Query', 'Alerts']), + help='Linked storage accounts type.') + + with self.argument_context('loganalytics management-group list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + + with self.argument_context('loganalytics operation-statuses show') as c: + c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name') + c.argument('async_operation_id', type=str, help='The operation Id.', id_part='child_name_1') + + with self.argument_context('loganalytics shared-key get-shared-key') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name 
of the workspace.') + + with self.argument_context('loganalytics shared-key regenerate') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + + with self.argument_context('loganalytics usage list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + + with self.argument_context('loganalytics storage-insight-config list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + + with self.argument_context('loganalytics storage-insight-config show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('storage_insight_name', type=str, help='Name of the storageInsightsConfigs resource') + + with self.argument_context('loganalytics storage-insight-config create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('storage_insight_name', type=str, help='Name of the storageInsightsConfigs resource') + c.argument('e_tag', type=str, help='The ETag of the storage insight.') + c.argument('tags', tags_type) + c.argument('containers', nargs='+', help='The names of the blob containers that the workspace should read') + c.argument('tables', nargs='+', help='The names of the Azure tables that the workspace should read') + c.argument('storage_account', action=AddStorageAccount, nargs='+', help='The storage account connection ' + 'details') + + with self.argument_context('loganalytics storage-insight-config update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('storage_insight_name', type=str, help='Name of the storageInsightsConfigs resource') + c.argument('e_tag', type=str, help='The ETag of the storage insight.') + c.argument('tags', tags_type) + c.argument('containers', nargs='+', help='The names of the blob containers that the workspace should read') + c.argument('tables', nargs='+', help='The names of the Azure tables that the workspace should read') + c.argument('storage_account', action=AddStorageAccount, nargs='+', help='The storage account connection ' + 'details') + c.ignore('parameters') + + with self.argument_context('loganalytics storage-insight-config delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('storage_insight_name', type=str, help='Name of the storageInsightsConfigs resource') + + with self.argument_context('loganalytics saved-search list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + + with self.argument_context('loganalytics saved-search show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('saved_search_id', type=str, help='The id of the saved search.') + + with self.argument_context('loganalytics saved-search create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + 
c.argument('saved_search_id', type=str, help='The id of the saved search.') + c.argument('etag', type=str, help='The ETag of the saved search. To override an existing saved search, use "*" ' + 'or specify the current Etag') + c.argument('category', type=str, help='The category of the saved search. This helps the user to find a saved ' + 'search faster.') + c.argument('display_name', type=str, help='Saved search display name.') + c.argument('query', type=str, help='The query expression for the saved search.') + c.argument('function_alias', type=str, help='The function alias if query serves as a function.') + c.argument('function_parameters', type=str, help='The optional function parameters if query serves as a ' + 'function. Value should be in the following format: \'param-name1:type1 = default_value1, ' + 'param-name2:type2 = default_value2\'. For more examples and proper syntax please refer to ' + 'https://docs.microsoft.com/en-us/azure/kusto/query/functions/user-defined-functions.') + c.argument('version', type=int, help='The version number of the query language. The current version is 2 and ' + 'is the default.') + c.argument('tags', tags_type) + + with self.argument_context('loganalytics saved-search update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('saved_search_id', type=str, help='The id of the saved search.') + c.argument('etag', type=str, help='The ETag of the saved search. To override an existing saved search, use "*" ' + 'or specify the current Etag') + c.argument('category', type=str, help='The category of the saved search. This helps the user to find a saved ' + 'search faster.') + c.argument('display_name', type=str, help='Saved search display name.') + c.argument('query', type=str, help='The query expression for the saved search.') + c.argument('function_alias', type=str, help='The function alias if query serves as a function.') + c.argument('function_parameters', type=str, help='The optional function parameters if query serves as a ' + 'function. Value should be in the following format: \'param-name1:type1 = default_value1, ' + 'param-name2:type2 = default_value2\'. For more examples and proper syntax please refer to ' + 'https://docs.microsoft.com/en-us/azure/kusto/query/functions/user-defined-functions.') + c.argument('version', type=int, help='The version number of the query language. 
The current version is 2 and ' + 'is the default.') + c.argument('tags', tags_type) + c.ignore('parameters') + + with self.argument_context('loganalytics saved-search delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('saved_search_id', type=str, help='The id of the saved search.') + + with self.argument_context('loganalytics available-service-tier list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + + with self.argument_context('loganalytics gateway delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('gateway_id', type=str, help='The Log Analytics gateway Id.') + + with self.argument_context('loganalytics schema get') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + + with self.argument_context('loganalytics workspace-purge purge') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.', id_part='name') + c.argument('table', type=str, help='Table from which to purge data.') + c.argument('filters', type=validate_file_or_dict, help='The set of columns and filters (queries) to run over ' + 'them to purge the resulting data. Expected value: json-string/@json-file.') + + with self.argument_context('loganalytics workspace-purge show-purge-status') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.', id_part='name') + c.argument('purge_id', type=str, help='In a purge status request, this is the Id of the operation the status ' + 'of which is returned.', id_part='child_name_1') + + with self.argument_context('loganalytics table list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + + with self.argument_context('loganalytics table show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('table_name', options_list=['--name', '-n', '--table-name'], type=str, + help='The name of the table.') + + with self.argument_context('loganalytics table update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='The name of the workspace.') + c.argument('table_name', options_list=['--name', '-n', '--table-name'], type=str, + help='The name of the table.') + c.argument('retention_in_days', type=int, help='The data table data retention in days, between 30 and 730. 
' + 'Setting this property to null will default to the workspace retention.') + + with self.argument_context('loganalytics cluster list') as c: + c.argument('resource_group_name', resource_group_name_type) + + with self.argument_context('loganalytics cluster show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('cluster_name', options_list=['--name', '-n', '--cluster-name'], type=str, help='Name of the Log ' + 'Analytics Cluster.') + + with self.argument_context('loganalytics cluster create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('cluster_name', options_list=['--name', '-n', '--cluster-name'], type=str, help='The name of the ' + 'Log Analytics cluster.') + c.argument('tags', tags_type) + c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, + validator=get_default_location_from_resource_group) + c.argument('sku', action=AddClustersSku, nargs='+', help='The SKU properties.') + c.argument('is_double_encryption_enabled', arg_type=get_three_state_flag(), help='Configures whether the cluster ' + 'will use double encryption. This property cannot be modified after cluster creation. Default ' + 'value is \'true\'.') + c.argument('is_availability_zones_enabled', arg_type=get_three_state_flag(), help='Sets whether the cluster ' + 'will support availability zones. This can be set to true only in regions where Azure Data Explorer ' + 'supports Availability Zones. This property cannot be modified after cluster creation. Default ' + 'value is \'true\' if the region supports Availability Zones.') + c.argument('billing_type', arg_type=get_enum_type(['Cluster', 'Workspaces']), help='The cluster\'s billing ' + 'type.') + c.argument('key_vault_properties', action=AddKeyVaultProperties, nargs='+', help='The associated key ' + 'properties.') + c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned', + 'None']), help='Type of managed service ' + 'identity.', arg_group='Identity') + c.argument('user_assigned_identities', type=validate_file_or_dict, help='The list of user identities ' + 'associated with the resource. The user identity dictionary key references will be ARM resource ids ' + 'in the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microso' + 'ft.ManagedIdentity/userAssignedIdentities/{identityName}\'. Expected value: ' + 'json-string/@json-file.', arg_group='Identity') + + with self.argument_context('loganalytics cluster update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('cluster_name', options_list=['--name', '-n', '--cluster-name'], type=str, help='Name of the Log ' + 'Analytics Cluster.') + c.argument('sku', action=AddClustersSku, nargs='+', help='The SKU properties.') + c.argument('tags', tags_type) + c.argument('key_vault_properties', action=AddKeyVaultProperties, nargs='+', help='The associated key ' + 'properties.') + c.argument('billing_type', arg_type=get_enum_type(['Cluster', 'Workspaces']), help='The cluster\'s billing ' + 'type.') + c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned', 'UserAssigned', + 'None']), help='Type of managed service ' + 'identity.', arg_group='Identity') + c.argument('user_assigned_identities', type=validate_file_or_dict, help='The list of user identities ' + 'associated with the resource.
The user identity dictionary key references will be ARM resource ids ' + 'in the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microso' + 'ft.ManagedIdentity/userAssignedIdentities/{identityName}\'. Expected value: ' + 'json-string/@json-file.', arg_group='Identity') + + with self.argument_context('loganalytics cluster delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('cluster_name', options_list=['--name', '-n', '--cluster-name'], type=str, help='Name of the Log ' + 'Analytics Cluster.') + + with self.argument_context('loganalytics cluster wait') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('cluster_name', options_list=['--name', '-n', '--cluster-name'], type=str, help='Name of the Log ' + 'Analytics Cluster.') + + with self.argument_context('loganalytics workspace list') as c: + c.argument('resource_group_name', resource_group_name_type) + + with self.argument_context('loganalytics workspace show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='The name of ' + 'the workspace.') + + with self.argument_context('loganalytics workspace create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='The name of ' + 'the workspace.') + c.argument('tags', tags_type) + c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, + validator=get_default_location_from_resource_group) + c.argument('e_tag', type=str, help='The ETag of the workspace.') + c.argument('provisioning_state', arg_type=get_enum_type(['Creating', 'Succeeded', 'Failed', 'Canceled', + 'Deleting', 'ProvisioningAccount', 'Updating']), + help='The provisioning state of the workspace.') + c.argument('sku', action=AddWorkspacesSku, nargs='+', help='The SKU of the workspace.') + c.argument('retention_in_days', type=int, help='The workspace data retention in days. Allowed values are per ' + 'pricing plan. 
See pricing tiers documentation for details.') + c.argument('public_network_access_for_ingestion', arg_type=get_enum_type(['Enabled', 'Disabled']), help='The ' + 'network access type for accessing Log Analytics ingestion.') + c.argument('public_network_access_for_query', arg_type=get_enum_type(['Enabled', 'Disabled']), help='The ' + 'network access type for accessing Log Analytics query.') + c.argument('force_cmk_for_query', arg_type=get_three_state_flag(), help='Indicates whether customer managed ' + 'storage is mandatory for query management.') + c.argument('features', action=AddFeatures, nargs='+', help='Workspace features.') + c.argument('daily_quota_gb', type=float, help='The workspace daily quota for ingestion.', arg_group='Workspace ' + 'Capping') + + with self.argument_context('loganalytics workspace update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='The name of ' + 'the workspace.') + c.argument('tags', tags_type) + c.argument('provisioning_state', arg_type=get_enum_type(['Creating', 'Succeeded', 'Failed', 'Canceled', + 'Deleting', 'ProvisioningAccount', 'Updating']), + help='The provisioning state of the workspace.') + c.argument('sku', action=AddWorkspacesSku, nargs='+', help='The SKU of the workspace.') + c.argument('retention_in_days', type=int, help='The workspace data retention in days. Allowed values are per ' + 'pricing plan. See pricing tiers documentation for details.') + c.argument('public_network_access_for_ingestion', arg_type=get_enum_type(['Enabled', 'Disabled']), help='The ' + 'network access type for accessing Log Analytics ingestion.') + c.argument('public_network_access_for_query', arg_type=get_enum_type(['Enabled', 'Disabled']), help='The ' + 'network access type for accessing Log Analytics query.') + c.argument('force_cmk_for_query', arg_type=get_three_state_flag(), help='Indicates whether customer managed ' + 'storage is mandatory for query management.') + c.argument('features', action=AddFeatures, nargs='+', help='Workspace features.') + c.argument('daily_quota_gb', type=float, help='The workspace daily quota for ingestion.', arg_group='Workspace ' + 'Capping') + + with self.argument_context('loganalytics workspace delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='The name of ' + 'the workspace.') + c.argument('force', arg_type=get_three_state_flag(), help='Deletes the workspace without the recovery option. ' + 'A workspace that was deleted with this flag cannot be recovered.') + + with self.argument_context('loganalytics workspace wait') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='The name of ' + 'the workspace.') + + with self.argument_context('loganalytics deleted-workspace list') as c: + c.argument('resource_group_name', resource_group_name_type) diff --git a/src/loganalytics/azext_loganalytics/generated/_validators.py b/src/loganalytics/azext_loganalytics/generated/_validators.py new file mode 100644 index 00000000000..b33a44c1ebf --- /dev/null +++ b/src/loganalytics/azext_loganalytics/generated/_validators.py @@ -0,0 +1,9 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- diff --git a/src/loganalytics/azext_loganalytics/generated/action.py b/src/loganalytics/azext_loganalytics/generated/action.py new file mode 100644 index 00000000000..fca2e43fd1e --- /dev/null +++ b/src/loganalytics/azext_loganalytics/generated/action.py @@ -0,0 +1,160 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=protected-access + +import argparse +from collections import defaultdict +from knack.util import CLIError + + +class AddStorageAccount(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.storage_account = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'id': + d['id'] = v[0] + elif kl == 'key': + d['key'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter storage_account. All possible keys are: ' + 'id, key'.format(k)) + return d + + +class AddClustersSku(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.sku = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'capacity': + d['capacity'] = v[0] + elif kl == 'name': + d['name'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter sku. 
All possible keys are: capacity, ' + 'name'.format(k)) + return d + + +class AddKeyVaultProperties(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.key_vault_properties = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'key-vault-uri': + d['key_vault_uri'] = v[0] + elif kl == 'key-name': + d['key_name'] = v[0] + elif kl == 'key-version': + d['key_version'] = v[0] + elif kl == 'key-rsa-size': + d['key_rsa_size'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter key_vault_properties. All possible keys ' + 'are: key-vault-uri, key-name, key-version, key-rsa-size'.format(k)) + return d + + +class AddWorkspacesSku(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.sku = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'name': + d['name'] = v[0] + elif kl == 'capacity-reservation-level': + d['capacity_reservation_level'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter sku. All possible keys are: name, ' + 'capacity-reservation-level'.format(k)) + return d + + +class AddFeatures(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.features = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'enable-data-export': + d['enable_data_export'] = v[0] + elif kl == 'immediate-purge-data-on30-days': + d['immediate_purge_data_on30_days'] = v[0] + elif kl == 'enable-log-access-using-only-resource-permissions': + d['enable_log_access_using_only_resource_permissions'] = v[0] + elif kl == 'cluster-resource-id': + d['cluster_resource_id'] = v[0] + elif kl == 'disable-local-auth': + d['disable_local_auth'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter features. 
All possible keys are: ' + 'enable-data-export, immediate-purge-data-on30-days, enable-log-access-using-only-resour' + 'ce-permissions, cluster-resource-id, disable-local-auth'.format(k)) + return d diff --git a/src/loganalytics/azext_loganalytics/generated/commands.py b/src/loganalytics/azext_loganalytics/generated/commands.py new file mode 100644 index 00000000000..80ce3a3fe2d --- /dev/null +++ b/src/loganalytics/azext_loganalytics/generated/commands.py @@ -0,0 +1,223 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=too-many-statements +# pylint: disable=too-many-locals + +from azure.cli.core.commands import CliCommandType + + +def load_command_table(self, _): + + from azext_loganalytics.generated._client_factory import cf_data_export + loganalytics_data_export = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._data_exports_operations#DataExportsO' + 'perations.{}', + client_factory=cf_data_export) + with self.command_group('loganalytics data-export', loganalytics_data_export, client_factory=cf_data_export) as g: + g.custom_command('list', 'loganalytics_data_export_list') + g.custom_show_command('show', 'loganalytics_data_export_show') + g.custom_command('create', 'loganalytics_data_export_create') + g.generic_update_command('update', custom_func_name='loganalytics_data_export_update') + g.custom_command('delete', 'loganalytics_data_export_delete', confirmation=True) + + from azext_loganalytics.generated._client_factory import cf_data_source + loganalytics_data_source = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._data_sources_operations#DataSourcesO' + 'perations.{}', + client_factory=cf_data_source) + with self.command_group('loganalytics data-source', loganalytics_data_source, client_factory=cf_data_source) as g: + g.custom_command('list', 'loganalytics_data_source_list') + g.custom_show_command('show', 'loganalytics_data_source_show') + g.custom_command('create', 'loganalytics_data_source_create') + g.generic_update_command('update', custom_func_name='loganalytics_data_source_update') + g.custom_command('delete', 'loganalytics_data_source_delete', confirmation=True) + + from azext_loganalytics.generated._client_factory import cf_intelligence_pack + loganalytics_intelligence_pack = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._intelligence_packs_operations#Intell' + 'igencePacksOperations.{}', + client_factory=cf_intelligence_pack) + with self.command_group('loganalytics intelligence-pack', loganalytics_intelligence_pack, + client_factory=cf_intelligence_pack) as g: + g.custom_command('list', 'loganalytics_intelligence_pack_list') + g.custom_command('disable', 'loganalytics_intelligence_pack_disable') + g.custom_command('enable', 'loganalytics_intelligence_pack_enable') + + from azext_loganalytics.generated._client_factory import cf_linked_service + loganalytics_linked_service = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._linked_services_operations#LinkedSer' + 
'vicesOperations.{}', + client_factory=cf_linked_service) + with self.command_group('loganalytics linked-service', loganalytics_linked_service, + client_factory=cf_linked_service) as g: + g.custom_command('list', 'loganalytics_linked_service_list') + g.custom_show_command('show', 'loganalytics_linked_service_show') + g.custom_command('create', 'loganalytics_linked_service_create', supports_no_wait=True) + g.generic_update_command('update', setter_name='begin_create_or_update', + custom_func_name='loganalytics_linked_service_update', supports_no_wait=True) + g.custom_command('delete', 'loganalytics_linked_service_delete', supports_no_wait=True, confirmation=True) + g.custom_wait_command('wait', 'loganalytics_linked_service_show') + + from azext_loganalytics.generated._client_factory import cf_linked_storage_account + loganalytics_linked_storage_account = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._linked_storage_accounts_operations#L' + 'inkedStorageAccountsOperations.{}', + client_factory=cf_linked_storage_account) + with self.command_group('loganalytics linked-storage-account', loganalytics_linked_storage_account, + client_factory=cf_linked_storage_account) as g: + g.custom_command('list', 'loganalytics_linked_storage_account_list') + g.custom_show_command('show', 'loganalytics_linked_storage_account_show') + g.custom_command('create', 'loganalytics_linked_storage_account_create') + g.generic_update_command('update', custom_func_name='loganalytics_linked_storage_account_update') + g.custom_command('delete', 'loganalytics_linked_storage_account_delete', confirmation=True) + + from azext_loganalytics.generated._client_factory import cf_management_group + loganalytics_management_group = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._management_groups_operations#Managem' + 'entGroupsOperations.{}', + client_factory=cf_management_group) + with self.command_group('loganalytics management-group', loganalytics_management_group, + client_factory=cf_management_group) as g: + g.custom_command('list', 'loganalytics_management_group_list') + + from azext_loganalytics.generated._client_factory import cf_operation_statuses + loganalytics_operation_statuses = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._operation_statuses_operations#Operat' + 'ionStatusesOperations.{}', + client_factory=cf_operation_statuses) + with self.command_group('loganalytics operation-statuses', loganalytics_operation_statuses, + client_factory=cf_operation_statuses) as g: + g.custom_show_command('show', 'loganalytics_operation_statuses_show') + + from azext_loganalytics.generated._client_factory import cf_shared_key + loganalytics_shared_key = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._shared_keys_operations#SharedKeysOpe' + 'rations.{}', + client_factory=cf_shared_key) + with self.command_group('loganalytics shared-key', loganalytics_shared_key, client_factory=cf_shared_key) as g: + g.custom_command('get-shared-key', 'loganalytics_shared_key_get_shared_key') + g.custom_command('regenerate', 'loganalytics_shared_key_regenerate') + + from azext_loganalytics.generated._client_factory import cf_usage + loganalytics_usage = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._usages_operations#UsagesOperations.{' + '}', + client_factory=cf_usage) + with self.command_group('loganalytics usage', 
loganalytics_usage, client_factory=cf_usage) as g: + g.custom_command('list', 'loganalytics_usage_list') + + from azext_loganalytics.generated._client_factory import cf_storage_insight_config + loganalytics_storage_insight_config = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._storage_insight_configs_operations#S' + 'torageInsightConfigsOperations.{}', + client_factory=cf_storage_insight_config) + with self.command_group('loganalytics storage-insight-config', loganalytics_storage_insight_config, + client_factory=cf_storage_insight_config) as g: + g.custom_command('list', 'loganalytics_storage_insight_config_list') + g.custom_show_command('show', 'loganalytics_storage_insight_config_show') + g.custom_command('create', 'loganalytics_storage_insight_config_create') + g.generic_update_command('update', custom_func_name='loganalytics_storage_insight_config_update') + g.custom_command('delete', 'loganalytics_storage_insight_config_delete', confirmation=True) + + from azext_loganalytics.generated._client_factory import cf_saved_search + loganalytics_saved_search = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._saved_searches_operations#SavedSearc' + 'hesOperations.{}', + client_factory=cf_saved_search) + with self.command_group('loganalytics saved-search', loganalytics_saved_search, + client_factory=cf_saved_search) as g: + g.custom_command('list', 'loganalytics_saved_search_list') + g.custom_show_command('show', 'loganalytics_saved_search_show') + g.custom_command('create', 'loganalytics_saved_search_create') + g.generic_update_command('update', custom_func_name='loganalytics_saved_search_update') + g.custom_command('delete', 'loganalytics_saved_search_delete', confirmation=True) + + from azext_loganalytics.generated._client_factory import cf_available_service_tier + loganalytics_available_service_tier = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._available_service_tiers_operations#A' + 'vailableServiceTiersOperations.{}', + client_factory=cf_available_service_tier) + with self.command_group('loganalytics available-service-tier', loganalytics_available_service_tier, + client_factory=cf_available_service_tier) as g: + g.custom_command('list', 'loganalytics_available_service_tier_list') + + from azext_loganalytics.generated._client_factory import cf_gateway + loganalytics_gateway = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._gateways_operations#GatewaysOperatio' + 'ns.{}', + client_factory=cf_gateway) + with self.command_group('loganalytics gateway', loganalytics_gateway, client_factory=cf_gateway) as g: + g.custom_command('delete', 'loganalytics_gateway_delete', confirmation=True) + + from azext_loganalytics.generated._client_factory import cf_schema + loganalytics_schema = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._schema_operations#SchemaOperations.{' + '}', + client_factory=cf_schema) + with self.command_group('loganalytics schema', loganalytics_schema, client_factory=cf_schema) as g: + g.custom_command('get', 'loganalytics_schema_get') + + from azext_loganalytics.generated._client_factory import cf_workspace_purge + loganalytics_workspace_purge = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._workspace_purge_operations#Workspace' + 'PurgeOperations.{}', + client_factory=cf_workspace_purge) + with 
self.command_group('loganalytics workspace-purge', loganalytics_workspace_purge, + client_factory=cf_workspace_purge) as g: + g.custom_command('purge', 'loganalytics_workspace_purge_purge') + g.custom_command('show-purge-status', 'loganalytics_workspace_purge_show_purge_status') + + from azext_loganalytics.generated._client_factory import cf_table + loganalytics_table = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._tables_operations#TablesOperations.{' + '}', + client_factory=cf_table) + with self.command_group('loganalytics table', loganalytics_table, client_factory=cf_table) as g: + g.custom_command('list', 'loganalytics_table_list') + g.custom_show_command('show', 'loganalytics_table_show') + g.custom_command('update', 'loganalytics_table_update') + + from azext_loganalytics.generated._client_factory import cf_cluster + loganalytics_cluster = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._clusters_operations#ClustersOperatio' + 'ns.{}', + client_factory=cf_cluster) + with self.command_group('loganalytics cluster', loganalytics_cluster, client_factory=cf_cluster) as g: + g.custom_command('list', 'loganalytics_cluster_list') + g.custom_show_command('show', 'loganalytics_cluster_show') + g.custom_command('create', 'loganalytics_cluster_create', supports_no_wait=True) + g.custom_command('update', 'loganalytics_cluster_update') + g.custom_command('delete', 'loganalytics_cluster_delete', supports_no_wait=True, confirmation=True) + g.custom_wait_command('wait', 'loganalytics_cluster_show') + + from azext_loganalytics.generated._client_factory import cf_workspace + loganalytics_workspace = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._workspaces_operations#WorkspacesOper' + 'ations.{}', + client_factory=cf_workspace) + with self.command_group('loganalytics workspace', loganalytics_workspace, client_factory=cf_workspace) as g: + g.custom_command('list', 'loganalytics_workspace_list') + g.custom_show_command('show', 'loganalytics_workspace_show') + g.custom_command('create', 'loganalytics_workspace_create', supports_no_wait=True) + g.custom_command('update', 'loganalytics_workspace_update') + g.custom_command('delete', 'loganalytics_workspace_delete', supports_no_wait=True, confirmation=True) + g.custom_wait_command('wait', 'loganalytics_workspace_show') + + from azext_loganalytics.generated._client_factory import cf_deleted_workspace + loganalytics_deleted_workspace = CliCommandType( + operations_tmpl='azext_loganalytics.vendored_sdks.loganalytics.operations._deleted_workspaces_operations#Delete' + 'dWorkspacesOperations.{}', + client_factory=cf_deleted_workspace) + with self.command_group('loganalytics deleted-workspace', loganalytics_deleted_workspace, + client_factory=cf_deleted_workspace) as g: + g.custom_command('list', 'loganalytics_deleted_workspace_list') + + with self.command_group('loganalytics', is_experimental=True): + pass diff --git a/src/loganalytics/azext_loganalytics/generated/custom.py b/src/loganalytics/azext_loganalytics/generated/custom.py new file mode 100644 index 00000000000..af6c6290363 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/generated/custom.py @@ -0,0 +1,743 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=line-too-long +# pylint: disable=too-many-lines +# pylint: disable=unused-argument + +from azure.cli.core.util import sdk_no_wait + + +def loganalytics_data_export_list(client, + resource_group_name, + workspace_name): + return client.list_by_workspace(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def loganalytics_data_export_show(client, + resource_group_name, + workspace_name, + data_export_name): + return client.get(resource_group_name=resource_group_name, + workspace_name=workspace_name, + data_export_name=data_export_name) + + +def loganalytics_data_export_create(client, + resource_group_name, + workspace_name, + data_export_name, + data_export_id=None, + table_names=None, + enable=None, + created_date=None, + last_modified_date=None, + resource_id=None, + event_hub_name=None): + parameters = {} + parameters['data_export_id'] = data_export_id + parameters['table_names'] = table_names + parameters['enable'] = enable + parameters['created_date'] = created_date + parameters['last_modified_date'] = last_modified_date + parameters['resource_id'] = resource_id + parameters['event_hub_name'] = event_hub_name + return client.create_or_update(resource_group_name=resource_group_name, + workspace_name=workspace_name, + data_export_name=data_export_name, + parameters=parameters) + + +def loganalytics_data_export_update(instance, + resource_group_name, + workspace_name, + data_export_name, + data_export_id=None, + table_names=None, + enable=None, + created_date=None, + last_modified_date=None, + resource_id=None, + event_hub_name=None): + if data_export_id is not None: + instance.data_export_id = data_export_id + if table_names is not None: + instance.table_names = table_names + if enable is not None: + instance.enable = enable + if created_date is not None: + instance.created_date = created_date + if last_modified_date is not None: + instance.last_modified_date = last_modified_date + if resource_id is not None: + instance.resource_id = resource_id + if event_hub_name is not None: + instance.event_hub_name = event_hub_name + return instance + + +def loganalytics_data_export_delete(client, + resource_group_name, + workspace_name, + data_export_name): + return client.delete(resource_group_name=resource_group_name, + workspace_name=workspace_name, + data_export_name=data_export_name) + + +def loganalytics_data_source_list(client, + resource_group_name, + workspace_name, + filter_, + skiptoken=None): + return client.list_by_workspace(resource_group_name=resource_group_name, + workspace_name=workspace_name, + filter=filter_, + skiptoken=skiptoken) + + +def loganalytics_data_source_show(client, + resource_group_name, + workspace_name, + data_source_name): + return client.get(resource_group_name=resource_group_name, + workspace_name=workspace_name, + data_source_name=data_source_name) + + +def loganalytics_data_source_create(client, + resource_group_name, + workspace_name, + data_source_name, + properties, + kind, + etag=None, + tags=None): + parameters = {} + parameters['properties'] = properties + parameters['etag'] = etag + parameters['kind'] = kind + parameters['tags'] = tags + return client.create_or_update(resource_group_name=resource_group_name, + workspace_name=workspace_name, + data_source_name=data_source_name, + 
parameters=parameters) + + +def loganalytics_data_source_update(instance, + resource_group_name, + workspace_name, + data_source_name, + properties, + kind, + etag=None, + tags=None): + if properties is not None: + instance.properties = properties + if etag is not None: + instance.etag = etag + if kind is not None: + instance.kind = kind + if tags is not None: + instance.tags = tags + return instance + + +def loganalytics_data_source_delete(client, + resource_group_name, + workspace_name, + data_source_name): + return client.delete(resource_group_name=resource_group_name, + workspace_name=workspace_name, + data_source_name=data_source_name) + + +def loganalytics_intelligence_pack_list(client, + resource_group_name, + workspace_name): + return client.list(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def loganalytics_intelligence_pack_disable(client, + resource_group_name, + workspace_name, + intelligence_pack_name): + return client.disable(resource_group_name=resource_group_name, + workspace_name=workspace_name, + intelligence_pack_name=intelligence_pack_name) + + +def loganalytics_intelligence_pack_enable(client, + resource_group_name, + workspace_name, + intelligence_pack_name): + return client.enable(resource_group_name=resource_group_name, + workspace_name=workspace_name, + intelligence_pack_name=intelligence_pack_name) + + +def loganalytics_linked_service_list(client, + resource_group_name, + workspace_name): + return client.list_by_workspace(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def loganalytics_linked_service_show(client, + resource_group_name, + workspace_name, + linked_service_name): + return client.get(resource_group_name=resource_group_name, + workspace_name=workspace_name, + linked_service_name=linked_service_name) + + +def loganalytics_linked_service_create(client, + resource_group_name, + workspace_name, + linked_service_name, + tags=None, + resource_id=None, + write_access_resource_id=None, + provisioning_state=None, + no_wait=False): + parameters = {} + parameters['tags'] = tags + parameters['resource_id'] = resource_id + parameters['write_access_resource_id'] = write_access_resource_id + parameters['provisioning_state'] = provisioning_state + return sdk_no_wait(no_wait, + client.begin_create_or_update, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + linked_service_name=linked_service_name, + parameters=parameters) + + +def loganalytics_linked_service_update(instance, + resource_group_name, + workspace_name, + linked_service_name, + tags=None, + resource_id=None, + write_access_resource_id=None, + provisioning_state=None, + no_wait=False): + if tags is not None: + instance.tags = tags + if resource_id is not None: + instance.resource_id = resource_id + if write_access_resource_id is not None: + instance.write_access_resource_id = write_access_resource_id + if provisioning_state is not None: + instance.provisioning_state = provisioning_state + return instance + + +def loganalytics_linked_service_delete(client, + resource_group_name, + workspace_name, + linked_service_name, + no_wait=False): + return sdk_no_wait(no_wait, + client.begin_delete, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + linked_service_name=linked_service_name) + + +def loganalytics_linked_storage_account_list(client, + resource_group_name, + workspace_name): + return client.list_by_workspace(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def 
loganalytics_linked_storage_account_show(client, + resource_group_name, + workspace_name, + data_source_type): + return client.get(resource_group_name=resource_group_name, + workspace_name=workspace_name, + data_source_type=data_source_type) + + +def loganalytics_linked_storage_account_create(client, + resource_group_name, + workspace_name, + data_source_type, + storage_account_ids=None): + parameters = {} + parameters['storage_account_ids'] = storage_account_ids + return client.create_or_update(resource_group_name=resource_group_name, + workspace_name=workspace_name, + data_source_type=data_source_type, + parameters=parameters) + + +def loganalytics_linked_storage_account_update(instance, + resource_group_name, + workspace_name, + data_source_type, + storage_account_ids=None): + if storage_account_ids is not None: + instance.storage_account_ids = storage_account_ids + return instance + + +def loganalytics_linked_storage_account_delete(client, + resource_group_name, + workspace_name, + data_source_type): + return client.delete(resource_group_name=resource_group_name, + workspace_name=workspace_name, + data_source_type=data_source_type) + + +def loganalytics_management_group_list(client, + resource_group_name, + workspace_name): + return client.list(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def loganalytics_operation_statuses_show(client, + location, + async_operation_id): + return client.get(location=location, + async_operation_id=async_operation_id) + + +def loganalytics_shared_key_get_shared_key(client, + resource_group_name, + workspace_name): + return client.get_shared_keys(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def loganalytics_shared_key_regenerate(client, + resource_group_name, + workspace_name): + return client.regenerate(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def loganalytics_usage_list(client, + resource_group_name, + workspace_name): + return client.list(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def loganalytics_storage_insight_config_list(client, + resource_group_name, + workspace_name): + return client.list_by_workspace(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def loganalytics_storage_insight_config_show(client, + resource_group_name, + workspace_name, + storage_insight_name): + return client.get(resource_group_name=resource_group_name, + workspace_name=workspace_name, + storage_insight_name=storage_insight_name) + + +def loganalytics_storage_insight_config_create(client, + resource_group_name, + workspace_name, + storage_insight_name, + e_tag=None, + tags=None, + containers=None, + tables=None, + storage_account=None): + parameters = {} + parameters['e_tag'] = e_tag + parameters['tags'] = tags + parameters['containers'] = containers + parameters['tables'] = tables + parameters['storage_account'] = storage_account + return client.create_or_update(resource_group_name=resource_group_name, + workspace_name=workspace_name, + storage_insight_name=storage_insight_name, + parameters=parameters) + + +def loganalytics_storage_insight_config_update(instance, + resource_group_name, + workspace_name, + storage_insight_name, + e_tag=None, + tags=None, + containers=None, + tables=None, + storage_account=None): + if e_tag is not None: + instance.e_tag = e_tag + if tags is not None: + instance.tags = tags + if containers is not None: + instance.containers = containers + if tables is not None: + 
instance.tables = tables + if storage_account is not None: + instance.storage_account = storage_account + return instance + + +def loganalytics_storage_insight_config_delete(client, + resource_group_name, + workspace_name, + storage_insight_name): + return client.delete(resource_group_name=resource_group_name, + workspace_name=workspace_name, + storage_insight_name=storage_insight_name) + + +def loganalytics_saved_search_list(client, + resource_group_name, + workspace_name): + return client.list_by_workspace(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def loganalytics_saved_search_show(client, + resource_group_name, + workspace_name, + saved_search_id): + return client.get(resource_group_name=resource_group_name, + workspace_name=workspace_name, + saved_search_id=saved_search_id) + + +def loganalytics_saved_search_create(client, + resource_group_name, + workspace_name, + saved_search_id, + category, + display_name, + query, + etag=None, + function_alias=None, + function_parameters=None, + version=None, + tags=None): + parameters = {} + parameters['etag'] = etag + parameters['category'] = category + parameters['display_name'] = display_name + parameters['query'] = query + parameters['function_alias'] = function_alias + parameters['function_parameters'] = function_parameters + parameters['version'] = version + parameters['tags'] = tags + return client.create_or_update(resource_group_name=resource_group_name, + workspace_name=workspace_name, + saved_search_id=saved_search_id, + parameters=parameters) + + +def loganalytics_saved_search_update(instance, + resource_group_name, + workspace_name, + saved_search_id, + category, + display_name, + query, + etag=None, + function_alias=None, + function_parameters=None, + version=None, + tags=None): + if etag is not None: + instance.etag = etag + if category is not None: + instance.category = category + if display_name is not None: + instance.display_name = display_name + if query is not None: + instance.query = query + if function_alias is not None: + instance.function_alias = function_alias + if function_parameters is not None: + instance.function_parameters = function_parameters + if version is not None: + instance.version = version + if tags is not None: + instance.tags = tags + return instance + + +def loganalytics_saved_search_delete(client, + resource_group_name, + workspace_name, + saved_search_id): + return client.delete(resource_group_name=resource_group_name, + workspace_name=workspace_name, + saved_search_id=saved_search_id) + + +def loganalytics_available_service_tier_list(client, + resource_group_name, + workspace_name): + return client.list_by_workspace(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def loganalytics_gateway_delete(client, + resource_group_name, + workspace_name, + gateway_id): + return client.delete(resource_group_name=resource_group_name, + workspace_name=workspace_name, + gateway_id=gateway_id) + + +def loganalytics_schema_get(client, + resource_group_name, + workspace_name): + return client.get(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def loganalytics_workspace_purge_purge(client, + resource_group_name, + workspace_name, + table, + filters): + body = {} + body['table'] = table + body['filters'] = filters + return client.purge(resource_group_name=resource_group_name, + workspace_name=workspace_name, + body=body) + + +def loganalytics_workspace_purge_show_purge_status(client, + resource_group_name, + workspace_name, + 
purge_id): + return client.get_purge_status(resource_group_name=resource_group_name, + workspace_name=workspace_name, + purge_id=purge_id) + + +def loganalytics_table_list(client, + resource_group_name, + workspace_name): + return client.list_by_workspace(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def loganalytics_table_show(client, + resource_group_name, + workspace_name, + table_name): + return client.get(resource_group_name=resource_group_name, + workspace_name=workspace_name, + table_name=table_name) + + +def loganalytics_table_update(client, + resource_group_name, + workspace_name, + table_name, + retention_in_days=None): + parameters = {} + parameters['retention_in_days'] = retention_in_days + return client.update(resource_group_name=resource_group_name, + workspace_name=workspace_name, + table_name=table_name, + parameters=parameters) + + +def loganalytics_cluster_list(client, + resource_group_name=None): + if resource_group_name: + return client.list_by_resource_group(resource_group_name=resource_group_name) + return client.list() + + +def loganalytics_cluster_show(client, + resource_group_name, + cluster_name): + return client.get(resource_group_name=resource_group_name, + cluster_name=cluster_name) + + +def loganalytics_cluster_create(client, + resource_group_name, + cluster_name, + location, + tags=None, + sku=None, + is_double_encryption_enabled=None, + is_availability_zones_enabled=None, + billing_type=None, + key_vault_properties=None, + type_=None, + user_assigned_identities=None, + no_wait=False): + parameters = {} + parameters['tags'] = tags + parameters['location'] = location + parameters['sku'] = sku + parameters['is_double_encryption_enabled'] = is_double_encryption_enabled + parameters['is_availability_zones_enabled'] = is_availability_zones_enabled + parameters['billing_type'] = billing_type + parameters['key_vault_properties'] = key_vault_properties + parameters['identity'] = {} + parameters['identity']['type'] = type_ + parameters['identity']['user_assigned_identities'] = user_assigned_identities + return sdk_no_wait(no_wait, + client.begin_create_or_update, + resource_group_name=resource_group_name, + cluster_name=cluster_name, + parameters=parameters) + + +def loganalytics_cluster_update(client, + resource_group_name, + cluster_name, + sku=None, + tags=None, + key_vault_properties=None, + billing_type=None, + type_=None, + user_assigned_identities=None): + parameters = {} + parameters['sku'] = sku + parameters['tags'] = tags + parameters['key_vault_properties'] = key_vault_properties + parameters['billing_type'] = billing_type + parameters['identity'] = {} + parameters['identity']['type'] = type_ + parameters['identity']['user_assigned_identities'] = user_assigned_identities + return client.update(resource_group_name=resource_group_name, + cluster_name=cluster_name, + parameters=parameters) + + +def loganalytics_cluster_delete(client, + resource_group_name, + cluster_name, + no_wait=False): + return sdk_no_wait(no_wait, + client.begin_delete, + resource_group_name=resource_group_name, + cluster_name=cluster_name) + + +def loganalytics_workspace_list(client, + resource_group_name=None): + if resource_group_name: + return client.list_by_resource_group(resource_group_name=resource_group_name) + return client.list() + + +def loganalytics_workspace_show(client, + resource_group_name, + workspace_name): + return client.get(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def 
loganalytics_workspace_create(client, + resource_group_name, + workspace_name, + location, + tags=None, + e_tag=None, + provisioning_state=None, + sku=None, + retention_in_days=None, + public_network_access_for_ingestion=None, + public_network_access_for_query=None, + force_cmk_for_query=None, + features=None, + daily_quota_gb=None, + no_wait=False): + if public_network_access_for_ingestion is None: + public_network_access_for_ingestion = "Enabled" + if public_network_access_for_query is None: + public_network_access_for_query = "Enabled" + parameters = {} + parameters['tags'] = tags + parameters['location'] = location + parameters['e_tag'] = e_tag + parameters['provisioning_state'] = provisioning_state + parameters['sku'] = sku + parameters['retention_in_days'] = retention_in_days + parameters['public_network_access_for_ingestion'] = "Enabled" if public_network_access_for_ingestion is None else public_network_access_for_ingestion + parameters['public_network_access_for_query'] = "Enabled" if public_network_access_for_query is None else public_network_access_for_query + parameters['force_cmk_for_query'] = force_cmk_for_query + parameters['features'] = features + parameters['workspace_capping'] = {} + parameters['workspace_capping']['daily_quota_gb'] = daily_quota_gb + return sdk_no_wait(no_wait, + client.begin_create_or_update, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + parameters=parameters) + + +def loganalytics_workspace_update(client, + resource_group_name, + workspace_name, + tags=None, + provisioning_state=None, + sku=None, + retention_in_days=None, + public_network_access_for_ingestion=None, + public_network_access_for_query=None, + force_cmk_for_query=None, + features=None, + daily_quota_gb=None): + if public_network_access_for_ingestion is None: + public_network_access_for_ingestion = "Enabled" + if public_network_access_for_query is None: + public_network_access_for_query = "Enabled" + parameters = {} + parameters['tags'] = tags + parameters['provisioning_state'] = provisioning_state + parameters['sku'] = sku + parameters['retention_in_days'] = retention_in_days + parameters['public_network_access_for_ingestion'] = "Enabled" if public_network_access_for_ingestion is None else public_network_access_for_ingestion + parameters['public_network_access_for_query'] = "Enabled" if public_network_access_for_query is None else public_network_access_for_query + parameters['force_cmk_for_query'] = force_cmk_for_query + parameters['features'] = features + parameters['workspace_capping'] = {} + parameters['workspace_capping']['daily_quota_gb'] = daily_quota_gb + return client.update(resource_group_name=resource_group_name, + workspace_name=workspace_name, + parameters=parameters) + + +def loganalytics_workspace_delete(client, + resource_group_name, + workspace_name, + force=None, + no_wait=False): + return sdk_no_wait(no_wait, + client.begin_delete, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + force=force) + + +def loganalytics_deleted_workspace_list(client, + resource_group_name=None): + if resource_group_name: + return client.list_by_resource_group(resource_group_name=resource_group_name) + return client.list() diff --git a/src/loganalytics/azext_loganalytics/manual/__init__.py b/src/loganalytics/azext_loganalytics/manual/__init__.py new file mode 100644 index 00000000000..c9cfdc73e77 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/manual/__init__.py @@ -0,0 +1,12 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/src/loganalytics/azext_loganalytics/tests/__init__.py b/src/loganalytics/azext_loganalytics/tests/__init__.py new file mode 100644 index 00000000000..70488e93851 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/tests/__init__.py @@ -0,0 +1,116 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +import inspect +import logging +import os +import sys +import traceback +import datetime as dt + +from azure.core.exceptions import AzureError +from azure.cli.testsdk.exceptions import CliTestError, CliExecutionError, JMESPathCheckAssertionError + + +logger = logging.getLogger('azure.cli.testsdk') +logger.addHandler(logging.StreamHandler()) +__path__ = __import__('pkgutil').extend_path(__path__, __name__) +exceptions = [] +test_map = dict() +SUCCESSED = "successed" +FAILED = "failed" + + +def try_manual(func): + def import_manual_function(origin_func): + from importlib import import_module + decorated_path = inspect.getfile(origin_func).lower() + module_path = __path__[0].lower() + if not decorated_path.startswith(module_path): + raise Exception("Decorator can only be used in submodules!") + manual_path = os.path.join( + decorated_path[module_path.rfind(os.path.sep) + 1:]) + manual_file_path, manual_file_name = os.path.split(manual_path) + module_name, _ = os.path.splitext(manual_file_name) + manual_module = "..manual." 
+ \ + ".".join(manual_file_path.split(os.path.sep) + [module_name, ]) + return getattr(import_module(manual_module, package=__name__), origin_func.__name__) + + def get_func_to_call(): + func_to_call = func + try: + func_to_call = import_manual_function(func) + logger.info("Found manual override for %s(...)", func.__name__) + except (ImportError, AttributeError): + pass + return func_to_call + + def wrapper(*args, **kwargs): + func_to_call = get_func_to_call() + logger.info("running %s()...", func.__name__) + try: + test_map[func.__name__] = dict() + test_map[func.__name__]["result"] = SUCCESSED + test_map[func.__name__]["error_message"] = "" + test_map[func.__name__]["error_stack"] = "" + test_map[func.__name__]["error_normalized"] = "" + test_map[func.__name__]["start_dt"] = dt.datetime.utcnow() + ret = func_to_call(*args, **kwargs) + except (AssertionError, AzureError, CliTestError, CliExecutionError, SystemExit, + JMESPathCheckAssertionError) as e: + use_exception_cache = os.getenv("TEST_EXCEPTION_CACHE") + if use_exception_cache is None or use_exception_cache.lower() != "true": + raise + test_map[func.__name__]["end_dt"] = dt.datetime.utcnow() + test_map[func.__name__]["result"] = FAILED + test_map[func.__name__]["error_message"] = str(e).replace("\r\n", " ").replace("\n", " ")[:500] + test_map[func.__name__]["error_stack"] = traceback.format_exc().replace( + "\r\n", " ").replace("\n", " ")[:500] + logger.info("--------------------------------------") + logger.info("step exception: %s", e) + logger.error("--------------------------------------") + logger.error("step exception in %s: %s", func.__name__, e) + logger.info(traceback.format_exc()) + exceptions.append((func.__name__, sys.exc_info())) + else: + test_map[func.__name__]["end_dt"] = dt.datetime.utcnow() + return ret + + if inspect.isclass(func): + return get_func_to_call() + return wrapper + + +def calc_coverage(filename): + filename = filename.split(".")[0] + coverage_name = filename + "_coverage.md" + with open(coverage_name, "w") as f: + f.write("|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt|\n") + total = len(test_map) + covered = 0 + for k, v in test_map.items(): + if not k.startswith("step_"): + total -= 1 + continue + if v["result"] == SUCCESSED: + covered += 1 + f.write("|{step_name}|{result}|{error_message}|{error_stack}|{error_normalized}|{start_dt}|" + "{end_dt}|\n".format(step_name=k, **v)) + f.write("Coverage: {}/{}\n".format(covered, total)) + print("Create coverage\n", file=sys.stderr) + + +def raise_if(): + if exceptions: + if len(exceptions) <= 1: + raise exceptions[0][1][1] + message = "{}\nFollowed with exceptions in other steps:\n".format(str(exceptions[0][1][1])) + message += "\n".join(["{}: {}".format(h[0], h[1][1]) for h in exceptions[1:]]) + raise exceptions[0][1][0](message).with_traceback(exceptions[0][1][2]) diff --git a/src/loganalytics/azext_loganalytics/tests/latest/__init__.py b/src/loganalytics/azext_loganalytics/tests/latest/__init__.py new file mode 100644 index 00000000000..c9cfdc73e77 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/tests/latest/__init__.py @@ -0,0 +1,12 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/src/loganalytics/azext_loganalytics/tests/latest/example_steps.py b/src/loganalytics/azext_loganalytics/tests/latest/example_steps.py new file mode 100644 index 00000000000..68a66cb5721 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/tests/latest/example_steps.py @@ -0,0 +1,658 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + + +from .. import try_manual + + +# EXAMPLE: /Workspaces/put/WorkspacesCreate +@try_manual +def step_workspace_create(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics workspace create ' + '--location "australiasoutheast" ' + '--retention-in-days 30 ' + '--sku name="PerGB2018" ' + '--tags tag1="val1" ' + '--resource-group "{rg_8}" ' + '--name "{rg_8}"', + checks=[]) + test.cmd('az loganalytics workspace wait --created ' + '--resource-group "{rg_8}" ' + '--name "{rg_8}"', + checks=checks) + + +# EXAMPLE: /Workspaces/get/WorkspaceGet +@try_manual +def step_workspace_show(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics workspace show ' + '--resource-group "{rg_8}" ' + '--name "{rg_8}"', + checks=checks) + + +# EXAMPLE: /Workspaces/get/WorkspacesGet +@try_manual +def step_workspace_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics workspace list ' + '--resource-group "{rg_8}"', + checks=checks) + + +# EXAMPLE: /Workspaces/get/WorkspacesSubscriptionList +@try_manual +def step_workspace_list2(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics workspace list ' + '-g ""', + checks=checks) + + +# EXAMPLE: /Workspaces/patch/WorkspacesPatch +@try_manual +def step_workspace_update(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics workspace update ' + '--retention-in-days 30 ' + '--sku name="PerGB2018" ' + '--daily-quota-gb -1 ' + '--resource-group "{rg_8}" ' + '--name "{rg_8}"', + checks=checks) + + +# EXAMPLE: /AvailableServiceTiers/get/AvailableServiceTiers +@try_manual +def step_available_service_tier_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics available-service-tier list ' + '--resource-group "{rg_4}" ' + '--workspace-name "{myWorkspace5}"', + checks=checks) + + +# EXAMPLE: /Clusters/put/ClustersCreate +@try_manual +def step_cluster_create(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics cluster create ' + '--name "{rg_8}" ' + '--location "australiasoutheast" ' + '--sku name="CapacityReservation" capacity=1000 ' + '--tags tag1="val1" ' + 
'--resource-group "{rg_8}"', + checks=[]) + test.cmd('az loganalytics cluster wait --created ' + '--name "{rg_8}" ' + '--resource-group "{rg_8}"', + checks=checks) + + +# EXAMPLE: /Clusters/get/ClustersGet +@try_manual +def step_cluster_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics cluster list ' + '--resource-group "{rg_8}"', + checks=checks) + + +# EXAMPLE: /Clusters/get/ClustersSubscriptionList +@try_manual +def step_cluster_list2(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics cluster list ' + '-g ""', + checks=checks) + + +# EXAMPLE: /Clusters/patch/ClustersPatch +@try_manual +def step_cluster_update(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics cluster update ' + '--name "{rg_8}" ' + '--type "UserAssigned" ' + '--user-assigned-identities "{{\\"/subscriptions/00000000-0000-0000-0000-00000000000/resourcegroups/oiauto' + 'rest6685/providers/Microsoft.ManagedIdentity/userAssignedIdentities/myidentity\\":{{}}}}" ' + '--key-vault-properties key-name="aztest2170cert" key-rsa-size=1024 key-vault-uri="https://aztest2170.vaul' + 't.azure.net" key-version="654ft6c4e63845cbb50fd6fg51540429" ' + '--sku name="CapacityReservation" capacity=1000 ' + '--tags tag1="val1" ' + '--resource-group "{rg_8}"', + checks=checks) + + +# EXAMPLE: /LinkedServices/put/LinkedServicesCreate +@try_manual +def step_linked_service_create(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics linked-service create ' + '--name "{myLinkedService}" ' + '--write-access-resource-id "/subscriptions/{subscription_id}/resourceGroups/{rg_5}/providers/Microsoft.Op' + 'erationalInsights/clusters/{myCluster}" ' + '--resource-group "{rg_5}" ' + '--workspace-name "{myWorkspace3}"', + checks=[]) + test.cmd('az loganalytics linked-service wait --created ' + '--name "{myLinkedService}" ' + '--resource-group "{rg_5}"', + checks=checks) + + +# EXAMPLE: /LinkedServices/get/LinkedServicesGet +@try_manual +def step_linked_service_show(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics linked-service show ' + '--name "{myLinkedService}" ' + '--resource-group "{rg_5}" ' + '--workspace-name "{myWorkspace3}"', + checks=checks) + + +# EXAMPLE: /LinkedServices/get/LinkedServicesListByWorkspace +@try_manual +def step_linked_service_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics linked-service list ' + '--resource-group "{rg_5}" ' + '--workspace-name "{myWorkspace3}"', + checks=checks) + + +# EXAMPLE: /LinkedServices/delete/LinkedServicesDelete +@try_manual +def step_linked_service_delete(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics linked-service delete -y ' + '--name "{myLinkedService}" ' + '--resource-group "{rg_4}" ' + '--workspace-name "{myWorkspace3}"', + checks=checks) + + +# EXAMPLE: /Clusters/delete/ClustersDelete +@try_manual +def step_cluster_delete(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics cluster delete -y ' + '--name "{rg_8}" ' + '--resource-group "{rg_8}"', + checks=checks) + + +# 
EXAMPLE: /DataExports/put/DataExportCreate
+@try_manual
+def step_data_export_create(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az loganalytics data-export create '
+             '--name "{myDataExport}" '
+             '--resource-id "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.EventHub/namesp'
+             'aces/test" '
+             '--table-names "Heartbeat" '
+             '--resource-group "{rg}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /DataExports/get/DataExportGet
+@try_manual
+def step_data_export_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az loganalytics data-export list '
+             '--resource-group "{rg}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /DataExports/delete/DataExportDelete
+@try_manual
+def step_data_export_delete(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az loganalytics data-export delete -y '
+             '--name "{myDataExport}" '
+             '--resource-group "{rg}" '
+             '--workspace-name "{myWorkspace}"',
+             checks=checks)
+
+
+# EXAMPLE: /DataSources/put/DataSourcesCreate
+@try_manual
+def step_data_source_create(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az loganalytics data-source create '
+             '--name "{myDataSource}" '
+             '--kind "AzureActivityLog" '
+             '--properties "{{\\"LinkedResourceId\\":\\"/subscriptions/{subscription_id}/providers/microsoft.insights/e'
+             'venttypes/management\\"}}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /DataSources/get/DataSourcesGet
+@try_manual
+def step_data_source_show(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az loganalytics data-source show '
+             '--name "{myDataSource}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /DataSources/get/DataSourcesListByWorkspace
+@try_manual
+def step_data_source_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az loganalytics data-source list '
+             '--filter "kind=\'WindowsEvent\'" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /DataSources/delete/DataSourcesDelete
+@try_manual
+def step_data_source_delete(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az loganalytics data-source delete -y '
+             '--name "{myDataSource}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=checks)
+
+
+# EXAMPLE: /DeletedWorkspaces/get/WorkspacesGet
+@try_manual
+def step_deleted_workspace_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az loganalytics deleted-workspace list '
+             '--resource-group "{rg_8}"',
+             checks=checks)
+
+
+# EXAMPLE: /DeletedWorkspaces/get/WorkspacesSubscriptionList
+@try_manual
+def step_deleted_workspace_list2(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None):
+    if checks is None:
+        checks = []
+    test.cmd('az loganalytics deleted-workspace list '
+             '-g ""',
+             checks=checks)
+
+
+# EXAMPLE: /Gateways/delete/DeleteGateways
+@try_manual
+def step_gateway_delete(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8,
checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics gateway delete -y ' + '--gateway-id "00000000-0000-0000-0000-00000000000" ' + '--resource-group "{rg_3}" ' + '--workspace-name "{myWorkspace6}"', + checks=checks) + + +# EXAMPLE: /IntelligencePacks/get/IntelligencePacksList +@try_manual +def step_intelligence_pack_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics intelligence-pack list ' + '--resource-group "{rg_4}" ' + '--workspace-name "{myWorkspace3}"', + checks=checks) + + +# EXAMPLE: /IntelligencePacks/post/IntelligencePacksDisable +@try_manual +def step_intelligence_pack_disable(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics intelligence-pack disable ' + '--name "{myIntelligencePack}" ' + '--resource-group "{rg_4}" ' + '--workspace-name "{myWorkspace3}"', + checks=checks) + + +# EXAMPLE: /IntelligencePacks/post/IntelligencePacksEnable +@try_manual +def step_intelligence_pack_enable(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics intelligence-pack enable ' + '--name "{myIntelligencePack}" ' + '--resource-group "{rg_4}" ' + '--workspace-name "{myWorkspace3}"', + checks=checks) + + +# EXAMPLE: /LinkedStorageAccounts/put/LinkedStorageAccountsCreate +@try_manual +def step_linked_storage_account_create(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics linked-storage-account create ' + '--data-source-type "CustomLogs" ' + '--storage-account-ids "/subscriptions/{subscription_id}/resourceGroups/{rg_5}/providers/Microsoft.Storage' + '/storageAccounts/{sa}" "/subscriptions/{subscription_id}/resourceGroups/{rg_5}/providers/Microsoft.Storag' + 'e/storageAccounts/{sa_2}" ' + '--resource-group "{rg_5}" ' + '--workspace-name "{myWorkspace4}"', + checks=checks) + + +# EXAMPLE: /LinkedStorageAccounts/get/Gets list of linked storage accounts on a workspace. 
+@try_manual +def step_linked_storage_account_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics linked-storage-account list ' + '--resource-group "{rg_5}" ' + '--workspace-name "{myWorkspace4}"', + checks=checks) + + +# EXAMPLE: /LinkedStorageAccounts/get/LinkedStorageAccountsGet +@try_manual +def step_linked_storage_account_show(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics linked-storage-account show ' + '--data-source-type "CustomLogs" ' + '--resource-group "{rg_5}" ' + '--workspace-name "{myWorkspace4}"', + checks=checks) + + +# EXAMPLE: /LinkedStorageAccounts/delete/LinkedStorageAccountsDelete +@try_manual +def step_linked_storage_account_delete(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics linked-storage-account delete -y ' + '--data-source-type "CustomLogs" ' + '--resource-group "{rg_5}" ' + '--workspace-name "{myWorkspace4}"', + checks=checks) + + +# EXAMPLE: /ManagementGroups/get/WorkspacesListManagementGroups +@try_manual +def step_management_group_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics management-group list ' + '--resource-group "{rg_4}" ' + '--workspace-name "{myWorkspace3}"', + checks=checks) + + +# EXAMPLE: /OperationStatuses/get/Get specific operation status +@try_manual +def step_operation_statuses_show(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics operation-statuses show ' + '--async-operation-id "713192d7-503f-477a-9cfe-4efc3ee2bd11" ' + '--location "West US"', + checks=checks) + + +# EXAMPLE: /SavedSearches/put/SavedSearchCreateOrUpdate +@try_manual +def step_saved_search_create(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics saved-search create ' + '--category "Saved Search Test Category" ' + '--display-name "Create or Update Saved Search Test" ' + '--function-alias "heartbeat_func" ' + '--function-parameters "a:int=1" ' + '--query "Heartbeat | summarize Count() by Computer | take a" ' + '--tags name="Group" value="Computer" ' + '--version 2 ' + '--resource-group "{rg_7}" ' + '--saved-search-id "00000000-0000-0000-0000-00000000000" ' + '--workspace-name "{myWorkspace7}"', + checks=checks) + + +# EXAMPLE: /SavedSearches/get/SavedSearchesGet +@try_manual +def step_saved_search_show(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics saved-search show ' + '--resource-group "{rg_7}" ' + '--saved-search-id "00000000-0000-0000-0000-00000000000" ' + '--workspace-name "{myWorkspace7}"', + checks=checks) + + +# EXAMPLE: /SavedSearches/get/SavedSearchesList +@try_manual +def step_saved_search_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics saved-search list ' + '--resource-group "{rg_7}" ' + '--workspace-name "{myWorkspace7}"', + checks=checks) + + +# EXAMPLE: /SavedSearches/delete/SavedSearchesDelete +@try_manual +def step_saved_search_delete(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics saved-search delete -y ' + '--resource-group 
"{rg_7}" ' + '--saved-search-id "00000000-0000-0000-0000-00000000000" ' + '--workspace-name "{myWorkspace7}"', + checks=checks) + + +# EXAMPLE: /Schema/post/WorkspacesGetSchema +@try_manual +def step_schema_get(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics schema get ' + '--resource-group "{rg_5}" ' + '--workspace-name "{myWorkspace8}"', + checks=checks) + + +# EXAMPLE: /SharedKeys/post/RegenerateSharedKeys +@try_manual +def step_shared_key_regenerate(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics shared-key regenerate ' + '--resource-group "{rg_4}" ' + '--workspace-name "{myWorkspace5}"', + checks=checks) + + +# EXAMPLE: /SharedKeys/post/SharedKeysList +@try_manual +def step_shared_key_get_shared_key(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics shared-key get-shared-key ' + '--resource-group "{rg_4}" ' + '--workspace-name "{myWorkspace3}"', + checks=checks) + + +# EXAMPLE: /Tables/get/TablesGet +@try_manual +def step_table_show(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics table show ' + '--resource-group "{rg_8}" ' + '--name "{myTable}" ' + '--workspace-name "{rg_8}"', + checks=checks) + + +# EXAMPLE: /Tables/get/TablesListByWorkspace +@try_manual +def step_table_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics table list ' + '--resource-group "{rg_8}" ' + '--workspace-name "{rg_8}"', + checks=checks) + + +# EXAMPLE: /Tables/patch/TablesSet +@try_manual +def step_table_update(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics table update ' + '--retention-in-days 30 ' + '--resource-group "{rg_8}" ' + '--name "{myTable}" ' + '--workspace-name "{rg_8}"', + checks=checks) + + +# EXAMPLE: /StorageInsightConfigs/put/StorageInsightsCreate +@try_manual +def step_storage_insight_config_create(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics storage-insight-config create ' + '--containers "wad-iis-logfiles" ' + '--storage-account id="/subscriptions/{subscription_id}/resourcegroups/{rg_6}/providers/microsoft.storage/' + 'storageaccounts/{sa_3}" key="1234" ' + '--tables "WADWindowsEventLogsTable" "LinuxSyslogVer2v0" ' + '--resource-group "{rg_3}" ' + '--storage-insight-name "AzTestSI1110" ' + '--workspace-name "{myWorkspace6}"', + checks=checks) + + +# EXAMPLE: /StorageInsightConfigs/get/StorageInsightsGet +@try_manual +def step_storage_insight_config_show(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics storage-insight-config show ' + '--resource-group "{rg_3}" ' + '--storage-insight-name "AzTestSI1110" ' + '--workspace-name "{myWorkspace6}"', + checks=checks) + + +# EXAMPLE: /StorageInsightConfigs/get/StorageInsightsList +@try_manual +def step_storage_insight_config_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics storage-insight-config list ' + '--resource-group "{rg_3}" ' + '--workspace-name "{myWorkspace6}"', + checks=checks) + + +# EXAMPLE: 
/StorageInsightConfigs/delete/StorageInsightsDelete +@try_manual +def step_storage_insight_config_delete(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics storage-insight-config delete -y ' + '--resource-group "{rg_3}" ' + '--storage-insight-name "AzTestSI1110" ' + '--workspace-name "{myWorkspace6}"', + checks=checks) + + +# EXAMPLE: /Usages/get/UsagesList +@try_manual +def step_usage_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics usage list ' + '--resource-group "{rg_4}" ' + '--workspace-name "{myWorkspace3}"', + checks=checks) + + +# EXAMPLE: /WorkspacePurge/get/WorkspacePurgeOperation +@try_manual +def step_workspace_purge_show_purge_status(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics workspace-purge show-purge-status ' + '--purge-id "purge-970318e7-b859-4edb-8903-83b1b54d0b74" ' + '--resource-group "{rg_3}" ' + '--workspace-name "{myWorkspace6}"', + checks=checks) + + +# EXAMPLE: /WorkspacePurge/post/WorkspacePurge +@try_manual +def step_workspace_purge_purge(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics workspace-purge purge ' + '--filters "[{{\\"column\\":\\"TimeGenerated\\",\\"operator\\":\\">\\",\\"value\\":\\"2017-09-01T00:00:00' + '\\"}}]" ' + '--table "Heartbeat" ' + '--resource-group "{rg_3}" ' + '--workspace-name "{myWorkspace6}"', + checks=checks) + + +# EXAMPLE: /Workspaces/delete/WorkspacesDelete +@try_manual +def step_workspace_delete(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=None): + if checks is None: + checks = [] + test.cmd('az loganalytics workspace delete -y ' + '--resource-group "{rg_8}" ' + '--name "{rg_8}"', + checks=checks) + diff --git a/src/loganalytics/azext_loganalytics/tests/latest/test_loganalytics_scenario.py b/src/loganalytics/azext_loganalytics/tests/latest/test_loganalytics_scenario.py new file mode 100644 index 00000000000..924bd6d5264 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/tests/latest/test_loganalytics_scenario.py @@ -0,0 +1,261 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +import os +from azure.cli.testsdk import ScenarioTest +from azure.cli.testsdk import ResourceGroupPreparer +from azure.cli.testsdk import StorageAccountPreparer +from .example_steps import step_workspace_create +from .example_steps import step_workspace_show +from .example_steps import step_workspace_list +from .example_steps import step_workspace_list2 +from .example_steps import step_workspace_update +from .example_steps import step_available_service_tier_list +from .example_steps import step_cluster_create +from .example_steps import step_cluster_list +from .example_steps import step_cluster_list2 +from .example_steps import step_cluster_update +from .example_steps import step_linked_service_create +from .example_steps import step_linked_service_show +from .example_steps import step_linked_service_list +from .example_steps import step_linked_service_delete +from .example_steps import step_cluster_delete +from .example_steps import step_data_export_create +from .example_steps import step_data_export_list +from .example_steps import step_data_export_delete +from .example_steps import step_data_source_create +from .example_steps import step_data_source_show +from .example_steps import step_data_source_list +from .example_steps import step_data_source_delete +from .example_steps import step_deleted_workspace_list +from .example_steps import step_deleted_workspace_list2 +from .example_steps import step_gateway_delete +from .example_steps import step_intelligence_pack_list +from .example_steps import step_intelligence_pack_disable +from .example_steps import step_intelligence_pack_enable +from .example_steps import step_linked_storage_account_create +from .example_steps import step_linked_storage_account_list +from .example_steps import step_linked_storage_account_show +from .example_steps import step_linked_storage_account_delete +from .example_steps import step_management_group_list +from .example_steps import step_operation_statuses_show +from .example_steps import step_saved_search_create +from .example_steps import step_saved_search_show +from .example_steps import step_saved_search_list +from .example_steps import step_saved_search_delete +from .example_steps import step_schema_get +from .example_steps import step_shared_key_regenerate +from .example_steps import step_shared_key_get_shared_key +from .example_steps import step_table_show +from .example_steps import step_table_list +from .example_steps import step_table_update +from .example_steps import step_storage_insight_config_create +from .example_steps import step_storage_insight_config_show +from .example_steps import step_storage_insight_config_list +from .example_steps import step_storage_insight_config_delete +from .example_steps import step_usage_list +from .example_steps import step_workspace_purge_show_purge_status +from .example_steps import step_workspace_purge_purge +from .example_steps import step_workspace_delete +from .. 
import ( + try_manual, + raise_if, + calc_coverage +) + + +TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..')) + + +# Env setup_scenario +@try_manual +def setup_scenario(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8): + pass + + +# Env cleanup_scenario +@try_manual +def cleanup_scenario(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8): + pass + + +# Testcase: Scenario +@try_manual +def call_scenario(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8): + setup_scenario(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8) + step_workspace_create(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[ + test.check("location", "australiasoutheast", case_sensitive=False), + test.check("retentionInDays", 30), + test.check("sku.name", "PerGB2018", case_sensitive=False), + test.check("tags.tag1", "val1", case_sensitive=False), + ]) + step_workspace_show(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_workspace_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[ + test.check('length(@)', 1), + ]) + step_workspace_list2(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[ + test.check('length(@)', 1), + ]) + step_workspace_update(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[ + test.check("location", "australiasoutheast", case_sensitive=False), + test.check("retentionInDays", 30), + test.check("sku.name", "PerGB2018", case_sensitive=False), + test.check("tags.tag1", "val1", case_sensitive=False), + test.check("workspaceCapping.dailyQuotaGb", -1), + ]) + step_available_service_tier_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_cluster_create(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[ + test.check("name", "{rg_8}", case_sensitive=False), + test.check("location", "australiasoutheast", case_sensitive=False), + test.check("sku.name", "CapacityReservation", case_sensitive=False), + test.check("sku.capacity", 1000), + test.check("name", "{rg_8}", case_sensitive=False), + ]) + step_cluster_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[ + test.check('length(@)', 1), + ]) + step_cluster_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[ + test.check('length(@)', 1), + ]) + step_cluster_list2(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[ + test.check('length(@)', 1), + ]) + step_cluster_update(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[ + test.check("location", "australiasoutheast", case_sensitive=False), + test.check("sku.name", "CapacityReservation", case_sensitive=False), + test.check("sku.capacity", 1000), + test.check("tags.tag1", "val1", case_sensitive=False), + test.check("identity.type", "UserAssigned", case_sensitive=False), + ]) + step_linked_service_create(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[ + test.check("writeAccessResourceId", "/subscriptions/{subscription_id}/resourceGroups/{rg_5}/providers/Microsoft" + ".OperationalInsights/clusters/{myCluster}", case_sensitive=False), + ]) + step_linked_service_show(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[ + test.check("writeAccessResourceId", "/subscriptions/{subscription_id}/resourceGroups/{rg_5}/providers/Microsoft" + ".OperationalInsights/clusters/{myCluster}", case_sensitive=False), + ]) + step_linked_service_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[ + test.check('length(@)', 1), + ]) + step_linked_service_delete(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + 
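    # Tear-down order as generated from the examples: the linked service (created above with --write-access-resource-id) is removed before the cluster is deleted. +    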
step_cluster_delete(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_data_export_create(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[ + test.check("name", "{myDataExport}", case_sensitive=False), + test.check("destination.resourceId", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsof" + "t.EventHub/namespaces/test", case_sensitive=False), + test.check("tableNames[0]", "Heartbeat", case_sensitive=False), + ]) + step_data_export_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[ + test.check('length(@)', 1), + ]) + step_data_export_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[ + test.check('length(@)', 1), + ]) + step_data_export_delete(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_data_source_create(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[ + test.check("name", "{myDataSource}", case_sensitive=False), + test.check("kind", "AzureActivityLog", case_sensitive=False), + test.check("properties.LinkedResourceId", "/subscriptions/{subscription_id}/providers/microsoft.insights/eventt" + "ypes/management", case_sensitive=False), + ]) + step_data_source_show(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[ + test.check("name", "{myDataSource}", case_sensitive=False), + test.check("kind", "AzureActivityLog", case_sensitive=False), + test.check("properties.LinkedResourceId", "/subscriptions/{subscription_id}/providers/microsoft.insights/eventt" + "ypes/management", case_sensitive=False), + ]) + step_data_source_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_data_source_delete(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_deleted_workspace_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_deleted_workspace_list2(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_gateway_delete(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_intelligence_pack_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_intelligence_pack_disable(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_intelligence_pack_enable(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_linked_storage_account_create(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_linked_storage_account_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_linked_storage_account_show(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_linked_storage_account_delete(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_management_group_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_operation_statuses_show(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_saved_search_create(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_saved_search_show(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_saved_search_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_saved_search_delete(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_schema_get(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_shared_key_regenerate(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_shared_key_get_shared_key(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_table_show(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, 
rg_7, rg_8, checks=[]) + step_table_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_table_update(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[ + test.check("retentionInDays", 30), + test.check("name", "{myTable}", case_sensitive=False), + ]) + step_storage_insight_config_create(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_storage_insight_config_show(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_storage_insight_config_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_storage_insight_config_delete(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_usage_list(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_workspace_purge_show_purge_status(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_workspace_purge_purge(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + step_workspace_delete(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8, checks=[]) + cleanup_scenario(test, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8) + + +# Test class for Scenario +@try_manual +class LoganalyticsScenarioTest(ScenarioTest): + + def __init__(self, *args, **kwargs): + super(LoganalyticsScenarioTest, self).__init__(*args, **kwargs) + self.kwargs.update({ + 'subscription_id': self.get_subscription_id() + }) + + self.kwargs.update({ + 'myCluster': 'testcluster', + 'myCluster2': 'oiautorest6685', + 'myWorkspace': 'DeWnTest1234', + 'myWorkspace2': 'AzTest9724', + 'myWorkspace3': 'TestLinkWS', + 'myWorkspace4': 'testLinkStorageAccountsWS', + 'myWorkspace5': 'workspace1', + 'myWorkspace6': 'aztest5048', + 'myWorkspace7': 'TestWS', + 'myWorkspace8': 'atlantisdemo', + 'myWorkspace9': 'oiautorest6685', + 'myDataExport': 'export1', + 'myDataSource': 'AzTestDS774', + 'myIntelligencePack': 'ChangeTracking', + 'myLinkedService': 'Cluster', + 'myTable': 'table1', + }) + + + @ResourceGroupPreparer(name_prefix='clitestloganalytics_OIAutoRest1234'[:7], key='rg_2', parameter_name='rg_2') + @ResourceGroupPreparer(name_prefix='clitestloganalytics_mms-eus'[:7], key='rg_5', parameter_name='rg_5') + @ResourceGroupPreparer(name_prefix='clitestloganalytics_OIAutoRest6987'[:7], key='rg_6', parameter_name='rg_6') + @ResourceGroupPreparer(name_prefix='clitestloganalytics_RgTest1'[:7], key='rg', parameter_name='rg') + @ResourceGroupPreparer(name_prefix='clitestloganalytics_OIAutoRest5123'[:7], key='rg_3', parameter_name='rg_3') + @ResourceGroupPreparer(name_prefix='clitestloganalytics_rg1'[:7], key='rg_4', parameter_name='rg_4') + @ResourceGroupPreparer(name_prefix='clitestloganalytics_TestRG'[:7], key='rg_7', parameter_name='rg_7') + @ResourceGroupPreparer(name_prefix='clitestloganalytics_oiautorest6685'[:7], key='rg_8', parameter_name='rg_8') + @StorageAccountPreparer(name_prefix='clitestloganalytics_testStorageA'[:7], key='sa', + resource_group_parameter_name='rg_5') + @StorageAccountPreparer(name_prefix='clitestloganalytics_testStorageB'[:7], key='sa_2', + resource_group_parameter_name='rg_5') + @StorageAccountPreparer(name_prefix='clitestloganalytics_AzTestFakeSA9945'[:7], key='sa_3', + resource_group_parameter_name='rg_6') + def test_loganalytics_Scenario(self, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8): + call_scenario(self, rg_2, rg_5, rg_6, rg, rg_3, rg_4, rg_7, rg_8) + calc_coverage(__file__) + raise_if() + diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/__init__.py b/src/loganalytics/azext_loganalytics/vendored_sdks/__init__.py 
new file mode 100644 index 00000000000..c9cfdc73e77 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/__init__.py @@ -0,0 +1,12 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/__init__.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/__init__.py new file mode 100644 index 00000000000..ba03dda50e9 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/__init__.py @@ -0,0 +1,16 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._operational_insights_management_client import OperationalInsightsManagementClient +__all__ = ['OperationalInsightsManagementClient'] + +try: + from ._patch import patch_sdk # type: ignore + patch_sdk() +except ImportError: + pass diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/_configuration.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/_configuration.py new file mode 100644 index 00000000000..66abffcd7bc --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/_configuration.py @@ -0,0 +1,69 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any + + from azure.core.credentials import TokenCredential + +VERSION = "unknown" + +class OperationalInsightsManagementClientConfiguration(Configuration): + """Configuration for OperationalInsightsManagementClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The ID of the target subscription. + :type subscription_id: str + """ + + def __init__( + self, + credential, # type: "TokenCredential" + subscription_id, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + super(OperationalInsightsManagementClientConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.subscription_id = subscription_id + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'operationalinsightsmanagementclient/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs # type: Any + ): + # type: (...) -> None + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/_operational_insights_management_client.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/_operational_insights_management_client.py new file mode 100644 index 00000000000..54d60c1f8e7 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/_operational_insights_management_client.py @@ -0,0 +1,164 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.mgmt.core import ARMPipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Optional + + from azure.core.credentials import TokenCredential + +from ._configuration import OperationalInsightsManagementClientConfiguration +from .operations import DataExportsOperations +from .operations import DataSourcesOperations +from .operations import IntelligencePacksOperations +from .operations import LinkedServicesOperations +from .operations import LinkedStorageAccountsOperations +from .operations import ManagementGroupsOperations +from .operations import OperationStatusesOperations +from .operations import SharedKeysOperations +from .operations import UsagesOperations +from .operations import StorageInsightConfigsOperations +from .operations import SavedSearchesOperations +from .operations import AvailableServiceTiersOperations +from .operations import GatewaysOperations +from .operations import SchemaOperations +from .operations import WorkspacePurgeOperations +from .operations import TablesOperations +from .operations import ClustersOperations +from .operations import Operations +from .operations import WorkspacesOperations +from .operations import DeletedWorkspacesOperations +from . import models + + +class OperationalInsightsManagementClient(object): + """Operational Insights Client. + + :ivar data_exports: DataExportsOperations operations + :vartype data_exports: operational_insights_management_client.operations.DataExportsOperations + :ivar data_sources: DataSourcesOperations operations + :vartype data_sources: operational_insights_management_client.operations.DataSourcesOperations + :ivar intelligence_packs: IntelligencePacksOperations operations + :vartype intelligence_packs: operational_insights_management_client.operations.IntelligencePacksOperations + :ivar linked_services: LinkedServicesOperations operations + :vartype linked_services: operational_insights_management_client.operations.LinkedServicesOperations + :ivar linked_storage_accounts: LinkedStorageAccountsOperations operations + :vartype linked_storage_accounts: operational_insights_management_client.operations.LinkedStorageAccountsOperations + :ivar management_groups: ManagementGroupsOperations operations + :vartype management_groups: operational_insights_management_client.operations.ManagementGroupsOperations + :ivar operation_statuses: OperationStatusesOperations operations + :vartype operation_statuses: operational_insights_management_client.operations.OperationStatusesOperations + :ivar shared_keys: SharedKeysOperations operations + :vartype shared_keys: operational_insights_management_client.operations.SharedKeysOperations + :ivar usages: UsagesOperations operations + :vartype usages: operational_insights_management_client.operations.UsagesOperations + :ivar storage_insight_configs: StorageInsightConfigsOperations operations + :vartype storage_insight_configs: operational_insights_management_client.operations.StorageInsightConfigsOperations + :ivar saved_searches: SavedSearchesOperations operations + :vartype saved_searches: operational_insights_management_client.operations.SavedSearchesOperations + :ivar available_service_tiers: AvailableServiceTiersOperations operations + :vartype available_service_tiers: operational_insights_management_client.operations.AvailableServiceTiersOperations + :ivar 
gateways: GatewaysOperations operations + :vartype gateways: operational_insights_management_client.operations.GatewaysOperations + :ivar schema: SchemaOperations operations + :vartype schema: operational_insights_management_client.operations.SchemaOperations + :ivar workspace_purge: WorkspacePurgeOperations operations + :vartype workspace_purge: operational_insights_management_client.operations.WorkspacePurgeOperations + :ivar tables: TablesOperations operations + :vartype tables: operational_insights_management_client.operations.TablesOperations + :ivar clusters: ClustersOperations operations + :vartype clusters: operational_insights_management_client.operations.ClustersOperations + :ivar operations: Operations operations + :vartype operations: operational_insights_management_client.operations.Operations + :ivar workspaces: WorkspacesOperations operations + :vartype workspaces: operational_insights_management_client.operations.WorkspacesOperations + :ivar deleted_workspaces: DeletedWorkspacesOperations operations + :vartype deleted_workspaces: operational_insights_management_client.operations.DeletedWorkspacesOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The ID of the target subscription. + :type subscription_id: str + :param str base_url: Service URL + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + """ + + def __init__( + self, + credential, # type: "TokenCredential" + subscription_id, # type: str + base_url=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + if not base_url: + base_url = 'https://management.azure.com' + self._config = OperationalInsightsManagementClientConfiguration(credential, subscription_id, **kwargs) + self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + + self.data_exports = DataExportsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.data_sources = DataSourcesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.intelligence_packs = IntelligencePacksOperations( + self._client, self._config, self._serialize, self._deserialize) + self.linked_services = LinkedServicesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.linked_storage_accounts = LinkedStorageAccountsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.management_groups = ManagementGroupsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.operation_statuses = OperationStatusesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.shared_keys = SharedKeysOperations( + self._client, self._config, self._serialize, self._deserialize) + self.usages = UsagesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.storage_insight_configs = StorageInsightConfigsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.saved_searches = SavedSearchesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.available_service_tiers = AvailableServiceTiersOperations( + self._client, self._config, 
self._serialize, self._deserialize) + self.gateways = GatewaysOperations( + self._client, self._config, self._serialize, self._deserialize) + self.schema = SchemaOperations( + self._client, self._config, self._serialize, self._deserialize) + self.workspace_purge = WorkspacePurgeOperations( + self._client, self._config, self._serialize, self._deserialize) + self.tables = TablesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.clusters = ClustersOperations( + self._client, self._config, self._serialize, self._deserialize) + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + self.workspaces = WorkspacesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.deleted_workspaces = DeletedWorkspacesOperations( + self._client, self._config, self._serialize, self._deserialize) + + def close(self): + # type: () -> None + self._client.close() + + def __enter__(self): + # type: () -> OperationalInsightsManagementClient + self._client.__enter__() + return self + + def __exit__(self, *exc_details): + # type: (Any) -> None + self._client.__exit__(*exc_details) diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/__init__.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/__init__.py new file mode 100644 index 00000000000..34097a2faa7 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/__init__.py @@ -0,0 +1,10 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._operational_insights_management_client import OperationalInsightsManagementClient +__all__ = ['OperationalInsightsManagementClient'] diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/_configuration.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/_configuration.py new file mode 100644 index 00000000000..f6f6ecce4b7 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/_configuration.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +VERSION = "unknown" + +class OperationalInsightsManagementClientConfiguration(Configuration): + """Configuration for OperationalInsightsManagementClient. + + Note that all parameters used to create this instance are saved as instance + attributes. 
+ + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The ID of the target subscription. + :type subscription_id: str + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + **kwargs: Any + ) -> None: + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + super(OperationalInsightsManagementClientConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.subscription_id = subscription_id + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'operationalinsightsmanagementclient/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs: Any + ) -> None: + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/_operational_insights_management_client.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/_operational_insights_management_client.py new file mode 100644 index 00000000000..cebf5c25d8a --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/_operational_insights_management_client.py @@ -0,0 +1,158 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from typing import Any, Optional, TYPE_CHECKING + +from azure.mgmt.core import AsyncARMPipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +from ._configuration import OperationalInsightsManagementClientConfiguration +from .operations import DataExportsOperations +from .operations import DataSourcesOperations +from .operations import IntelligencePacksOperations +from .operations import LinkedServicesOperations +from .operations import LinkedStorageAccountsOperations +from .operations import ManagementGroupsOperations +from .operations import OperationStatusesOperations +from .operations import SharedKeysOperations +from .operations import UsagesOperations +from .operations import StorageInsightConfigsOperations +from .operations import SavedSearchesOperations +from .operations import AvailableServiceTiersOperations +from .operations import GatewaysOperations +from .operations import SchemaOperations +from .operations import WorkspacePurgeOperations +from .operations import TablesOperations +from .operations import ClustersOperations +from .operations import Operations +from .operations import WorkspacesOperations +from .operations import DeletedWorkspacesOperations +from .. import models + + +class OperationalInsightsManagementClient(object): + """Operational Insights Client. + + :ivar data_exports: DataExportsOperations operations + :vartype data_exports: operational_insights_management_client.aio.operations.DataExportsOperations + :ivar data_sources: DataSourcesOperations operations + :vartype data_sources: operational_insights_management_client.aio.operations.DataSourcesOperations + :ivar intelligence_packs: IntelligencePacksOperations operations + :vartype intelligence_packs: operational_insights_management_client.aio.operations.IntelligencePacksOperations + :ivar linked_services: LinkedServicesOperations operations + :vartype linked_services: operational_insights_management_client.aio.operations.LinkedServicesOperations + :ivar linked_storage_accounts: LinkedStorageAccountsOperations operations + :vartype linked_storage_accounts: operational_insights_management_client.aio.operations.LinkedStorageAccountsOperations + :ivar management_groups: ManagementGroupsOperations operations + :vartype management_groups: operational_insights_management_client.aio.operations.ManagementGroupsOperations + :ivar operation_statuses: OperationStatusesOperations operations + :vartype operation_statuses: operational_insights_management_client.aio.operations.OperationStatusesOperations + :ivar shared_keys: SharedKeysOperations operations + :vartype shared_keys: operational_insights_management_client.aio.operations.SharedKeysOperations + :ivar usages: UsagesOperations operations + :vartype usages: operational_insights_management_client.aio.operations.UsagesOperations + :ivar storage_insight_configs: StorageInsightConfigsOperations operations + :vartype storage_insight_configs: operational_insights_management_client.aio.operations.StorageInsightConfigsOperations + :ivar saved_searches: SavedSearchesOperations operations + :vartype saved_searches: operational_insights_management_client.aio.operations.SavedSearchesOperations + :ivar available_service_tiers: AvailableServiceTiersOperations operations + :vartype available_service_tiers: 
operational_insights_management_client.aio.operations.AvailableServiceTiersOperations + :ivar gateways: GatewaysOperations operations + :vartype gateways: operational_insights_management_client.aio.operations.GatewaysOperations + :ivar schema: SchemaOperations operations + :vartype schema: operational_insights_management_client.aio.operations.SchemaOperations + :ivar workspace_purge: WorkspacePurgeOperations operations + :vartype workspace_purge: operational_insights_management_client.aio.operations.WorkspacePurgeOperations + :ivar tables: TablesOperations operations + :vartype tables: operational_insights_management_client.aio.operations.TablesOperations + :ivar clusters: ClustersOperations operations + :vartype clusters: operational_insights_management_client.aio.operations.ClustersOperations + :ivar operations: Operations operations + :vartype operations: operational_insights_management_client.aio.operations.Operations + :ivar workspaces: WorkspacesOperations operations + :vartype workspaces: operational_insights_management_client.aio.operations.WorkspacesOperations + :ivar deleted_workspaces: DeletedWorkspacesOperations operations + :vartype deleted_workspaces: operational_insights_management_client.aio.operations.DeletedWorkspacesOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The ID of the target subscription. + :type subscription_id: str + :param str base_url: Service URL + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: Optional[str] = None, + **kwargs: Any + ) -> None: + if not base_url: + base_url = 'https://management.azure.com' + self._config = OperationalInsightsManagementClientConfiguration(credential, subscription_id, **kwargs) + self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + + self.data_exports = DataExportsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.data_sources = DataSourcesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.intelligence_packs = IntelligencePacksOperations( + self._client, self._config, self._serialize, self._deserialize) + self.linked_services = LinkedServicesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.linked_storage_accounts = LinkedStorageAccountsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.management_groups = ManagementGroupsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.operation_statuses = OperationStatusesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.shared_keys = SharedKeysOperations( + self._client, self._config, self._serialize, self._deserialize) + self.usages = UsagesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.storage_insight_configs = StorageInsightConfigsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.saved_searches = SavedSearchesOperations( + self._client, self._config, self._serialize, 
self._deserialize) + self.available_service_tiers = AvailableServiceTiersOperations( + self._client, self._config, self._serialize, self._deserialize) + self.gateways = GatewaysOperations( + self._client, self._config, self._serialize, self._deserialize) + self.schema = SchemaOperations( + self._client, self._config, self._serialize, self._deserialize) + self.workspace_purge = WorkspacePurgeOperations( + self._client, self._config, self._serialize, self._deserialize) + self.tables = TablesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.clusters = ClustersOperations( + self._client, self._config, self._serialize, self._deserialize) + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + self.workspaces = WorkspacesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.deleted_workspaces = DeletedWorkspacesOperations( + self._client, self._config, self._serialize, self._deserialize) + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> "OperationalInsightsManagementClient": + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details) -> None: + await self._client.__aexit__(*exc_details) diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/__init__.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/__init__.py new file mode 100644 index 00000000000..0ef7b90f330 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/__init__.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
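For orientation, a minimal usage sketch of the async `OperationalInsightsManagementClient` defined above. The import path and the use of `azure-identity` for the credential are assumptions for illustration only; they are not part of the generated code.

```python
# Minimal sketch, not part of the vendored SDK. Assumes azure-identity is
# installed and that the vendored package is importable under this path.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azext_loganalytics.vendored_sdks.loganalytics.aio import OperationalInsightsManagementClient


async def main() -> None:
    credential = DefaultAzureCredential()
    # The client is an async context manager; on exit it closes its pipeline.
    async with OperationalInsightsManagementClient(credential, "<subscription-id>") as client:
        # Each attribute (data_exports, clusters, workspaces, ...) is an operations
        # group wired to the shared pipeline, serializer and deserializer in __init__.
        async for cluster in client.clusters.list():
            print(cluster.name)
    await credential.close()


asyncio.run(main())
```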
+# -------------------------------------------------------------------------- + +from ._data_exports_operations import DataExportsOperations +from ._data_sources_operations import DataSourcesOperations +from ._intelligence_packs_operations import IntelligencePacksOperations +from ._linked_services_operations import LinkedServicesOperations +from ._linked_storage_accounts_operations import LinkedStorageAccountsOperations +from ._management_groups_operations import ManagementGroupsOperations +from ._operation_statuses_operations import OperationStatusesOperations +from ._shared_keys_operations import SharedKeysOperations +from ._usages_operations import UsagesOperations +from ._storage_insight_configs_operations import StorageInsightConfigsOperations +from ._saved_searches_operations import SavedSearchesOperations +from ._available_service_tiers_operations import AvailableServiceTiersOperations +from ._gateways_operations import GatewaysOperations +from ._schema_operations import SchemaOperations +from ._workspace_purge_operations import WorkspacePurgeOperations +from ._tables_operations import TablesOperations +from ._clusters_operations import ClustersOperations +from ._operations import Operations +from ._workspaces_operations import WorkspacesOperations +from ._deleted_workspaces_operations import DeletedWorkspacesOperations + +__all__ = [ + 'DataExportsOperations', + 'DataSourcesOperations', + 'IntelligencePacksOperations', + 'LinkedServicesOperations', + 'LinkedStorageAccountsOperations', + 'ManagementGroupsOperations', + 'OperationStatusesOperations', + 'SharedKeysOperations', + 'UsagesOperations', + 'StorageInsightConfigsOperations', + 'SavedSearchesOperations', + 'AvailableServiceTiersOperations', + 'GatewaysOperations', + 'SchemaOperations', + 'WorkspacePurgeOperations', + 'TablesOperations', + 'ClustersOperations', + 'Operations', + 'WorkspacesOperations', + 'DeletedWorkspacesOperations', +] diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_available_service_tiers_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_available_service_tiers_operations.py new file mode 100644 index 00000000000..f04ef3b9474 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_available_service_tiers_operations.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class AvailableServiceTiersOperations: + """AvailableServiceTiersOperations async operations. 
+ + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def list_by_workspace( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> List["models.AvailableServiceTier"]: + """Gets the available service tiers for the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: list of AvailableServiceTier, or the result of cls(response) + :rtype: list[~operational_insights_management_client.models.AvailableServiceTier] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[List["models.AvailableServiceTier"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('[AvailableServiceTier]', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/availableServiceTiers'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_clusters_operations.py 
b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_clusters_operations.py new file mode 100644 index 00000000000..0fff92b4182 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_clusters_operations.py @@ -0,0 +1,547 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ClustersOperations: + """ClustersOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_resource_group( + self, + resource_group_name: str, + **kwargs + ) -> AsyncIterable["models.ClusterListResult"]: + """Gets Log Analytics clusters in a resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ClusterListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~operational_insights_management_client.models.ClusterListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ClusterListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters'} # type: ignore + + def list( + self, + **kwargs + ) -> AsyncIterable["models.ClusterListResult"]: + """Gets the Log Analytics clusters in a subscription. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ClusterListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~operational_insights_management_client.models.ClusterListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ClusterListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/clusters'} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + cluster_name: str, + parameters: "models.Cluster", + **kwargs + ) -> Optional["models.Cluster"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Cluster"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_or_update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=63, min_length=4, 
pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'Cluster') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Cluster', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}'} # type: ignore + + async def begin_create_or_update( + self, + resource_group_name: str, + cluster_name: str, + parameters: "models.Cluster", + **kwargs + ) -> AsyncLROPoller["models.Cluster"]: + """Create or update a Log Analytics cluster. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the Log Analytics cluster. + :type cluster_name: str + :param parameters: The parameters required to create or update a Log Analytics cluster. + :type parameters: ~operational_insights_management_client.models.Cluster + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either Cluster or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~operational_insights_management_client.models.Cluster] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + parameters=parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}'} # type: ignore + + async def _delete_initial( + self, + resource_group_name: str, + cluster_name: str, + **kwargs + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, 
**kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}'} # type: ignore + + async def begin_delete( + self, + resource_group_name: str, + cluster_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes a cluster instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: Name of the Log Analytics Cluster. + :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + cluster_name: str, + **kwargs + ) -> "models.Cluster": + """Gets a Log Analytics cluster instance. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: Name of the Log Analytics Cluster. + :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Cluster, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.Cluster + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + cluster_name: str, + parameters: "models.ClusterPatch", + **kwargs + ) -> "models.Cluster": + """Updates a Log Analytics cluster. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: Name of the Log Analytics Cluster. + :type cluster_name: str + :param parameters: The parameters required to patch a Log Analytics cluster. 
+ :type parameters: ~operational_insights_management_client.models.ClusterPatch + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Cluster, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.Cluster + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'ClusterPatch') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_data_exports_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_data_exports_operations.py new file mode 100644 index 00000000000..96546492698 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_data_exports_operations.py @@ -0,0 +1,315 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
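The cluster create and delete calls defined above are long-running operations that return an `AsyncLROPoller`. A sketch of how they might be driven follows; the resource names are placeholders, and the `Cluster` payload is deliberately minimal (a real create also needs the sku and identity settings the service requires).

```python
# Sketch only: placeholder names, minimal Cluster payload.
from azext_loganalytics.vendored_sdks.loganalytics import models
from azext_loganalytics.vendored_sdks.loganalytics.aio import OperationalInsightsManagementClient


async def recreate_cluster(client: OperationalInsightsManagementClient) -> None:
    # begin_create_or_update returns an AsyncLROPoller; result() waits for completion.
    poller = await client.clusters.begin_create_or_update(
        "my-rg", "my-cluster", models.Cluster(location="eastus")
    )
    cluster = await poller.result()
    print(cluster.provisioning_state)

    # begin_delete polls the same way; pass polling=False to skip waiting.
    delete_poller = await client.clusters.begin_delete("my-rg", "my-cluster")
    await delete_poller.result()
```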
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class DataExportsOperations: + """DataExportsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_workspace( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> AsyncIterable["models.DataExportListResult"]: + """Lists the data export instances within a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DataExportListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~operational_insights_management_client.models.DataExportListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DataExportListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('DataExportListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + data_export_name: str, + parameters: "models.DataExport", + **kwargs + ) -> "models.DataExport": + """Create or update a data export. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param data_export_name: The data export rule name. + :type data_export_name: str + :param parameters: The parameters required to create or update a data export. 
+ :type parameters: ~operational_insights_management_client.models.DataExport + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataExport, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.DataExport + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DataExport"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'dataExportName': self._serialize.url("data_export_name", data_export_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z][A-Za-z0-9-]+[A-Za-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'DataExport') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('DataExport', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('DataExport', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports/{dataExportName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + workspace_name: str, + data_export_name: str, + **kwargs + ) -> "models.DataExport": + """Gets a data export instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param data_export_name: The data export rule name. 
+ :type data_export_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: DataExport, or the result of cls(response)
+ :rtype: ~operational_insights_management_client.models.DataExport
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.DataExport"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2020-08-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'),
+ 'dataExportName': self._serialize.url("data_export_name", data_export_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('DataExport', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports/{dataExportName}'} # type: ignore
+
+ async def delete(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ data_export_name: str,
+ **kwargs
+ ) -> None:
+ """Deletes the specified data export in a given workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: The name of the workspace.
+ :type workspace_name: str
+ :param data_export_name: The data export rule name.
+ :type data_export_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'dataExportName': self._serialize.url("data_export_name", data_export_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 404]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports/{dataExportName}'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_data_sources_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_data_sources_operations.py new file mode 100644 index 00000000000..8fab1959e69 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_data_sources_operations.py @@ -0,0 +1,318 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class DataSourcesOperations: + """DataSourcesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + data_source_name: str, + parameters: "models.DataSource", + **kwargs + ) -> "models.DataSource": + """Create or update a data source. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param data_source_name: The name of the datasource resource. + :type data_source_name: str + :param parameters: The parameters required to create or update a datasource. 
+ :type parameters: ~operational_insights_management_client.models.DataSource + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataSource, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.DataSource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DataSource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'dataSourceName': self._serialize.url("data_source_name", data_source_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'DataSource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('DataSource', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('DataSource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources/{dataSourceName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + workspace_name: str, + data_source_name: str, + **kwargs + ) -> None: + """Deletes a data source instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param data_source_name: Name of the datasource. 
+ :type data_source_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'dataSourceName': self._serialize.url("data_source_name", data_source_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources/{dataSourceName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + workspace_name: str, + data_source_name: str, + **kwargs + ) -> "models.DataSource": + """Gets a datasource instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param data_source_name: Name of the datasource. 
+ :type data_source_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataSource, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.DataSource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DataSource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'dataSourceName': self._serialize.url("data_source_name", data_source_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DataSource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources/{dataSourceName}'} # type: ignore + + def list_by_workspace( + self, + resource_group_name: str, + workspace_name: str, + filter: str, + skiptoken: Optional[str] = None, + **kwargs + ) -> AsyncIterable["models.DataSourceListResult"]: + """Gets the first page of data source instances in a workspace with the link to the next page. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param filter: The filter to apply on the operation. + :type filter: str + :param skiptoken: Starting point of the collection of data source instances. 
+ :type skiptoken: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DataSourceListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~operational_insights_management_client.models.DataSourceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DataSourceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if skiptoken is not None: + query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('DataSourceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_deleted_workspaces_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_deleted_workspaces_operations.py new file mode 100644 index 00000000000..3fc6bbb7592 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_deleted_workspaces_operations.py @@ -0,0 +1,180 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class DeletedWorkspacesOperations: + """DeletedWorkspacesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs + ) -> AsyncIterable["models.WorkspaceListResult"]: + """Gets recently deleted workspaces in a subscription, available for recovery. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~operational_insights_management_client.models.WorkspaceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/deletedWorkspaces'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name: str, + **kwargs + ) -> AsyncIterable["models.WorkspaceListResult"]: + """Gets recently deleted workspaces in a resource group, available for recovery. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~operational_insights_management_client.models.WorkspaceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/deletedWorkspaces'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_gateways_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_gateways_operations.py new file mode 100644 index 00000000000..9223d0e0819 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_gateways_operations.py @@ -0,0 +1,98 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class GatewaysOperations: + """GatewaysOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def delete( + self, + resource_group_name: str, + workspace_name: str, + gateway_id: str, + **kwargs + ) -> None: + """Delete a Log Analytics gateway. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param gateway_id: The Log Analytics gateway Id. 
+ :type gateway_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'gatewayId': self._serialize.url("gateway_id", gateway_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/gateways/{gatewayId}'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_intelligence_packs_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_intelligence_packs_operations.py new file mode 100644 index 00000000000..73474e62d9d --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_intelligence_packs_operations.py @@ -0,0 +1,214 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... 
import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class IntelligencePacksOperations: + """IntelligencePacksOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def disable( + self, + resource_group_name: str, + workspace_name: str, + intelligence_pack_name: str, + **kwargs + ) -> None: + """Disables an intelligence pack for a given workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param intelligence_pack_name: The name of the intelligence pack to be disabled. + :type intelligence_pack_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + + # Construct URL + url = self.disable.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'intelligencePackName': self._serialize.url("intelligence_pack_name", intelligence_pack_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + disable.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/intelligencePacks/{intelligencePackName}/Disable'} # type: ignore + + async def enable( + self, + 
resource_group_name: str, + workspace_name: str, + intelligence_pack_name: str, + **kwargs + ) -> None: + """Enables an intelligence pack for a given workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param intelligence_pack_name: The name of the intelligence pack to be enabled. + :type intelligence_pack_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + + # Construct URL + url = self.enable.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'intelligencePackName': self._serialize.url("intelligence_pack_name", intelligence_pack_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + enable.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/intelligencePacks/{intelligencePackName}/Enable'} # type: ignore + + async def list( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> List["models.IntelligencePack"]: + """Lists all the intelligence packs possible and whether they are enabled or disabled for a given + workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: list of IntelligencePack, or the result of cls(response) + :rtype: list[~operational_insights_management_client.models.IntelligencePack] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[List["models.IntelligencePack"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('[IntelligencePack]', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/intelligencePacks'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_linked_services_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_linked_services_operations.py new file mode 100644 index 00000000000..15242e20ebb --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_linked_services_operations.py @@ -0,0 +1,438 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class LinkedServicesOperations: + """LinkedServicesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + linked_service_name: str, + parameters: "models.LinkedService", + **kwargs + ) -> "models.LinkedService": + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedService"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_or_update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'LinkedService') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, 
header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('LinkedService', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('LinkedService', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}'} # type: ignore + + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + linked_service_name: str, + parameters: "models.LinkedService", + **kwargs + ) -> AsyncLROPoller["models.LinkedService"]: + """Create or update a linked service. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param linked_service_name: Name of the linkedServices resource. + :type linked_service_name: str + :param parameters: The parameters required to create or update a linked service. + :type parameters: ~operational_insights_management_client.models.LinkedService + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either LinkedService or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~operational_insights_management_client.models.LinkedService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedService"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + linked_service_name=linked_service_name, + parameters=parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('LinkedService', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}'} # type: ignore + + async def _delete_initial( + self, + resource_group_name: str, + workspace_name: str, + linked_service_name: str, + **kwargs + ) -> Optional["models.LinkedService"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedService"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) 
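+ # NOTE: this helper only issues the raw DELETE request; the public
+ # begin_delete coroutine below wraps it in an AsyncLROPoller (using
+ # AsyncARMPolling unless the caller overrides ``polling``) so the
+ # long-running deletion can be awaited to completion.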
+ + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('LinkedService', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}'} # type: ignore + + async def begin_delete( + self, + resource_group_name: str, + workspace_name: str, + linked_service_name: str, + **kwargs + ) -> AsyncLROPoller["models.LinkedService"]: + """Deletes a linked service instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param linked_service_name: Name of the linked service. + :type linked_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either LinkedService or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~operational_insights_management_client.models.LinkedService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedService"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + linked_service_name=linked_service_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('LinkedService', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + workspace_name: str, + linked_service_name: str, + **kwargs + ) -> "models.LinkedService": + """Gets a linked service instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param linked_service_name: Name of the linked service. 
+ :type linked_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LinkedService, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.LinkedService + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedService"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('LinkedService', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}'} # type: ignore + + def list_by_workspace( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> AsyncIterable["models.LinkedServiceListResult"]: + """Gets the linked services instances in a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either LinkedServiceListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~operational_insights_management_client.models.LinkedServiceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('LinkedServiceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_linked_storage_accounts_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_linked_storage_accounts_operations.py new file mode 100644 index 00000000000..ea51fc8f35e --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_linked_storage_accounts_operations.py @@ -0,0 +1,309 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class LinkedStorageAccountsOperations: + """LinkedStorageAccountsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + data_source_type: Union[str, "models.DataSourceType"], + parameters: "models.LinkedStorageAccountsResource", + **kwargs + ) -> "models.LinkedStorageAccountsResource": + """Create or Update a link relation between current workspace and a group of storage accounts of a + specific data source type. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param data_source_type: Linked storage accounts type. + :type data_source_type: str or ~operational_insights_management_client.models.DataSourceType + :param parameters: The parameters required to create or update linked storage accounts. 
+ :type parameters: ~operational_insights_management_client.models.LinkedStorageAccountsResource + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LinkedStorageAccountsResource, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.LinkedStorageAccountsResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedStorageAccountsResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'dataSourceType': self._serialize.url("data_source_type", data_source_type, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'LinkedStorageAccountsResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('LinkedStorageAccountsResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts/{dataSourceType}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + workspace_name: str, + data_source_type: Union[str, "models.DataSourceType"], + **kwargs + ) -> None: + """Deletes all linked storage accounts of a specific data source type associated with the + specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param data_source_type: Linked storage accounts type. 
+ :type data_source_type: str or ~operational_insights_management_client.models.DataSourceType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'dataSourceType': self._serialize.url("data_source_type", data_source_type, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts/{dataSourceType}'} # type: ignore + + async def get( + self, + resource_group_name: str, + workspace_name: str, + data_source_type: Union[str, "models.DataSourceType"], + **kwargs + ) -> "models.LinkedStorageAccountsResource": + """Gets all linked storage account of a specific data source type associated with the specified + workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param data_source_type: Linked storage accounts type. 
+ :type data_source_type: str or ~operational_insights_management_client.models.DataSourceType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LinkedStorageAccountsResource, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.LinkedStorageAccountsResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedStorageAccountsResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'dataSourceType': self._serialize.url("data_source_type", data_source_type, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('LinkedStorageAccountsResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts/{dataSourceType}'} # type: ignore + + def list_by_workspace( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> AsyncIterable["models.LinkedStorageAccountsListResult"]: + """Gets all linked storage accounts associated with the specified workspace, storage accounts will + be sorted by their data source type. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either LinkedStorageAccountsListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~operational_insights_management_client.models.LinkedStorageAccountsListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedStorageAccountsListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('LinkedStorageAccountsListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_management_groups_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_management_groups_operations.py new file mode 100644 index 00000000000..34e8b35f0b7 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_management_groups_operations.py @@ -0,0 +1,116 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ManagementGroupsOperations: + """ManagementGroupsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> AsyncIterable["models.WorkspaceListManagementGroupsResult"]: + """Gets a list of management groups connected to a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceListManagementGroupsResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~operational_insights_management_client.models.WorkspaceListManagementGroupsResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListManagementGroupsResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('WorkspaceListManagementGroupsResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/managementGroups'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_operation_statuses_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_operation_statuses_operations.py new file mode 100644 index 00000000000..61e6b42e321 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_operation_statuses_operations.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class OperationStatusesOperations: + """OperationStatusesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def get( + self, + location: str, + async_operation_id: str, + **kwargs + ) -> "models.OperationStatus": + """Get the status of a long-running Azure asynchronous operation. + + :param location: The region name of the operation. + :type location: str + :param async_operation_id: The operation Id. 
+ :type async_operation_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: OperationStatus, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.OperationStatus + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.OperationStatus"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'asyncOperationId': self._serialize.url("async_operation_id", async_operation_id, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('OperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/locations/{location}/operationStatuses/{asyncOperationId}'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_operations.py new file mode 100644 index 00000000000..dbfc624ad5e --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_operations.py @@ -0,0 +1,105 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... 
import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class Operations: + """Operations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs + ) -> AsyncIterable["models.OperationListResult"]: + """Lists all of the available OperationalInsights Rest API operations. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~operational_insights_management_client.models.OperationListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('OperationListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.OperationalInsights/operations'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_saved_searches_operations.py 
b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_saved_searches_operations.py new file mode 100644 index 00000000000..1461f9ae5f1 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_saved_searches_operations.py @@ -0,0 +1,288 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class SavedSearchesOperations: + """SavedSearchesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def delete( + self, + resource_group_name: str, + workspace_name: str, + saved_search_id: str, + **kwargs + ) -> None: + """Deletes the specified saved search in a given workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param saved_search_id: The id of the saved search. 
+ :type saved_search_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'savedSearchId': self._serialize.url("saved_search_id", saved_search_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches/{savedSearchId}'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + saved_search_id: str, + parameters: "models.SavedSearch", + **kwargs + ) -> "models.SavedSearch": + """Creates or updates a saved search for a given workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param saved_search_id: The id of the saved search. + :type saved_search_id: str + :param parameters: The parameters required to save a search. 
+ :type parameters: ~operational_insights_management_client.models.SavedSearch + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SavedSearch, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.SavedSearch + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SavedSearch"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'savedSearchId': self._serialize.url("saved_search_id", saved_search_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'SavedSearch') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SavedSearch', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches/{savedSearchId}'} # type: ignore + + async def get( + self, + resource_group_name: str, + workspace_name: str, + saved_search_id: str, + **kwargs + ) -> "models.SavedSearch": + """Gets the specified saved search for a given workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param saved_search_id: The id of the saved search. 
+ :type saved_search_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SavedSearch, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.SavedSearch + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SavedSearch"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'savedSearchId': self._serialize.url("saved_search_id", saved_search_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SavedSearch', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches/{savedSearchId}'} # type: ignore + + async def list_by_workspace( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> "models.SavedSearchesListResult": + """Gets the saved searches for a given Log Analytics Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SavedSearchesListResult, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.SavedSearchesListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SavedSearchesListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SavedSearchesListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_schema_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_schema_operations.py new file mode 100644 index 00000000000..fef439d578a --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_schema_operations.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class SchemaOperations: + """SchemaOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def get( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> "models.SearchGetSchemaResponse": + """Gets the schema for a given workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SearchGetSchemaResponse, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.SearchGetSchemaResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SearchGetSchemaResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SearchGetSchemaResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/schema'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_shared_keys_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_shared_keys_operations.py new file mode 100644 index 00000000000..c07a4c9a0ce --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_shared_keys_operations.py @@ -0,0 +1,158 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class SharedKeysOperations: + """SharedKeysOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def get_shared_keys( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> "models.SharedKeys": + """Gets the shared keys for a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SharedKeys, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.SharedKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SharedKeys"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get_shared_keys.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SharedKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_shared_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/sharedKeys'} # type: ignore + + async def regenerate( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> "models.SharedKeys": + """Regenerates the shared keys for a Log Analytics Workspace. These keys are used to connect + Microsoft Operational Insights agents to the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SharedKeys, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.SharedKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SharedKeys"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.regenerate.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SharedKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + regenerate.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/regenerateSharedKey'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_storage_insight_configs_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_storage_insight_configs_operations.py new file mode 100644 index 00000000000..c406c6d2abd --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_storage_insight_configs_operations.py @@ -0,0 +1,309 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
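As a quick orientation for how these generated async shared-key operations are typically driven, here is a minimal sketch. The import path, the `OperationalInsightsManagementClient` class name, its `(credential, subscription_id)` constructor, the `shared_keys` attribute, and the `primary_shared_key` property are assumptions inferred from the docstring namespace, not something this diff shows.
```
import asyncio

from azure.identity.aio import DefaultAzureCredential

# Hypothetical import path for the vendored async client; class and module names
# are assumptions, not part of this diff.
from azext_loganalytics.vendored_sdks.loganalytics.aio import OperationalInsightsManagementClient


async def main() -> None:
    credential = DefaultAzureCredential()
    client = OperationalInsightsManagementClient(credential, "<subscription-id>")

    # POST .../sharedKeys returns the workspace keys (attribute name assumed).
    keys = await client.shared_keys.get_shared_keys("rg1", "TestLinkWS")
    print(keys.primary_shared_key)

    # POST .../regenerateSharedKey rolls both keys for the workspace.
    await client.shared_keys.regenerate("rg1", "TestLinkWS")

    await client.close()
    await credential.close()


asyncio.run(main())
```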
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class StorageInsightConfigsOperations: + """StorageInsightConfigsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + storage_insight_name: str, + parameters: "models.StorageInsight", + **kwargs + ) -> "models.StorageInsight": + """Create or update a storage insight. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param storage_insight_name: Name of the storageInsightsConfigs resource. + :type storage_insight_name: str + :param parameters: The parameters required to create or update a storage insight. 
+ :type parameters: ~operational_insights_management_client.models.StorageInsight + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageInsight, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.StorageInsight + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.StorageInsight"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'storageInsightName': self._serialize.url("storage_insight_name", storage_insight_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'StorageInsight') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('StorageInsight', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('StorageInsight', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs/{storageInsightName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + workspace_name: str, + storage_insight_name: str, + **kwargs + ) -> "models.StorageInsight": + """Gets a storage insight instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param storage_insight_name: Name of the storageInsightsConfigs resource. 
+ :type storage_insight_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageInsight, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.StorageInsight + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.StorageInsight"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'storageInsightName': self._serialize.url("storage_insight_name", storage_insight_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('StorageInsight', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs/{storageInsightName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + workspace_name: str, + storage_insight_name: str, + **kwargs + ) -> None: + """Deletes a storageInsightsConfigs resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param storage_insight_name: Name of the storageInsightsConfigs resource. 
+ :type storage_insight_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'storageInsightName': self._serialize.url("storage_insight_name", storage_insight_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs/{storageInsightName}'} # type: ignore + + def list_by_workspace( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> AsyncIterable["models.StorageInsightListResult"]: + """Lists the storage insight instances within a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either StorageInsightListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~operational_insights_management_client.models.StorageInsightListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.StorageInsightListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('StorageInsightListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.odata_next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_tables_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_tables_operations.py new file mode 100644 index 00000000000..6507061cb00 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_tables_operations.py @@ -0,0 +1,251 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
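A hedged sketch of consuming the storage-insight operations above, assuming `client` is the already-constructed async management client and that this operation group is attached as `client.storage_insight_configs` (the attribute name is inferred from the class name, not shown in this diff).
```
# Assumes `client` is the generated async management client.
async def show_storage_insights(client, resource_group: str, workspace: str) -> None:
    # list_by_workspace returns an AsyncItemPaged, so it is consumed with `async for`.
    async for insight in client.storage_insight_configs.list_by_workspace(resource_group, workspace):
        print(insight.name)

    # get addresses a single storageInsightsConfigs resource by name.
    insight = await client.storage_insight_configs.get(resource_group, workspace, "AzTestSI1110")
    print(insight.containers)
```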
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class TablesOperations: + """TablesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_workspace( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> AsyncIterable["models.TablesListResult"]: + """Gets all the tables for the specified Log Analytics workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either TablesListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~operational_insights_management_client.models.TablesListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TablesListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('TablesListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables'} # type: ignore + + async def update( + self, + resource_group_name: str, + workspace_name: str, + table_name: str, + parameters: "models.Table", + **kwargs + ) -> "models.Table": + """Updates a Log Analytics workspace table properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param table_name: The name of the table. + :type table_name: str + :param parameters: The parameters required to update table properties. 
+ :type parameters: ~operational_insights_management_client.models.Table + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Table, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.Table + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Table"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'tableName': self._serialize.url("table_name", table_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'Table') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Table', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + workspace_name: str, + table_name: str, + **kwargs + ) -> "models.Table": + """Gets a Log Analytics workspace table. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param table_name: The name of the table. 
+ :type table_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Table, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.Table + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Table"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'tableName': self._serialize.url("table_name", table_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Table', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_usages_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_usages_operations.py new file mode 100644 index 00000000000..5d0c6aa77a3 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_usages_operations.py @@ -0,0 +1,116 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
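The table operations above pair naturally with a small retention-tuning helper. The sketch below assumes the operation group is exposed as `client.tables` and that `models.Table` accepts `retention_in_days`; neither is shown in this diff.
```
async def shorten_table_retention(client, models, resource_group: str, workspace: str) -> None:
    # Enumerate every table in the workspace; the generated list operation returns
    # a single page wrapped in AsyncItemPaged.
    async for table in client.tables.list_by_workspace(resource_group, workspace):
        print(table.name)

    # PATCH one table's properties (retention_in_days is an assumed model field).
    updated = await client.tables.update(
        resource_group, workspace, "Heartbeat", models.Table(retention_in_days=30)
    )
    print(updated.retention_in_days)
```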
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class UsagesOperations: + """UsagesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> AsyncIterable["models.WorkspaceListUsagesResult"]: + """Gets a list of usage metrics for a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceListUsagesResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~operational_insights_management_client.models.WorkspaceListUsagesResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListUsagesResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('WorkspaceListUsagesResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/usages'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_workspace_purge_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_workspace_purge_operations.py new file mode 100644 index 00000000000..7f40c26a640 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_workspace_purge_operations.py @@ -0,0 +1,179 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
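For completeness, a sketch of reading the usage metrics returned by the list operation above; the `client.usages` attribute and the metric fields referenced in the print statement are assumptions about the generated models rather than something this diff shows.
```
async def print_workspace_usage(client, resource_group: str, workspace: str) -> None:
    # Field names (name.value, current_value, limit, unit) follow the usual
    # Operational Insights usage schema and are assumed here.
    async for metric in client.usages.list(resource_group, workspace):
        print(metric.name.value, metric.current_value, "/", metric.limit, metric.unit)
```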
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspacePurgeOperations:
+    """WorkspacePurgeOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~operational_insights_management_client.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def purge(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        body: "models.WorkspacePurgeBody",
+        **kwargs
+    ) -> "models.WorkspacePurgeResponse":
+        """Purges data in a Log Analytics workspace by a set of user-defined filters.
+
+        In order to manage system resources, purge requests are throttled at 50 requests per hour. You
+        should batch the execution of purge requests by sending a single command whose predicate
+        includes all user identities that require purging. Use the in operator to specify multiple
+        identities. You should run the query prior to using it in a purge request to verify that the
+        results are expected.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace.
+        :type workspace_name: str
+        :param body: Describes the body of a request to purge data in a single table of a Log
+         Analytics Workspace.
+ :type body: ~operational_insights_management_client.models.WorkspacePurgeBody + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspacePurgeResponse, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.WorkspacePurgeResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspacePurgeResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.purge.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(body, 'WorkspacePurgeBody') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['x-ms-status-location']=self._deserialize('str', response.headers.get('x-ms-status-location')) + deserialized = self._deserialize('WorkspacePurgeResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + purge.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/purge'} # type: ignore + + async def get_purge_status( + self, + resource_group_name: str, + workspace_name: str, + purge_id: str, + **kwargs + ) -> "models.WorkspacePurgeStatusResponse": + """Gets status of an ongoing purge operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param purge_id: In a purge status request, this is the Id of the operation the status of which + is returned. 
+ :type purge_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspacePurgeStatusResponse, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.WorkspacePurgeStatusResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspacePurgeStatusResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get_purge_status.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'purgeId': self._serialize.url("purge_id", purge_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('WorkspacePurgeStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_purge_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/operations/{purgeId}'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_workspaces_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_workspaces_operations.py new file mode 100644 index 00000000000..c7fa89b3eee --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/aio/operations/_workspaces_operations.py @@ -0,0 +1,558 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
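Because purge completes asynchronously on the service side (the POST returns 202 plus an operation id), callers typically poll `get_purge_status` until it reports completion. A sketch follows; the `client.workspace_purge` attribute, the `WorkspacePurgeBody`/`WorkspacePurgeBodyFilters` constructor arguments, and the `operation_id`/`status` properties are assumptions about the generated models rather than something this diff shows.
```
import asyncio


async def purge_old_heartbeats(client, models, resource_group: str, workspace: str) -> None:
    # Build the purge predicate: one table plus a list of column filters.
    body = models.WorkspacePurgeBody(
        table="Heartbeat",
        filters=[
            models.WorkspacePurgeBodyFilters(
                column="TimeGenerated", operator="<", value="2021-01-01T00:00:00Z"
            )
        ],
    )
    response = await client.workspace_purge.purge(resource_group, workspace, body)

    # Poll the purge status with the operation id from the 202 response.
    status = await client.workspace_purge.get_purge_status(
        resource_group, workspace, response.operation_id
    )
    while status.status == "pending":
        await asyncio.sleep(30)
        status = await client.workspace_purge.get_purge_status(
            resource_group, workspace, response.operation_id
        )
    print(status.status)
```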
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class WorkspacesOperations: + """WorkspacesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs + ) -> AsyncIterable["models.WorkspaceListResult"]: + """Gets the workspaces in a subscription. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~operational_insights_management_client.models.WorkspaceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/workspaces'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name: str, + **kwargs + ) -> AsyncIterable["models.WorkspaceListResult"]: + """Gets workspaces in a resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~operational_insights_management_client.models.WorkspaceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces'} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + parameters: "models.Workspace", + **kwargs + ) -> Optional["models.Workspace"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Workspace"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_or_update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': 
self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'Workspace') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Workspace', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'} # type: ignore + + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + parameters: "models.Workspace", + **kwargs + ) -> AsyncLROPoller["models.Workspace"]: + """Create or update a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param parameters: The parameters required to create or update a workspace. + :type parameters: ~operational_insights_management_client.models.Workspace + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either Workspace or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~operational_insights_management_client.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + parameters=parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'} # type: ignore + + async def _delete_initial( + self, + resource_group_name: str, + workspace_name: str, + force: Optional[bool] = None, + **kwargs + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if force is not None: + query_parameters['force'] = self._serialize.query("force", force, 'bool') + + # Construct headers + header_parameters = {} # type: Dict[str, 
Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'} # type: ignore + + async def begin_delete( + self, + resource_group_name: str, + workspace_name: str, + force: Optional[bool] = None, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes a workspace resource. To recover the workspace, create it again with the same name, in + the same subscription, resource group and location. The name is kept for 14 days and cannot be + used for another workspace. To remove the workspace completely and release the name, use the + force flag. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param force: Deletes the workspace without the recovery option. A workspace that was deleted + with this flag cannot be recovered. + :type force: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + force=force, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> "models.Workspace": + """Gets a workspace instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
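The two long-running operations above, `begin_create_or_update` and `begin_delete`, both hand back an `AsyncLROPoller`, so callers normally await the poller and then its `result()`. A minimal driver is sketched below; `client.workspaces` as the attribute name and `Workspace(location=...)` as the only required field are assumptions not visible in this hunk — only the `begin_*` methods themselves come from this file.

```python
# Hedged sketch: "client.workspaces" and the Workspace keyword arguments are
# assumptions; the LRO behaviour mirrors the generated bodies above.
from typing import Any


async def recreate_workspace(client: Any, models: Any, rg: str, name: str) -> None:
    poller = await client.workspaces.begin_create_or_update(
        resource_group_name=rg,
        workspace_name=name,
        parameters=models.Workspace(location="eastus"),
    )
    workspace = await poller.result()  # waits on the 200/201/202 LRO above
    print(workspace.id, workspace.location)

    # force=True skips the 14-day soft-delete window described in the
    # begin_delete docstring, releasing the workspace name immediately.
    delete_poller = await client.workspaces.begin_delete(
        resource_group_name=rg, workspace_name=name, force=True
    )
    await delete_poller.result()
```

Passing `polling=False` or a saved `continuation_token` to either method switches to the no-polling and resume paths handled in the generated bodies above.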
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Workspace, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.Workspace + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + workspace_name: str, + parameters: "models.WorkspacePatch", + **kwargs + ) -> "models.Workspace": + """Updates a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param parameters: The parameters required to patch a workspace. 
+ :type parameters: ~operational_insights_management_client.models.WorkspacePatch + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Workspace, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.Workspace + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'WorkspacePatch') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/models/__init__.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/models/__init__.py new file mode 100644 index 00000000000..286d9b722b8 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/models/__init__.py @@ -0,0 +1,237 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
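Before the generated models begin, here is a short sketch of the plain (non-LRO) `get` and `update` calls defined above. `client.workspaces` and the `WorkspacePatch(tags=...)` keyword are assumptions; the 404-to-`ResourceNotFoundError` mapping, however, is exactly the `error_map` these methods install.

```python
# Hedged sketch: update() PATCHes a WorkspacePatch body and returns the
# deserialized Workspace on HTTP 200; anything non-2xx surfaces as an
# azure.core HttpResponseError carrying the ARM error payload.
from typing import Any, Optional

from azure.core.exceptions import HttpResponseError, ResourceNotFoundError


async def tag_workspace(client: Any, models: Any, rg: str, name: str) -> Optional[Any]:
    try:
        workspace = await client.workspaces.get(
            resource_group_name=rg, workspace_name=name
        )
    except ResourceNotFoundError:
        return None  # 404 is mapped explicitly in the error_map above

    try:
        return await client.workspaces.update(
            resource_group_name=rg,
            workspace_name=name,
            parameters=models.WorkspacePatch(tags={"env": "test"}),  # assumed field
        )
    except HttpResponseError as exc:
        print(f"PATCH failed for {workspace.id}: {exc.message}")
        raise
```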
+# -------------------------------------------------------------------------- + +try: + from ._models_py3 import AssociatedWorkspace + from ._models_py3 import AvailableServiceTier + from ._models_py3 import AzureEntityResource + from ._models_py3 import CapacityReservationProperties + from ._models_py3 import Cluster + from ._models_py3 import ClusterListResult + from ._models_py3 import ClusterPatch + from ._models_py3 import ClusterSku + from ._models_py3 import CoreSummary + from ._models_py3 import DataExport + from ._models_py3 import DataExportListResult + from ._models_py3 import DataSource + from ._models_py3 import DataSourceFilter + from ._models_py3 import DataSourceListResult + from ._models_py3 import ErrorAdditionalInfo + from ._models_py3 import ErrorDetail + from ._models_py3 import ErrorResponse + from ._models_py3 import Identity + from ._models_py3 import IntelligencePack + from ._models_py3 import KeyVaultProperties + from ._models_py3 import LinkedService + from ._models_py3 import LinkedServiceListResult + from ._models_py3 import LinkedStorageAccountsListResult + from ._models_py3 import LinkedStorageAccountsResource + from ._models_py3 import ManagementGroup + from ._models_py3 import MetricName + from ._models_py3 import Operation + from ._models_py3 import OperationDisplay + from ._models_py3 import OperationListResult + from ._models_py3 import OperationStatus + from ._models_py3 import PrivateLinkScopedResource + from ._models_py3 import ProxyResource + from ._models_py3 import Resource + from ._models_py3 import SavedSearch + from ._models_py3 import SavedSearchesListResult + from ._models_py3 import SearchGetSchemaResponse + from ._models_py3 import SearchMetadata + from ._models_py3 import SearchMetadataSchema + from ._models_py3 import SearchSchemaValue + from ._models_py3 import SearchSort + from ._models_py3 import SharedKeys + from ._models_py3 import StorageAccount + from ._models_py3 import StorageInsight + from ._models_py3 import StorageInsightListResult + from ._models_py3 import StorageInsightStatus + from ._models_py3 import Table + from ._models_py3 import TablesListResult + from ._models_py3 import Tag + from ._models_py3 import TrackedResource + from ._models_py3 import UsageMetric + from ._models_py3 import UserIdentityProperties + from ._models_py3 import Workspace + from ._models_py3 import WorkspaceCapping + from ._models_py3 import WorkspaceFeatures + from ._models_py3 import WorkspaceListManagementGroupsResult + from ._models_py3 import WorkspaceListResult + from ._models_py3 import WorkspaceListUsagesResult + from ._models_py3 import WorkspacePatch + from ._models_py3 import WorkspacePurgeBody + from ._models_py3 import WorkspacePurgeBodyFilters + from ._models_py3 import WorkspacePurgeResponse + from ._models_py3 import WorkspacePurgeStatusResponse + from ._models_py3 import WorkspaceSku +except (SyntaxError, ImportError): + from ._models import AssociatedWorkspace # type: ignore + from ._models import AvailableServiceTier # type: ignore + from ._models import AzureEntityResource # type: ignore + from ._models import CapacityReservationProperties # type: ignore + from ._models import Cluster # type: ignore + from ._models import ClusterListResult # type: ignore + from ._models import ClusterPatch # type: ignore + from ._models import ClusterSku # type: ignore + from ._models import CoreSummary # type: ignore + from ._models import DataExport # type: ignore + from ._models import DataExportListResult # type: ignore + from ._models import 
DataSource # type: ignore + from ._models import DataSourceFilter # type: ignore + from ._models import DataSourceListResult # type: ignore + from ._models import ErrorAdditionalInfo # type: ignore + from ._models import ErrorDetail # type: ignore + from ._models import ErrorResponse # type: ignore + from ._models import Identity # type: ignore + from ._models import IntelligencePack # type: ignore + from ._models import KeyVaultProperties # type: ignore + from ._models import LinkedService # type: ignore + from ._models import LinkedServiceListResult # type: ignore + from ._models import LinkedStorageAccountsListResult # type: ignore + from ._models import LinkedStorageAccountsResource # type: ignore + from ._models import ManagementGroup # type: ignore + from ._models import MetricName # type: ignore + from ._models import Operation # type: ignore + from ._models import OperationDisplay # type: ignore + from ._models import OperationListResult # type: ignore + from ._models import OperationStatus # type: ignore + from ._models import PrivateLinkScopedResource # type: ignore + from ._models import ProxyResource # type: ignore + from ._models import Resource # type: ignore + from ._models import SavedSearch # type: ignore + from ._models import SavedSearchesListResult # type: ignore + from ._models import SearchGetSchemaResponse # type: ignore + from ._models import SearchMetadata # type: ignore + from ._models import SearchMetadataSchema # type: ignore + from ._models import SearchSchemaValue # type: ignore + from ._models import SearchSort # type: ignore + from ._models import SharedKeys # type: ignore + from ._models import StorageAccount # type: ignore + from ._models import StorageInsight # type: ignore + from ._models import StorageInsightListResult # type: ignore + from ._models import StorageInsightStatus # type: ignore + from ._models import Table # type: ignore + from ._models import TablesListResult # type: ignore + from ._models import Tag # type: ignore + from ._models import TrackedResource # type: ignore + from ._models import UsageMetric # type: ignore + from ._models import UserIdentityProperties # type: ignore + from ._models import Workspace # type: ignore + from ._models import WorkspaceCapping # type: ignore + from ._models import WorkspaceFeatures # type: ignore + from ._models import WorkspaceListManagementGroupsResult # type: ignore + from ._models import WorkspaceListResult # type: ignore + from ._models import WorkspaceListUsagesResult # type: ignore + from ._models import WorkspacePatch # type: ignore + from ._models import WorkspacePurgeBody # type: ignore + from ._models import WorkspacePurgeBodyFilters # type: ignore + from ._models import WorkspacePurgeResponse # type: ignore + from ._models import WorkspacePurgeStatusResponse # type: ignore + from ._models import WorkspaceSku # type: ignore + +from ._operational_insights_management_client_enums import ( + BillingType, + ClusterEntityStatus, + ClusterSkuNameEnum, + DataIngestionStatus, + DataSourceKind, + DataSourceType, + IdentityType, + LinkedServiceEntityStatus, + PublicNetworkAccessType, + PurgeState, + SearchSortEnum, + SkuNameEnum, + StorageInsightState, + Type, + WorkspaceEntityStatus, + WorkspaceSkuNameEnum, +) + +__all__ = [ + 'AssociatedWorkspace', + 'AvailableServiceTier', + 'AzureEntityResource', + 'CapacityReservationProperties', + 'Cluster', + 'ClusterListResult', + 'ClusterPatch', + 'ClusterSku', + 'CoreSummary', + 'DataExport', + 'DataExportListResult', + 'DataSource', + 'DataSourceFilter', + 
'DataSourceListResult', + 'ErrorAdditionalInfo', + 'ErrorDetail', + 'ErrorResponse', + 'Identity', + 'IntelligencePack', + 'KeyVaultProperties', + 'LinkedService', + 'LinkedServiceListResult', + 'LinkedStorageAccountsListResult', + 'LinkedStorageAccountsResource', + 'ManagementGroup', + 'MetricName', + 'Operation', + 'OperationDisplay', + 'OperationListResult', + 'OperationStatus', + 'PrivateLinkScopedResource', + 'ProxyResource', + 'Resource', + 'SavedSearch', + 'SavedSearchesListResult', + 'SearchGetSchemaResponse', + 'SearchMetadata', + 'SearchMetadataSchema', + 'SearchSchemaValue', + 'SearchSort', + 'SharedKeys', + 'StorageAccount', + 'StorageInsight', + 'StorageInsightListResult', + 'StorageInsightStatus', + 'Table', + 'TablesListResult', + 'Tag', + 'TrackedResource', + 'UsageMetric', + 'UserIdentityProperties', + 'Workspace', + 'WorkspaceCapping', + 'WorkspaceFeatures', + 'WorkspaceListManagementGroupsResult', + 'WorkspaceListResult', + 'WorkspaceListUsagesResult', + 'WorkspacePatch', + 'WorkspacePurgeBody', + 'WorkspacePurgeBodyFilters', + 'WorkspacePurgeResponse', + 'WorkspacePurgeStatusResponse', + 'WorkspaceSku', + 'BillingType', + 'ClusterEntityStatus', + 'ClusterSkuNameEnum', + 'DataIngestionStatus', + 'DataSourceKind', + 'DataSourceType', + 'IdentityType', + 'LinkedServiceEntityStatus', + 'PublicNetworkAccessType', + 'PurgeState', + 'SearchSortEnum', + 'SkuNameEnum', + 'StorageInsightState', + 'Type', + 'WorkspaceEntityStatus', + 'WorkspaceSkuNameEnum', +] diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/models/_models.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/models/_models.py new file mode 100644 index 00000000000..09bf822c614 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/models/_models.py @@ -0,0 +1,2398 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + + +class AssociatedWorkspace(msrest.serialization.Model): + """The list of Log Analytics workspaces associated with the cluster. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar workspace_id: The id of the assigned workspace. + :vartype workspace_id: str + :ivar workspace_name: The name id the assigned workspace. + :vartype workspace_name: str + :ivar resource_id: The ResourceId id the assigned workspace. + :vartype resource_id: str + :ivar associate_date: The time of workspace association. 
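The models `__init__.py` just shown only re-exports names: it prefers the typed `_models_py3` classes and falls back to the kwargs-only `_models` variants when the Python 3 syntax cannot be imported, so downstream code always imports from the `models` package itself. A small illustration with made-up field values:

```python
# Illustrative only: both import branches expose identical class names, so
# this works regardless of which _models module was actually loaded.
from azext_loganalytics.vendored_sdks.loganalytics.models import (
    ClusterSku,
    DataSourceFilter,
)

sku = ClusterSku(name="CapacityReservation", capacity=1000)
kind_filter = DataSourceFilter(kind="WindowsEvent")
```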
+ :vartype associate_date: str + """ + + _validation = { + 'workspace_id': {'readonly': True}, + 'workspace_name': {'readonly': True}, + 'resource_id': {'readonly': True}, + 'associate_date': {'readonly': True}, + } + + _attribute_map = { + 'workspace_id': {'key': 'workspaceId', 'type': 'str'}, + 'workspace_name': {'key': 'workspaceName', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'associate_date': {'key': 'associateDate', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AssociatedWorkspace, self).__init__(**kwargs) + self.workspace_id = None + self.workspace_name = None + self.resource_id = None + self.associate_date = None + + +class AvailableServiceTier(msrest.serialization.Model): + """Service Tier details. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar service_tier: The name of the Service Tier. Possible values include: "Free", "Standard", + "Premium", "PerNode", "PerGB2018", "Standalone", "CapacityReservation". + :vartype service_tier: str or ~operational_insights_management_client.models.SkuNameEnum + :ivar enabled: True if the Service Tier is enabled for the workspace. + :vartype enabled: bool + :ivar minimum_retention: The minimum retention for the Service Tier, in days. + :vartype minimum_retention: long + :ivar maximum_retention: The maximum retention for the Service Tier, in days. + :vartype maximum_retention: long + :ivar default_retention: The default retention for the Service Tier, in days. + :vartype default_retention: long + :ivar capacity_reservation_level: The capacity reservation level in GB per day. Returned for + the Capacity Reservation Service Tier. + :vartype capacity_reservation_level: long + :ivar last_sku_update: Time when the sku was last updated for the workspace. Returned for the + Capacity Reservation Service Tier. + :vartype last_sku_update: str + """ + + _validation = { + 'service_tier': {'readonly': True}, + 'enabled': {'readonly': True}, + 'minimum_retention': {'readonly': True}, + 'maximum_retention': {'readonly': True}, + 'default_retention': {'readonly': True}, + 'capacity_reservation_level': {'readonly': True}, + 'last_sku_update': {'readonly': True}, + } + + _attribute_map = { + 'service_tier': {'key': 'serviceTier', 'type': 'str'}, + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'minimum_retention': {'key': 'minimumRetention', 'type': 'long'}, + 'maximum_retention': {'key': 'maximumRetention', 'type': 'long'}, + 'default_retention': {'key': 'defaultRetention', 'type': 'long'}, + 'capacity_reservation_level': {'key': 'capacityReservationLevel', 'type': 'long'}, + 'last_sku_update': {'key': 'lastSkuUpdate', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AvailableServiceTier, self).__init__(**kwargs) + self.service_tier = None + self.enabled = None + self.minimum_retention = None + self.maximum_retention = None + self.default_retention = None + self.capacity_reservation_level = None + self.last_sku_update = None + + +class Resource(msrest.serialization.Model): + """Common fields that are returned in the response for all Azure Resource Manager resources. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. 
+ :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + + +class AzureEntityResource(Resource): + """The resource model definition for an Azure Resource Manager resource with an etag. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureEntityResource, self).__init__(**kwargs) + self.etag = None + + +class CapacityReservationProperties(msrest.serialization.Model): + """The Capacity Reservation properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar last_sku_update: The last time Sku was updated. + :vartype last_sku_update: str + :ivar min_capacity: Minimum CapacityReservation value in GB. + :vartype min_capacity: long + :ivar max_capacity: Maximum CapacityReservation value in GB. + :vartype max_capacity: long + """ + + _validation = { + 'last_sku_update': {'readonly': True}, + 'min_capacity': {'readonly': True}, + 'max_capacity': {'readonly': True}, + } + + _attribute_map = { + 'last_sku_update': {'key': 'lastSkuUpdate', 'type': 'str'}, + 'min_capacity': {'key': 'minCapacity', 'type': 'long'}, + 'max_capacity': {'key': 'maxCapacity', 'type': 'long'}, + } + + def __init__( + self, + **kwargs + ): + super(CapacityReservationProperties, self).__init__(**kwargs) + self.last_sku_update = None + self.min_capacity = None + self.max_capacity = None + + +class TrackedResource(Resource): + """The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param tags: A set of tags. Resource tags. 
+ :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TrackedResource, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + self.location = kwargs['location'] + + +class Cluster(TrackedResource): + """The top level Log Analytics cluster resource container. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :param identity: The identity of the resource. + :type identity: ~operational_insights_management_client.models.Identity + :param sku: The sku properties. + :type sku: ~operational_insights_management_client.models.ClusterSku + :ivar cluster_id: The ID associated with the cluster. + :vartype cluster_id: str + :ivar provisioning_state: The provisioning state of the cluster. Possible values include: + "Creating", "Succeeded", "Failed", "Canceled", "Deleting", "ProvisioningAccount", "Updating". + :vartype provisioning_state: str or + ~operational_insights_management_client.models.ClusterEntityStatus + :param is_double_encryption_enabled: Configures whether cluster will use double encryption. + This Property can not be modified after cluster creation. Default value is 'true'. + :type is_double_encryption_enabled: bool + :param is_availability_zones_enabled: Sets whether the cluster will support availability zones. + This can be set as true only in regions where Azure Data Explorer support Availability Zones. + This Property can not be modified after cluster creation. Default value is 'true' if region + supports Availability Zones. + :type is_availability_zones_enabled: bool + :param billing_type: The cluster's billing type. Possible values include: "Cluster", + "Workspaces". + :type billing_type: str or ~operational_insights_management_client.models.BillingType + :param key_vault_properties: The associated key properties. + :type key_vault_properties: ~operational_insights_management_client.models.KeyVaultProperties + :ivar last_modified_date: The last time the cluster was updated. + :vartype last_modified_date: str + :ivar created_date: The cluster creation time. + :vartype created_date: str + :param associated_workspaces: The list of Log Analytics workspaces associated with the cluster. 
+ :type associated_workspaces: + list[~operational_insights_management_client.models.AssociatedWorkspace] + :param capacity_reservation_properties: Additional properties for capacity reservation. + :type capacity_reservation_properties: + ~operational_insights_management_client.models.CapacityReservationProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'cluster_id': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'last_modified_date': {'readonly': True}, + 'created_date': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'sku': {'key': 'sku', 'type': 'ClusterSku'}, + 'cluster_id': {'key': 'properties.clusterId', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'is_double_encryption_enabled': {'key': 'properties.isDoubleEncryptionEnabled', 'type': 'bool'}, + 'is_availability_zones_enabled': {'key': 'properties.isAvailabilityZonesEnabled', 'type': 'bool'}, + 'billing_type': {'key': 'properties.billingType', 'type': 'str'}, + 'key_vault_properties': {'key': 'properties.keyVaultProperties', 'type': 'KeyVaultProperties'}, + 'last_modified_date': {'key': 'properties.lastModifiedDate', 'type': 'str'}, + 'created_date': {'key': 'properties.createdDate', 'type': 'str'}, + 'associated_workspaces': {'key': 'properties.associatedWorkspaces', 'type': '[AssociatedWorkspace]'}, + 'capacity_reservation_properties': {'key': 'properties.capacityReservationProperties', 'type': 'CapacityReservationProperties'}, + } + + def __init__( + self, + **kwargs + ): + super(Cluster, self).__init__(**kwargs) + self.identity = kwargs.get('identity', None) + self.sku = kwargs.get('sku', None) + self.cluster_id = None + self.provisioning_state = None + self.is_double_encryption_enabled = kwargs.get('is_double_encryption_enabled', None) + self.is_availability_zones_enabled = kwargs.get('is_availability_zones_enabled', None) + self.billing_type = kwargs.get('billing_type', None) + self.key_vault_properties = kwargs.get('key_vault_properties', None) + self.last_modified_date = None + self.created_date = None + self.associated_workspaces = kwargs.get('associated_workspaces', None) + self.capacity_reservation_properties = kwargs.get('capacity_reservation_properties', None) + + +class ClusterListResult(msrest.serialization.Model): + """The list clusters operation response. + + :param next_link: The link used to get the next page of recommendations. + :type next_link: str + :param value: A list of Log Analytics clusters. + :type value: list[~operational_insights_management_client.models.Cluster] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[Cluster]'}, + } + + def __init__( + self, + **kwargs + ): + super(ClusterListResult, self).__init__(**kwargs) + self.next_link = kwargs.get('next_link', None) + self.value = kwargs.get('value', None) + + +class ClusterPatch(msrest.serialization.Model): + """The top level Log Analytics cluster resource container. + + :param identity: The identity of the resource. + :type identity: ~operational_insights_management_client.models.Identity + :param sku: The sku properties. 
+ :type sku: ~operational_insights_management_client.models.ClusterSku + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param key_vault_properties: The associated key properties. + :type key_vault_properties: ~operational_insights_management_client.models.KeyVaultProperties + :param billing_type: The cluster's billing type. Possible values include: "Cluster", + "Workspaces". + :type billing_type: str or ~operational_insights_management_client.models.BillingType + """ + + _attribute_map = { + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'sku': {'key': 'sku', 'type': 'ClusterSku'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'key_vault_properties': {'key': 'properties.keyVaultProperties', 'type': 'KeyVaultProperties'}, + 'billing_type': {'key': 'properties.billingType', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ClusterPatch, self).__init__(**kwargs) + self.identity = kwargs.get('identity', None) + self.sku = kwargs.get('sku', None) + self.tags = kwargs.get('tags', None) + self.key_vault_properties = kwargs.get('key_vault_properties', None) + self.billing_type = kwargs.get('billing_type', None) + + +class ClusterSku(msrest.serialization.Model): + """The cluster sku definition. + + :param capacity: The capacity value. + :type capacity: long + :param name: The name of the SKU. Possible values include: "CapacityReservation". + :type name: str or ~operational_insights_management_client.models.ClusterSkuNameEnum + """ + + _attribute_map = { + 'capacity': {'key': 'capacity', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ClusterSku, self).__init__(**kwargs) + self.capacity = kwargs.get('capacity', None) + self.name = kwargs.get('name', None) + + +class CoreSummary(msrest.serialization.Model): + """The core summary of a search. + + All required parameters must be populated in order to send to Azure. + + :param status: The status of a core summary. + :type status: str + :param number_of_documents: Required. The number of documents of a core summary. + :type number_of_documents: long + """ + + _validation = { + 'number_of_documents': {'required': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'number_of_documents': {'key': 'numberOfDocuments', 'type': 'long'}, + } + + def __init__( + self, + **kwargs + ): + super(CoreSummary, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.number_of_documents = kwargs['number_of_documents'] + + +class DataExport(Resource): + """The top level data export resource container. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param data_export_id: The data export rule ID. + :type data_export_id: str + :param table_names: An array of tables to export, for example: [“Heartbeat, SecurityEvent”]. + :type table_names: list[str] + :param enable: Active when enabled. + :type enable: bool + :param created_date: The latest data export rule modification time. 
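The `Cluster` resource above pulls together several of the smaller models in this file: `ClusterSku`, plus `Identity` and `KeyVaultProperties`, which appear further down. A rough sketch of a customer-managed-key cluster payload follows; the values are placeholders and only the field names come from the attribute maps in this file.

```python
# Hedged sketch: illustrative values only; location is required because
# Cluster derives from TrackedResource, and Identity requires 'type'.
from azext_loganalytics.vendored_sdks.loganalytics.models import (
    Cluster,
    ClusterSku,
    Identity,
    KeyVaultProperties,
)

cluster = Cluster(
    location="eastus",
    sku=ClusterSku(name="CapacityReservation", capacity=1000),
    identity=Identity(type="SystemAssigned"),
    key_vault_properties=KeyVaultProperties(
        key_vault_uri="https://contoso-vault.vault.azure.net",
        key_name="cluster-key",
    ),
    is_double_encryption_enabled=True,  # cannot be changed after creation
)
```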
+ :type created_date: str + :param last_modified_date: Date and time when the export was last modified. + :type last_modified_date: str + :param resource_id: The destination resource ID. This can be copied from the Properties entry + of the destination resource in Azure. + :type resource_id: str + :ivar type_properties_destination_type: The type of the destination resource. Possible values + include: "StorageAccount", "EventHub". + :vartype type_properties_destination_type: str or + ~operational_insights_management_client.models.Type + :param event_hub_name: Optional. Allows to define an Event Hub name. Not applicable when + destination is Storage Account. + :type event_hub_name: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'type_properties_destination_type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'data_export_id': {'key': 'properties.dataExportId', 'type': 'str'}, + 'table_names': {'key': 'properties.tableNames', 'type': '[str]'}, + 'enable': {'key': 'properties.enable', 'type': 'bool'}, + 'created_date': {'key': 'properties.createdDate', 'type': 'str'}, + 'last_modified_date': {'key': 'properties.lastModifiedDate', 'type': 'str'}, + 'resource_id': {'key': 'properties.destination.resourceId', 'type': 'str'}, + 'type_properties_destination_type': {'key': 'properties.destination.type', 'type': 'str'}, + 'event_hub_name': {'key': 'properties.destination.metaData.eventHubName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataExport, self).__init__(**kwargs) + self.data_export_id = kwargs.get('data_export_id', None) + self.table_names = kwargs.get('table_names', None) + self.enable = kwargs.get('enable', None) + self.created_date = kwargs.get('created_date', None) + self.last_modified_date = kwargs.get('last_modified_date', None) + self.resource_id = kwargs.get('resource_id', None) + self.type_properties_destination_type = None + self.event_hub_name = kwargs.get('event_hub_name', None) + + +class DataExportListResult(msrest.serialization.Model): + """Result of the request to list data exports. + + :param value: List of data export instances within a workspace.. + :type value: list[~operational_insights_management_client.models.DataExport] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DataExport]'}, + } + + def __init__( + self, + **kwargs + ): + super(DataExportListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class DataSource(Resource): + """Datasources under OMS Workspace. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param properties: Required. The data source properties in raw json format, each kind of data + source have it's own schema. + :type properties: object + :param etag: The ETag of the data source. + :type etag: str + :param kind: Required. 
The kind of the DataSource. Possible values include: "WindowsEvent", + "WindowsPerformanceCounter", "IISLogs", "LinuxSyslog", "LinuxSyslogCollection", + "LinuxPerformanceObject", "LinuxPerformanceCollection", "CustomLog", "CustomLogCollection", + "AzureAuditLog", "AzureActivityLog", "GenericDataSource", "ChangeTrackingCustomPath", + "ChangeTrackingPath", "ChangeTrackingServices", "ChangeTrackingDataTypeConfiguration", + "ChangeTrackingDefaultRegistry", "ChangeTrackingRegistry", "ChangeTrackingLinuxPath", + "LinuxChangeTrackingPath", "ChangeTrackingContentLocation", "WindowsTelemetry", "Office365", + "SecurityWindowsBaselineConfiguration", "SecurityCenterSecurityWindowsBaselineConfiguration", + "SecurityEventCollectionConfiguration", "SecurityInsightsSecurityEventCollectionConfiguration", + "ImportComputerGroup", "NetworkMonitoring", "Itsm", "DnsAnalytics", "ApplicationInsights", + "SqlDataClassification". + :type kind: str or ~operational_insights_management_client.models.DataSourceKind + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'kind': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'object'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__( + self, + **kwargs + ): + super(DataSource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + self.etag = kwargs.get('etag', None) + self.kind = kwargs['kind'] + self.tags = kwargs.get('tags', None) + + +class DataSourceFilter(msrest.serialization.Model): + """DataSource filter. Right now, only filter by kind is supported. + + :param kind: The kind of the DataSource. Possible values include: "WindowsEvent", + "WindowsPerformanceCounter", "IISLogs", "LinuxSyslog", "LinuxSyslogCollection", + "LinuxPerformanceObject", "LinuxPerformanceCollection", "CustomLog", "CustomLogCollection", + "AzureAuditLog", "AzureActivityLog", "GenericDataSource", "ChangeTrackingCustomPath", + "ChangeTrackingPath", "ChangeTrackingServices", "ChangeTrackingDataTypeConfiguration", + "ChangeTrackingDefaultRegistry", "ChangeTrackingRegistry", "ChangeTrackingLinuxPath", + "LinuxChangeTrackingPath", "ChangeTrackingContentLocation", "WindowsTelemetry", "Office365", + "SecurityWindowsBaselineConfiguration", "SecurityCenterSecurityWindowsBaselineConfiguration", + "SecurityEventCollectionConfiguration", "SecurityInsightsSecurityEventCollectionConfiguration", + "ImportComputerGroup", "NetworkMonitoring", "Itsm", "DnsAnalytics", "ApplicationInsights", + "SqlDataClassification". + :type kind: str or ~operational_insights_management_client.models.DataSourceKind + """ + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataSourceFilter, self).__init__(**kwargs) + self.kind = kwargs.get('kind', None) + + +class DataSourceListResult(msrest.serialization.Model): + """The list data source by workspace operation response. + + :param value: A list of datasources. + :type value: list[~operational_insights_management_client.models.DataSource] + :param next_link: The link (url) to the next page of datasources. 
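`DataSource` is the first model here with required constructor inputs beyond `location`: both `properties` (a raw JSON object whose schema depends on the kind) and `kind` must be supplied. A sketch for the `AzureActivityLog` kind is below; the `LinkedResourceId` payload shape is an assumption about how activity-log data sources are configured, not something stated in this file.

```python
# Hedged sketch: kind comes from the DataSourceKind enum above; the properties
# dict is passed through as raw JSON ("each kind has its own schema").
from azext_loganalytics.vendored_sdks.loganalytics.models import DataSource

activity_log_source = DataSource(
    kind="AzureActivityLog",
    properties={
        "LinkedResourceId": "/subscriptions/<subscription-id>/providers/"
                            "microsoft.insights/eventtypes/management"
    },
)
```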
+ :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DataSource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataSourceListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class ErrorAdditionalInfo(msrest.serialization.Model): + """The resource management error additional info. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar type: The additional info type. + :vartype type: str + :ivar info: The additional info. + :vartype info: object + """ + + _validation = { + 'type': {'readonly': True}, + 'info': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'info': {'key': 'info', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorAdditionalInfo, self).__init__(**kwargs) + self.type = None + self.info = None + + +class ErrorDetail(msrest.serialization.Model): + """The error detail. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. + :vartype message: str + :ivar target: The error target. + :vartype target: str + :ivar details: The error details. + :vartype details: list[~operational_insights_management_client.models.ErrorDetail] + :ivar additional_info: The error additional info. + :vartype additional_info: + list[~operational_insights_management_client.models.ErrorAdditionalInfo] + """ + + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + 'target': {'readonly': True}, + 'details': {'readonly': True}, + 'additional_info': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDetail]'}, + 'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorDetail, self).__init__(**kwargs) + self.code = None + self.message = None + self.target = None + self.details = None + self.additional_info = None + + +class ErrorResponse(msrest.serialization.Model): + """Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.). + + :param error: The error object. + :type error: ~operational_insights_management_client.models.ErrorDetail + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorDetail'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.error = kwargs.get('error', None) + + +class Identity(msrest.serialization.Model): + """Identity for the resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar principal_id: The principal ID of resource identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of resource. + :vartype tenant_id: str + :param type: Required. Type of managed service identity. Possible values include: + "SystemAssigned", "UserAssigned", "None". 
+ :type type: str or ~operational_insights_management_client.models.IdentityType + :param user_assigned_identities: The list of user identities associated with the resource. The + user identity dictionary key references will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. + :type user_assigned_identities: dict[str, + ~operational_insights_management_client.models.UserIdentityProperties] + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserIdentityProperties}'}, + } + + def __init__( + self, + **kwargs + ): + super(Identity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = kwargs['type'] + self.user_assigned_identities = kwargs.get('user_assigned_identities', None) + + +class IntelligencePack(msrest.serialization.Model): + """Intelligence Pack containing a string name and boolean indicating if it's enabled. + + :param name: The name of the intelligence pack. + :type name: str + :param enabled: The enabled boolean for the intelligence pack. + :type enabled: bool + :param display_name: The display name of the intelligence pack. + :type display_name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(IntelligencePack, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.enabled = kwargs.get('enabled', None) + self.display_name = kwargs.get('display_name', None) + + +class KeyVaultProperties(msrest.serialization.Model): + """The key vault properties. + + :param key_vault_uri: The Key Vault uri which holds they key associated with the Log Analytics + cluster. + :type key_vault_uri: str + :param key_name: The name of the key associated with the Log Analytics cluster. + :type key_name: str + :param key_version: The version of the key associated with the Log Analytics cluster. + :type key_version: str + :param key_rsa_size: Selected key minimum required size. + :type key_rsa_size: int + """ + + _attribute_map = { + 'key_vault_uri': {'key': 'keyVaultUri', 'type': 'str'}, + 'key_name': {'key': 'keyName', 'type': 'str'}, + 'key_version': {'key': 'keyVersion', 'type': 'str'}, + 'key_rsa_size': {'key': 'keyRsaSize', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(KeyVaultProperties, self).__init__(**kwargs) + self.key_vault_uri = kwargs.get('key_vault_uri', None) + self.key_name = kwargs.get('key_name', None) + self.key_version = kwargs.get('key_version', None) + self.key_rsa_size = kwargs.get('key_rsa_size', None) + + +class LinkedService(Resource): + """The top level Linked service resource container. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. 
+ :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param resource_id: The resource id of the resource that will be linked to the workspace. This + should be used for linking resources which require read access. + :type resource_id: str + :param write_access_resource_id: The resource id of the resource that will be linked to the + workspace. This should be used for linking resources which require write access. + :type write_access_resource_id: str + :param provisioning_state: The provisioning state of the linked service. Possible values + include: "Succeeded", "Deleting", "ProvisioningAccount", "Updating". + :type provisioning_state: str or + ~operational_insights_management_client.models.LinkedServiceEntityStatus + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'resource_id': {'key': 'properties.resourceId', 'type': 'str'}, + 'write_access_resource_id': {'key': 'properties.writeAccessResourceId', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LinkedService, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + self.resource_id = kwargs.get('resource_id', None) + self.write_access_resource_id = kwargs.get('write_access_resource_id', None) + self.provisioning_state = kwargs.get('provisioning_state', None) + + +class LinkedServiceListResult(msrest.serialization.Model): + """The list linked service operation response. + + :param value: The list of linked service instances. + :type value: list[~operational_insights_management_client.models.LinkedService] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[LinkedService]'}, + } + + def __init__( + self, + **kwargs + ): + super(LinkedServiceListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class LinkedStorageAccountsListResult(msrest.serialization.Model): + """The list linked storage accounts service operation response. + + :param value: A list of linked storage accounts instances. + :type value: list[~operational_insights_management_client.models.LinkedStorageAccountsResource] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[LinkedStorageAccountsResource]'}, + } + + def __init__( + self, + **kwargs + ): + super(LinkedStorageAccountsListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class LinkedStorageAccountsResource(Resource): + """Linked storage accounts top level resource container. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar data_source_type: Linked storage accounts type. 
Possible values include: "CustomLogs", + "AzureWatson", "Query", "Alerts". + :vartype data_source_type: str or ~operational_insights_management_client.models.DataSourceType + :param storage_account_ids: Linked storage accounts resources ids. + :type storage_account_ids: list[str] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'data_source_type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'data_source_type': {'key': 'properties.dataSourceType', 'type': 'str'}, + 'storage_account_ids': {'key': 'properties.storageAccountIds', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(LinkedStorageAccountsResource, self).__init__(**kwargs) + self.data_source_type = None + self.storage_account_ids = kwargs.get('storage_account_ids', None) + + +class ManagementGroup(msrest.serialization.Model): + """A management group that is connected to a workspace. + + :param server_count: The number of servers connected to the management group. + :type server_count: int + :param is_gateway: Gets or sets a value indicating whether the management group is a gateway. + :type is_gateway: bool + :param name: The name of the management group. + :type name: str + :param id: The unique ID of the management group. + :type id: str + :param created: The datetime that the management group was created. + :type created: ~datetime.datetime + :param data_received: The last datetime that the management group received data. + :type data_received: ~datetime.datetime + :param version: The version of System Center that is managing the management group. + :type version: str + :param sku: The SKU of System Center that is managing the management group. + :type sku: str + """ + + _attribute_map = { + 'server_count': {'key': 'properties.serverCount', 'type': 'int'}, + 'is_gateway': {'key': 'properties.isGateway', 'type': 'bool'}, + 'name': {'key': 'properties.name', 'type': 'str'}, + 'id': {'key': 'properties.id', 'type': 'str'}, + 'created': {'key': 'properties.created', 'type': 'iso-8601'}, + 'data_received': {'key': 'properties.dataReceived', 'type': 'iso-8601'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'sku': {'key': 'properties.sku', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagementGroup, self).__init__(**kwargs) + self.server_count = kwargs.get('server_count', None) + self.is_gateway = kwargs.get('is_gateway', None) + self.name = kwargs.get('name', None) + self.id = kwargs.get('id', None) + self.created = kwargs.get('created', None) + self.data_received = kwargs.get('data_received', None) + self.version = kwargs.get('version', None) + self.sku = kwargs.get('sku', None) + + +class MetricName(msrest.serialization.Model): + """The name of a metric. + + :param value: The system name of the metric. + :type value: str + :param localized_value: The localized name of the metric. + :type localized_value: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': 'str'}, + 'localized_value': {'key': 'localizedValue', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(MetricName, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.localized_value = kwargs.get('localized_value', None) + + +class Operation(msrest.serialization.Model): + """Supported operation of OperationalInsights resource provider. 
+ + :param name: Operation name: {provider}/{resource}/{operation}. + :type name: str + :param display: Display metadata associated with the operation. + :type display: ~operational_insights_management_client.models.OperationDisplay + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + } + + def __init__( + self, + **kwargs + ): + super(Operation, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display = kwargs.get('display', None) + + +class OperationDisplay(msrest.serialization.Model): + """Display metadata associated with the operation. + + :param provider: Service provider: Microsoft OperationsManagement. + :type provider: str + :param resource: Resource on which the operation is performed etc. + :type resource: str + :param operation: Type of operation: get, read, delete, etc. + :type operation: str + :param description: Description of operation. + :type description: str + """ + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationDisplay, self).__init__(**kwargs) + self.provider = kwargs.get('provider', None) + self.resource = kwargs.get('resource', None) + self.operation = kwargs.get('operation', None) + self.description = kwargs.get('description', None) + + +class OperationListResult(msrest.serialization.Model): + """Result of the request to list solution operations. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param value: List of solution operations supported by the OperationsManagement resource + provider. + :type value: list[~operational_insights_management_client.models.Operation] + :ivar next_link: URL to get the next set of operation list results if there are any. + :vartype next_link: str + """ + + _validation = { + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Operation]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = None + + +class OperationStatus(msrest.serialization.Model): + """The status of operation. + + :param id: The operation Id. + :type id: str + :param name: The operation name. + :type name: str + :param start_time: The start time of the operation. + :type start_time: str + :param end_time: The end time of the operation. + :type end_time: str + :param status: The status of the operation. + :type status: str + :param error: The error detail of the operation if any. 
+ :type error: ~operational_insights_management_client.models.ErrorResponse + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'end_time': {'key': 'endTime', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'ErrorResponse'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationStatus, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.status = kwargs.get('status', None) + self.error = kwargs.get('error', None) + + +class PrivateLinkScopedResource(msrest.serialization.Model): + """The private link scope resource reference. + + :param resource_id: The full resource Id of the private link scope resource. + :type resource_id: str + :param scope_id: The private link scope unique Identifier. + :type scope_id: str + """ + + _attribute_map = { + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'scope_id': {'key': 'scopeId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkScopedResource, self).__init__(**kwargs) + self.resource_id = kwargs.get('resource_id', None) + self.scope_id = kwargs.get('scope_id', None) + + +class ProxyResource(Resource): + """The resource model definition for a Azure Resource Manager proxy resource. It will not have tags and a location. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ProxyResource, self).__init__(**kwargs) + + +class SavedSearch(Resource): + """Value object for saved search results. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param etag: The ETag of the saved search. To override an existing saved search, use "*" or + specify the current Etag. + :type etag: str + :param category: Required. The category of the saved search. This helps the user to find a + saved search faster. + :type category: str + :param display_name: Required. Saved search display name. + :type display_name: str + :param query: Required. The query expression for the saved search. 
+ :type query: str + :param function_alias: The function alias if query serves as a function. + :type function_alias: str + :param function_parameters: The optional function parameters if query serves as a function. + Value should be in the following format: 'param-name1:type1 = default_value1, param-name2:type2 + = default_value2'. For more examples and proper syntax please refer to + https://docs.microsoft.com/en-us/azure/kusto/query/functions/user-defined-functions. + :type function_parameters: str + :param version: The version number of the query language. The current version is 2 and is the + default. + :type version: long + :param tags: A set of tags. The tags attached to the saved search. + :type tags: list[~operational_insights_management_client.models.Tag] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'category': {'required': True}, + 'display_name': {'required': True}, + 'query': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'category': {'key': 'properties.category', 'type': 'str'}, + 'display_name': {'key': 'properties.displayName', 'type': 'str'}, + 'query': {'key': 'properties.query', 'type': 'str'}, + 'function_alias': {'key': 'properties.functionAlias', 'type': 'str'}, + 'function_parameters': {'key': 'properties.functionParameters', 'type': 'str'}, + 'version': {'key': 'properties.version', 'type': 'long'}, + 'tags': {'key': 'properties.tags', 'type': '[Tag]'}, + } + + def __init__( + self, + **kwargs + ): + super(SavedSearch, self).__init__(**kwargs) + self.etag = kwargs.get('etag', None) + self.category = kwargs['category'] + self.display_name = kwargs['display_name'] + self.query = kwargs['query'] + self.function_alias = kwargs.get('function_alias', None) + self.function_parameters = kwargs.get('function_parameters', None) + self.version = kwargs.get('version', None) + self.tags = kwargs.get('tags', None) + + +class SavedSearchesListResult(msrest.serialization.Model): + """The saved search list operation response. + + :param value: The array of result values. + :type value: list[~operational_insights_management_client.models.SavedSearch] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SavedSearch]'}, + } + + def __init__( + self, + **kwargs + ): + super(SavedSearchesListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class SearchGetSchemaResponse(msrest.serialization.Model): + """The get schema operation response. + + :param metadata: The metadata from search results. + :type metadata: ~operational_insights_management_client.models.SearchMetadata + :param value: The array of result values. + :type value: list[~operational_insights_management_client.models.SearchSchemaValue] + """ + + _attribute_map = { + 'metadata': {'key': 'metadata', 'type': 'SearchMetadata'}, + 'value': {'key': 'value', 'type': '[SearchSchemaValue]'}, + } + + def __init__( + self, + **kwargs + ): + super(SearchGetSchemaResponse, self).__init__(**kwargs) + self.metadata = kwargs.get('metadata', None) + self.value = kwargs.get('value', None) + + +class SearchMetadata(msrest.serialization.Model): + """Metadata for search results. + + :param search_id: The request id of the search. + :type search_id: str + :param result_type: The search result type. 
+ :type result_type: str + :param total: The total number of search results. + :type total: long + :param top: The number of top search results. + :type top: long + :param id: The id of the search results request. + :type id: str + :param core_summaries: The core summaries. + :type core_summaries: list[~operational_insights_management_client.models.CoreSummary] + :param status: The status of the search results. + :type status: str + :param start_time: The start time for the search. + :type start_time: ~datetime.datetime + :param last_updated: The time of last update. + :type last_updated: ~datetime.datetime + :param e_tag: The ETag of the search results. + :type e_tag: str + :param sort: How the results are sorted. + :type sort: list[~operational_insights_management_client.models.SearchSort] + :param request_time: The request time. + :type request_time: long + :param aggregated_value_field: The aggregated value field. + :type aggregated_value_field: str + :param aggregated_grouping_fields: The aggregated grouping fields. + :type aggregated_grouping_fields: str + :param sum: The sum of all aggregates returned in the result set. + :type sum: long + :param max: The max of all aggregates returned in the result set. + :type max: long + :param schema: The schema. + :type schema: ~operational_insights_management_client.models.SearchMetadataSchema + """ + + _attribute_map = { + 'search_id': {'key': 'requestId', 'type': 'str'}, + 'result_type': {'key': 'resultType', 'type': 'str'}, + 'total': {'key': 'total', 'type': 'long'}, + 'top': {'key': 'top', 'type': 'long'}, + 'id': {'key': 'id', 'type': 'str'}, + 'core_summaries': {'key': 'coreSummaries', 'type': '[CoreSummary]'}, + 'status': {'key': 'status', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + 'sort': {'key': 'sort', 'type': '[SearchSort]'}, + 'request_time': {'key': 'requestTime', 'type': 'long'}, + 'aggregated_value_field': {'key': 'aggregatedValueField', 'type': 'str'}, + 'aggregated_grouping_fields': {'key': 'aggregatedGroupingFields', 'type': 'str'}, + 'sum': {'key': 'sum', 'type': 'long'}, + 'max': {'key': 'max', 'type': 'long'}, + 'schema': {'key': 'schema', 'type': 'SearchMetadataSchema'}, + } + + def __init__( + self, + **kwargs + ): + super(SearchMetadata, self).__init__(**kwargs) + self.search_id = kwargs.get('search_id', None) + self.result_type = kwargs.get('result_type', None) + self.total = kwargs.get('total', None) + self.top = kwargs.get('top', None) + self.id = kwargs.get('id', None) + self.core_summaries = kwargs.get('core_summaries', None) + self.status = kwargs.get('status', None) + self.start_time = kwargs.get('start_time', None) + self.last_updated = kwargs.get('last_updated', None) + self.e_tag = kwargs.get('e_tag', None) + self.sort = kwargs.get('sort', None) + self.request_time = kwargs.get('request_time', None) + self.aggregated_value_field = kwargs.get('aggregated_value_field', None) + self.aggregated_grouping_fields = kwargs.get('aggregated_grouping_fields', None) + self.sum = kwargs.get('sum', None) + self.max = kwargs.get('max', None) + self.schema = kwargs.get('schema', None) + + +class SearchMetadataSchema(msrest.serialization.Model): + """Schema metadata for search. + + :param name: The name of the metadata schema. + :type name: str + :param version: The version of the metadata schema. 
+ :type version: int + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(SearchMetadataSchema, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.version = kwargs.get('version', None) + + +class SearchSchemaValue(msrest.serialization.Model): + """Value object for schema results. + + All required parameters must be populated in order to send to Azure. + + :param name: The name of the schema. + :type name: str + :param display_name: The display name of the schema. + :type display_name: str + :param type: The type. + :type type: str + :param indexed: Required. The boolean that indicates the field is searchable as free text. + :type indexed: bool + :param stored: Required. The boolean that indicates whether or not the field is stored. + :type stored: bool + :param facet: Required. The boolean that indicates whether or not the field is a facet. + :type facet: bool + :param owner_type: The array of workflows containing the field. + :type owner_type: list[str] + """ + + _validation = { + 'indexed': {'required': True}, + 'stored': {'required': True}, + 'facet': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'indexed': {'key': 'indexed', 'type': 'bool'}, + 'stored': {'key': 'stored', 'type': 'bool'}, + 'facet': {'key': 'facet', 'type': 'bool'}, + 'owner_type': {'key': 'ownerType', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(SearchSchemaValue, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display_name = kwargs.get('display_name', None) + self.type = kwargs.get('type', None) + self.indexed = kwargs['indexed'] + self.stored = kwargs['stored'] + self.facet = kwargs['facet'] + self.owner_type = kwargs.get('owner_type', None) + + +class SearchSort(msrest.serialization.Model): + """The sort parameters for search. + + :param name: The name of the field the search query is sorted on. + :type name: str + :param order: The sort order of the search. Possible values include: "asc", "desc". + :type order: str or ~operational_insights_management_client.models.SearchSortEnum + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'order': {'key': 'order', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SearchSort, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.order = kwargs.get('order', None) + + +class SharedKeys(msrest.serialization.Model): + """The shared keys for a workspace. + + :param primary_shared_key: The primary shared key of a workspace. + :type primary_shared_key: str + :param secondary_shared_key: The secondary shared key of a workspace. + :type secondary_shared_key: str + """ + + _attribute_map = { + 'primary_shared_key': {'key': 'primarySharedKey', 'type': 'str'}, + 'secondary_shared_key': {'key': 'secondarySharedKey', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SharedKeys, self).__init__(**kwargs) + self.primary_shared_key = kwargs.get('primary_shared_key', None) + self.secondary_shared_key = kwargs.get('secondary_shared_key', None) + + +class StorageAccount(msrest.serialization.Model): + """Describes a storage account connection. + + All required parameters must be populated in order to send to Azure. + + :param id: Required. 
The Azure Resource Manager ID of the storage account resource. + :type id: str + :param key: Required. The storage account key. + :type key: str + """ + + _validation = { + 'id': {'required': True}, + 'key': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageAccount, self).__init__(**kwargs) + self.id = kwargs['id'] + self.key = kwargs['key'] + + +class StorageInsight(Resource): + """The top level storage insight resource container. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param e_tag: The ETag of the storage insight. + :type e_tag: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param containers: The names of the blob containers that the workspace should read. + :type containers: list[str] + :param tables: The names of the Azure tables that the workspace should read. + :type tables: list[str] + :param storage_account: The storage account connection details. + :type storage_account: ~operational_insights_management_client.models.StorageAccount + :ivar status: The status of the storage insight. + :vartype status: ~operational_insights_management_client.models.StorageInsightStatus + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'containers': {'key': 'properties.containers', 'type': '[str]'}, + 'tables': {'key': 'properties.tables', 'type': '[str]'}, + 'storage_account': {'key': 'properties.storageAccount', 'type': 'StorageAccount'}, + 'status': {'key': 'properties.status', 'type': 'StorageInsightStatus'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageInsight, self).__init__(**kwargs) + self.e_tag = kwargs.get('e_tag', None) + self.tags = kwargs.get('tags', None) + self.containers = kwargs.get('containers', None) + self.tables = kwargs.get('tables', None) + self.storage_account = kwargs.get('storage_account', None) + self.status = None + + +class StorageInsightListResult(msrest.serialization.Model): + """The list storage insights operation response. + + :param value: A list of storage insight items. + :type value: list[~operational_insights_management_client.models.StorageInsight] + :param odata_next_link: The link (url) to the next page of results. 
+ :type odata_next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[StorageInsight]'}, + 'odata_next_link': {'key': '@odata\\.nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageInsightListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.odata_next_link = kwargs.get('odata_next_link', None) + + +class StorageInsightStatus(msrest.serialization.Model): + """The status of the storage insight. + + All required parameters must be populated in order to send to Azure. + + :param state: Required. The state of the storage insight connection to the workspace. Possible + values include: "OK", "ERROR". + :type state: str or ~operational_insights_management_client.models.StorageInsightState + :param description: Description of the state of the storage insight. + :type description: str + """ + + _validation = { + 'state': {'required': True}, + } + + _attribute_map = { + 'state': {'key': 'state', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(StorageInsightStatus, self).__init__(**kwargs) + self.state = kwargs['state'] + self.description = kwargs.get('description', None) + + +class Table(Resource): + """Workspace data table definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param retention_in_days: The data table data retention in days, between 30 and 730. Setting + this property to null will default to the workspace retention. + :type retention_in_days: int + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'retention_in_days': {'maximum': 730, 'minimum': 30}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'retention_in_days': {'key': 'properties.retentionInDays', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(Table, self).__init__(**kwargs) + self.retention_in_days = kwargs.get('retention_in_days', None) + + +class TablesListResult(msrest.serialization.Model): + """The list tables operation response. + + :param value: A list of data tables. + :type value: list[~operational_insights_management_client.models.Table] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Table]'}, + } + + def __init__( + self, + **kwargs + ): + super(TablesListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class Tag(msrest.serialization.Model): + """A tag of a saved search. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The tag name. + :type name: str + :param value: Required. The tag value. 
+ :type value: str + """ + + _validation = { + 'name': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Tag, self).__init__(**kwargs) + self.name = kwargs['name'] + self.value = kwargs['value'] + + +class UsageMetric(msrest.serialization.Model): + """A metric describing the usage of a resource. + + :param name: The name of the metric. + :type name: ~operational_insights_management_client.models.MetricName + :param unit: The units used for the metric. + :type unit: str + :param current_value: The current value of the metric. + :type current_value: float + :param limit: The quota limit for the metric. + :type limit: float + :param next_reset_time: The time that the metric's value will reset. + :type next_reset_time: ~datetime.datetime + :param quota_period: The quota period that determines the length of time between value resets. + :type quota_period: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'MetricName'}, + 'unit': {'key': 'unit', 'type': 'str'}, + 'current_value': {'key': 'currentValue', 'type': 'float'}, + 'limit': {'key': 'limit', 'type': 'float'}, + 'next_reset_time': {'key': 'nextResetTime', 'type': 'iso-8601'}, + 'quota_period': {'key': 'quotaPeriod', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UsageMetric, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.unit = kwargs.get('unit', None) + self.current_value = kwargs.get('current_value', None) + self.limit = kwargs.get('limit', None) + self.next_reset_time = kwargs.get('next_reset_time', None) + self.quota_period = kwargs.get('quota_period', None) + + +class UserIdentityProperties(msrest.serialization.Model): + """User assigned identity properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal id of user assigned identity. + :vartype principal_id: str + :ivar client_id: The client id of user assigned identity. + :vartype client_id: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'client_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UserIdentityProperties, self).__init__(**kwargs) + self.principal_id = None + self.client_id = None + + +class Workspace(TrackedResource): + """The top level Workspace resource container. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :param e_tag: The ETag of the workspace. + :type e_tag: str + :param provisioning_state: The provisioning state of the workspace. 
Possible values include: + "Creating", "Succeeded", "Failed", "Canceled", "Deleting", "ProvisioningAccount", "Updating". + :type provisioning_state: str or + ~operational_insights_management_client.models.WorkspaceEntityStatus + :ivar customer_id: This is a read-only property. Represents the ID associated with the + workspace. + :vartype customer_id: str + :param sku: The SKU of the workspace. + :type sku: ~operational_insights_management_client.models.WorkspaceSku + :param retention_in_days: The workspace data retention in days. Allowed values are per pricing + plan. See pricing tiers documentation for details. + :type retention_in_days: int + :param workspace_capping: The daily volume cap for ingestion. + :type workspace_capping: ~operational_insights_management_client.models.WorkspaceCapping + :ivar created_date: Workspace creation date. + :vartype created_date: str + :ivar modified_date: Workspace modification date. + :vartype modified_date: str + :param public_network_access_for_ingestion: The network access type for accessing Log Analytics + ingestion. Possible values include: "Enabled", "Disabled". Default value: "Enabled". + :type public_network_access_for_ingestion: str or + ~operational_insights_management_client.models.PublicNetworkAccessType + :param public_network_access_for_query: The network access type for accessing Log Analytics + query. Possible values include: "Enabled", "Disabled". Default value: "Enabled". + :type public_network_access_for_query: str or + ~operational_insights_management_client.models.PublicNetworkAccessType + :param force_cmk_for_query: Indicates whether customer managed storage is mandatory for query + management. + :type force_cmk_for_query: bool + :ivar private_link_scoped_resources: List of linked private link scope resources. + :vartype private_link_scoped_resources: + list[~operational_insights_management_client.models.PrivateLinkScopedResource] + :param features: Workspace features. 
+ :type features: ~operational_insights_management_client.models.WorkspaceFeatures
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'location': {'required': True},
+ 'customer_id': {'readonly': True},
+ 'created_date': {'readonly': True},
+ 'modified_date': {'readonly': True},
+ 'private_link_scoped_resources': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'e_tag': {'key': 'eTag', 'type': 'str'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ 'customer_id': {'key': 'properties.customerId', 'type': 'str'},
+ 'sku': {'key': 'properties.sku', 'type': 'WorkspaceSku'},
+ 'retention_in_days': {'key': 'properties.retentionInDays', 'type': 'int'},
+ 'workspace_capping': {'key': 'properties.workspaceCapping', 'type': 'WorkspaceCapping'},
+ 'created_date': {'key': 'properties.createdDate', 'type': 'str'},
+ 'modified_date': {'key': 'properties.modifiedDate', 'type': 'str'},
+ 'public_network_access_for_ingestion': {'key': 'properties.publicNetworkAccessForIngestion', 'type': 'str'},
+ 'public_network_access_for_query': {'key': 'properties.publicNetworkAccessForQuery', 'type': 'str'},
+ 'force_cmk_for_query': {'key': 'properties.forceCmkForQuery', 'type': 'bool'},
+ 'private_link_scoped_resources': {'key': 'properties.privateLinkScopedResources', 'type': '[PrivateLinkScopedResource]'},
+ 'features': {'key': 'properties.features', 'type': 'WorkspaceFeatures'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Workspace, self).__init__(**kwargs)
+ self.e_tag = kwargs.get('e_tag', None)
+ self.provisioning_state = kwargs.get('provisioning_state', None)
+ self.customer_id = None
+ self.sku = kwargs.get('sku', None)
+ self.retention_in_days = kwargs.get('retention_in_days', None)
+ self.workspace_capping = kwargs.get('workspace_capping', None)
+ self.created_date = None
+ self.modified_date = None
+ self.public_network_access_for_ingestion = kwargs.get('public_network_access_for_ingestion', "Enabled")
+ self.public_network_access_for_query = kwargs.get('public_network_access_for_query', "Enabled")
+ self.force_cmk_for_query = kwargs.get('force_cmk_for_query', None)
+ self.private_link_scoped_resources = None
+ self.features = kwargs.get('features', None)
+
+
+class WorkspaceCapping(msrest.serialization.Model):
+ """The daily volume cap for ingestion.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param daily_quota_gb: The workspace daily quota for ingestion.
+ :type daily_quota_gb: float
+ :ivar quota_next_reset_time: The time when the quota will be reset.
+ :vartype quota_next_reset_time: str
+ :ivar data_ingestion_status: The status of data ingestion for this workspace. Possible values
+ include: "RespectQuota", "ForceOn", "ForceOff", "OverQuota", "SubscriptionSuspended",
+ "ApproachingQuota".
+ :vartype data_ingestion_status: str or
+ ~operational_insights_management_client.models.DataIngestionStatus
+ """
+
+ _validation = {
+ 'quota_next_reset_time': {'readonly': True},
+ 'data_ingestion_status': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'daily_quota_gb': {'key': 'dailyQuotaGb', 'type': 'float'},
+ 'quota_next_reset_time': {'key': 'quotaNextResetTime', 'type': 'str'},
+ 'data_ingestion_status': {'key': 'dataIngestionStatus', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceCapping, self).__init__(**kwargs)
+ self.daily_quota_gb = kwargs.get('daily_quota_gb', None)
+ self.quota_next_reset_time = None
+ self.data_ingestion_status = None
+
+
+class WorkspaceFeatures(msrest.serialization.Model):
+ """Workspace features.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param enable_data_export: Flag that indicates whether data should be exported.
+ :type enable_data_export: bool
+ :param immediate_purge_data_on30_days: Flag that indicates whether the data should be removed
+ after 30 days.
+ :type immediate_purge_data_on30_days: bool
+ :param enable_log_access_using_only_resource_permissions: Flag that indicates which permissions
+ to use - resource or workspace or both.
+ :type enable_log_access_using_only_resource_permissions: bool
+ :param cluster_resource_id: Dedicated LA cluster resourceId that is linked to the workspaces.
+ :type cluster_resource_id: str
+ :param disable_local_auth: Disables non-AAD based authentication.
+ :type disable_local_auth: bool
+ """
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'enable_data_export': {'key': 'enableDataExport', 'type': 'bool'},
+ 'immediate_purge_data_on30_days': {'key': 'immediatePurgeDataOn30Days', 'type': 'bool'},
+ 'enable_log_access_using_only_resource_permissions': {'key': 'enableLogAccessUsingOnlyResourcePermissions', 'type': 'bool'},
+ 'cluster_resource_id': {'key': 'clusterResourceId', 'type': 'str'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceFeatures, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.enable_data_export = kwargs.get('enable_data_export', None)
+ self.immediate_purge_data_on30_days = kwargs.get('immediate_purge_data_on30_days', None)
+ self.enable_log_access_using_only_resource_permissions = kwargs.get('enable_log_access_using_only_resource_permissions', None)
+ self.cluster_resource_id = kwargs.get('cluster_resource_id', None)
+ self.disable_local_auth = kwargs.get('disable_local_auth', None)
+
+
+class WorkspaceListManagementGroupsResult(msrest.serialization.Model):
+ """The list workspace management groups operation response.
+
+ :param value: Gets or sets a list of management groups attached to the workspace.
+ :type value: list[~operational_insights_management_client.models.ManagementGroup]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ManagementGroup]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceListManagementGroupsResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+
+
+class WorkspaceListResult(msrest.serialization.Model):
+ """The list workspaces operation response.
+
+ :param value: A list of workspaces.
+ :type value: list[~operational_insights_management_client.models.Workspace] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Workspace]'}, + } + + def __init__( + self, + **kwargs + ): + super(WorkspaceListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class WorkspaceListUsagesResult(msrest.serialization.Model): + """The list workspace usages operation response. + + :param value: Gets or sets a list of usage metrics for a workspace. + :type value: list[~operational_insights_management_client.models.UsageMetric] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[UsageMetric]'}, + } + + def __init__( + self, + **kwargs + ): + super(WorkspaceListUsagesResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class WorkspacePatch(AzureEntityResource): + """The top level Workspace resource container. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param tags: A set of tags. Resource tags. Optional. + :type tags: dict[str, str] + :param provisioning_state: The provisioning state of the workspace. Possible values include: + "Creating", "Succeeded", "Failed", "Canceled", "Deleting", "ProvisioningAccount", "Updating". + :type provisioning_state: str or + ~operational_insights_management_client.models.WorkspaceEntityStatus + :ivar customer_id: This is a read-only property. Represents the ID associated with the + workspace. + :vartype customer_id: str + :param sku: The SKU of the workspace. + :type sku: ~operational_insights_management_client.models.WorkspaceSku + :param retention_in_days: The workspace data retention in days. Allowed values are per pricing + plan. See pricing tiers documentation for details. + :type retention_in_days: int + :param workspace_capping: The daily volume cap for ingestion. + :type workspace_capping: ~operational_insights_management_client.models.WorkspaceCapping + :ivar created_date: Workspace creation date. + :vartype created_date: str + :ivar modified_date: Workspace modification date. + :vartype modified_date: str + :param public_network_access_for_ingestion: The network access type for accessing Log Analytics + ingestion. Possible values include: "Enabled", "Disabled". Default value: "Enabled". + :type public_network_access_for_ingestion: str or + ~operational_insights_management_client.models.PublicNetworkAccessType + :param public_network_access_for_query: The network access type for accessing Log Analytics + query. Possible values include: "Enabled", "Disabled". Default value: "Enabled". + :type public_network_access_for_query: str or + ~operational_insights_management_client.models.PublicNetworkAccessType + :param force_cmk_for_query: Indicates whether customer managed storage is mandatory for query + management. + :type force_cmk_for_query: bool + :ivar private_link_scoped_resources: List of linked private link scope resources. 
+ :vartype private_link_scoped_resources: + list[~operational_insights_management_client.models.PrivateLinkScopedResource] + :param features: Workspace features. + :type features: ~operational_insights_management_client.models.WorkspaceFeatures + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'customer_id': {'readonly': True}, + 'created_date': {'readonly': True}, + 'modified_date': {'readonly': True}, + 'private_link_scoped_resources': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'customer_id': {'key': 'properties.customerId', 'type': 'str'}, + 'sku': {'key': 'properties.sku', 'type': 'WorkspaceSku'}, + 'retention_in_days': {'key': 'properties.retentionInDays', 'type': 'int'}, + 'workspace_capping': {'key': 'properties.workspaceCapping', 'type': 'WorkspaceCapping'}, + 'created_date': {'key': 'properties.createdDate', 'type': 'str'}, + 'modified_date': {'key': 'properties.modifiedDate', 'type': 'str'}, + 'public_network_access_for_ingestion': {'key': 'properties.publicNetworkAccessForIngestion', 'type': 'str'}, + 'public_network_access_for_query': {'key': 'properties.publicNetworkAccessForQuery', 'type': 'str'}, + 'force_cmk_for_query': {'key': 'properties.forceCmkForQuery', 'type': 'bool'}, + 'private_link_scoped_resources': {'key': 'properties.privateLinkScopedResources', 'type': '[PrivateLinkScopedResource]'}, + 'features': {'key': 'properties.features', 'type': 'WorkspaceFeatures'}, + } + + def __init__( + self, + **kwargs + ): + super(WorkspacePatch, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + self.provisioning_state = kwargs.get('provisioning_state', None) + self.customer_id = None + self.sku = kwargs.get('sku', None) + self.retention_in_days = kwargs.get('retention_in_days', None) + self.workspace_capping = kwargs.get('workspace_capping', None) + self.created_date = None + self.modified_date = None + self.public_network_access_for_ingestion = kwargs.get('public_network_access_for_ingestion', "Enabled") + self.public_network_access_for_query = kwargs.get('public_network_access_for_query', "Enabled") + self.force_cmk_for_query = kwargs.get('force_cmk_for_query', None) + self.private_link_scoped_resources = None + self.features = kwargs.get('features', None) + + +class WorkspacePurgeBody(msrest.serialization.Model): + """Describes the body of a purge request for an App Insights Workspace. + + All required parameters must be populated in order to send to Azure. + + :param table: Required. Table from which to purge data. + :type table: str + :param filters: Required. The set of columns and filters (queries) to run over them to purge + the resulting data. 
+ :type filters: list[~operational_insights_management_client.models.WorkspacePurgeBodyFilters] + """ + + _validation = { + 'table': {'required': True}, + 'filters': {'required': True}, + } + + _attribute_map = { + 'table': {'key': 'table', 'type': 'str'}, + 'filters': {'key': 'filters', 'type': '[WorkspacePurgeBodyFilters]'}, + } + + def __init__( + self, + **kwargs + ): + super(WorkspacePurgeBody, self).__init__(**kwargs) + self.table = kwargs['table'] + self.filters = kwargs['filters'] + + +class WorkspacePurgeBodyFilters(msrest.serialization.Model): + """User-defined filters to return data which will be purged from the table. + + :param column: The column of the table over which the given query should run. + :type column: str + :param operator: A query operator to evaluate over the provided column and value(s). Supported + operators are ==, =~, in, in~, >, >=, <, <=, between, and have the same behavior as they would + in a KQL query. + :type operator: str + :param value: the value for the operator to function over. This can be a number (e.g., > 100), + a string (timestamp >= '2017-09-01') or array of values. + :type value: object + :param key: When filtering over custom dimensions, this key will be used as the name of the + custom dimension. + :type key: str + """ + + _attribute_map = { + 'column': {'key': 'column', 'type': 'str'}, + 'operator': {'key': 'operator', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'object'}, + 'key': {'key': 'key', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(WorkspacePurgeBodyFilters, self).__init__(**kwargs) + self.column = kwargs.get('column', None) + self.operator = kwargs.get('operator', None) + self.value = kwargs.get('value', None) + self.key = kwargs.get('key', None) + + +class WorkspacePurgeResponse(msrest.serialization.Model): + """Response containing operationId for a specific purge action. + + All required parameters must be populated in order to send to Azure. + + :param operation_id: Required. Id to use when querying for status for a particular purge + operation. + :type operation_id: str + """ + + _validation = { + 'operation_id': {'required': True}, + } + + _attribute_map = { + 'operation_id': {'key': 'operationId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(WorkspacePurgeResponse, self).__init__(**kwargs) + self.operation_id = kwargs['operation_id'] + + +class WorkspacePurgeStatusResponse(msrest.serialization.Model): + """Response containing status for a specific purge operation. + + All required parameters must be populated in order to send to Azure. + + :param status: Required. Status of the operation represented by the requested Id. Possible + values include: "pending", "completed". + :type status: str or ~operational_insights_management_client.models.PurgeState + """ + + _validation = { + 'status': {'required': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(WorkspacePurgeStatusResponse, self).__init__(**kwargs) + self.status = kwargs['status'] + + +class WorkspaceSku(msrest.serialization.Model): + """The SKU (tier) of a workspace. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The name of the SKU. Possible values include: "Free", "Standard", + "Premium", "PerNode", "PerGB2018", "Standalone", "CapacityReservation", "LACluster". 
+ :type name: str or ~operational_insights_management_client.models.WorkspaceSkuNameEnum
+ :param capacity_reservation_level: The capacity reservation level for this workspace, when
+ CapacityReservation sku is selected.
+ :type capacity_reservation_level: int
+ :ivar last_sku_update: The last time when the sku was updated.
+ :vartype last_sku_update: str
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'last_sku_update': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'capacity_reservation_level': {'key': 'capacityReservationLevel', 'type': 'int'},
+ 'last_sku_update': {'key': 'lastSkuUpdate', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceSku, self).__init__(**kwargs)
+ self.name = kwargs['name']
+ self.capacity_reservation_level = kwargs.get('capacity_reservation_level', None)
+ self.last_sku_update = None
diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/models/_models_py3.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/models/_models_py3.py
new file mode 100644
index 00000000000..8986ab3fb47
--- /dev/null
+++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/models/_models_py3.py
@@ -0,0 +1,2639 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+import datetime
+from typing import Dict, List, Optional, Union
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+from ._operational_insights_management_client_enums import *
+
+
+class AssociatedWorkspace(msrest.serialization.Model):
+ """The list of Log Analytics workspaces associated with the cluster.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar workspace_id: The id of the assigned workspace.
+ :vartype workspace_id: str
+ :ivar workspace_name: The name of the assigned workspace.
+ :vartype workspace_name: str
+ :ivar resource_id: The ResourceId of the assigned workspace.
+ :vartype resource_id: str
+ :ivar associate_date: The time of workspace association.
+ :vartype associate_date: str
+ """
+
+ _validation = {
+ 'workspace_id': {'readonly': True},
+ 'workspace_name': {'readonly': True},
+ 'resource_id': {'readonly': True},
+ 'associate_date': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'workspace_id': {'key': 'workspaceId', 'type': 'str'},
+ 'workspace_name': {'key': 'workspaceName', 'type': 'str'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'associate_date': {'key': 'associateDate', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AssociatedWorkspace, self).__init__(**kwargs)
+ self.workspace_id = None
+ self.workspace_name = None
+ self.resource_id = None
+ self.associate_date = None
+
+
+class AvailableServiceTier(msrest.serialization.Model):
+ """Service Tier details.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar service_tier: The name of the Service Tier. Possible values include: "Free", "Standard",
+ "Premium", "PerNode", "PerGB2018", "Standalone", "CapacityReservation".
+ :vartype service_tier: str or ~operational_insights_management_client.models.SkuNameEnum + :ivar enabled: True if the Service Tier is enabled for the workspace. + :vartype enabled: bool + :ivar minimum_retention: The minimum retention for the Service Tier, in days. + :vartype minimum_retention: long + :ivar maximum_retention: The maximum retention for the Service Tier, in days. + :vartype maximum_retention: long + :ivar default_retention: The default retention for the Service Tier, in days. + :vartype default_retention: long + :ivar capacity_reservation_level: The capacity reservation level in GB per day. Returned for + the Capacity Reservation Service Tier. + :vartype capacity_reservation_level: long + :ivar last_sku_update: Time when the sku was last updated for the workspace. Returned for the + Capacity Reservation Service Tier. + :vartype last_sku_update: str + """ + + _validation = { + 'service_tier': {'readonly': True}, + 'enabled': {'readonly': True}, + 'minimum_retention': {'readonly': True}, + 'maximum_retention': {'readonly': True}, + 'default_retention': {'readonly': True}, + 'capacity_reservation_level': {'readonly': True}, + 'last_sku_update': {'readonly': True}, + } + + _attribute_map = { + 'service_tier': {'key': 'serviceTier', 'type': 'str'}, + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'minimum_retention': {'key': 'minimumRetention', 'type': 'long'}, + 'maximum_retention': {'key': 'maximumRetention', 'type': 'long'}, + 'default_retention': {'key': 'defaultRetention', 'type': 'long'}, + 'capacity_reservation_level': {'key': 'capacityReservationLevel', 'type': 'long'}, + 'last_sku_update': {'key': 'lastSkuUpdate', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AvailableServiceTier, self).__init__(**kwargs) + self.service_tier = None + self.enabled = None + self.minimum_retention = None + self.maximum_retention = None + self.default_retention = None + self.capacity_reservation_level = None + self.last_sku_update = None + + +class Resource(msrest.serialization.Model): + """Common fields that are returned in the response for all Azure Resource Manager resources. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + + +class AzureEntityResource(Resource): + """The resource model definition for an Azure Resource Manager resource with an etag. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. 
+ :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureEntityResource, self).__init__(**kwargs) + self.etag = None + + +class CapacityReservationProperties(msrest.serialization.Model): + """The Capacity Reservation properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar last_sku_update: The last time Sku was updated. + :vartype last_sku_update: str + :ivar min_capacity: Minimum CapacityReservation value in GB. + :vartype min_capacity: long + :ivar max_capacity: Maximum CapacityReservation value in GB. + :vartype max_capacity: long + """ + + _validation = { + 'last_sku_update': {'readonly': True}, + 'min_capacity': {'readonly': True}, + 'max_capacity': {'readonly': True}, + } + + _attribute_map = { + 'last_sku_update': {'key': 'lastSkuUpdate', 'type': 'str'}, + 'min_capacity': {'key': 'minCapacity', 'type': 'long'}, + 'max_capacity': {'key': 'maxCapacity', 'type': 'long'}, + } + + def __init__( + self, + **kwargs + ): + super(CapacityReservationProperties, self).__init__(**kwargs) + self.last_sku_update = None + self.min_capacity = None + self.max_capacity = None + + +class TrackedResource(Resource): + """The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + } + + def __init__( + self, + *, + location: str, + tags: Optional[Dict[str, str]] = None, + **kwargs + ): + super(TrackedResource, self).__init__(**kwargs) + self.tags = tags + self.location = location + + +class Cluster(TrackedResource): + """The top level Log Analytics cluster resource container. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. 
Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param tags: A set of tags. Resource tags.
+ :type tags: dict[str, str]
+ :param location: Required. The geo-location where the resource lives.
+ :type location: str
+ :param identity: The identity of the resource.
+ :type identity: ~operational_insights_management_client.models.Identity
+ :param sku: The sku properties.
+ :type sku: ~operational_insights_management_client.models.ClusterSku
+ :ivar cluster_id: The ID associated with the cluster.
+ :vartype cluster_id: str
+ :ivar provisioning_state: The provisioning state of the cluster. Possible values include:
+ "Creating", "Succeeded", "Failed", "Canceled", "Deleting", "ProvisioningAccount", "Updating".
+ :vartype provisioning_state: str or
+ ~operational_insights_management_client.models.ClusterEntityStatus
+ :param is_double_encryption_enabled: Configures whether the cluster will use double encryption.
+ This property cannot be modified after cluster creation. Default value is 'true'.
+ :type is_double_encryption_enabled: bool
+ :param is_availability_zones_enabled: Sets whether the cluster will support availability zones.
+ This can be set to true only in regions where Azure Data Explorer supports Availability Zones.
+ This property cannot be modified after cluster creation. Default value is 'true' if the region
+ supports Availability Zones.
+ :type is_availability_zones_enabled: bool
+ :param billing_type: The cluster's billing type. Possible values include: "Cluster",
+ "Workspaces".
+ :type billing_type: str or ~operational_insights_management_client.models.BillingType
+ :param key_vault_properties: The associated key properties.
+ :type key_vault_properties: ~operational_insights_management_client.models.KeyVaultProperties
+ :ivar last_modified_date: The last time the cluster was updated.
+ :vartype last_modified_date: str
+ :ivar created_date: The cluster creation time.
+ :vartype created_date: str
+ :param associated_workspaces: The list of Log Analytics workspaces associated with the cluster.
+ :type associated_workspaces:
+ list[~operational_insights_management_client.models.AssociatedWorkspace]
+ :param capacity_reservation_properties: Additional properties for capacity reservation.
+ :type capacity_reservation_properties: + ~operational_insights_management_client.models.CapacityReservationProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'cluster_id': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'last_modified_date': {'readonly': True}, + 'created_date': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'sku': {'key': 'sku', 'type': 'ClusterSku'}, + 'cluster_id': {'key': 'properties.clusterId', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'is_double_encryption_enabled': {'key': 'properties.isDoubleEncryptionEnabled', 'type': 'bool'}, + 'is_availability_zones_enabled': {'key': 'properties.isAvailabilityZonesEnabled', 'type': 'bool'}, + 'billing_type': {'key': 'properties.billingType', 'type': 'str'}, + 'key_vault_properties': {'key': 'properties.keyVaultProperties', 'type': 'KeyVaultProperties'}, + 'last_modified_date': {'key': 'properties.lastModifiedDate', 'type': 'str'}, + 'created_date': {'key': 'properties.createdDate', 'type': 'str'}, + 'associated_workspaces': {'key': 'properties.associatedWorkspaces', 'type': '[AssociatedWorkspace]'}, + 'capacity_reservation_properties': {'key': 'properties.capacityReservationProperties', 'type': 'CapacityReservationProperties'}, + } + + def __init__( + self, + *, + location: str, + tags: Optional[Dict[str, str]] = None, + identity: Optional["Identity"] = None, + sku: Optional["ClusterSku"] = None, + is_double_encryption_enabled: Optional[bool] = None, + is_availability_zones_enabled: Optional[bool] = None, + billing_type: Optional[Union[str, "BillingType"]] = None, + key_vault_properties: Optional["KeyVaultProperties"] = None, + associated_workspaces: Optional[List["AssociatedWorkspace"]] = None, + capacity_reservation_properties: Optional["CapacityReservationProperties"] = None, + **kwargs + ): + super(Cluster, self).__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.sku = sku + self.cluster_id = None + self.provisioning_state = None + self.is_double_encryption_enabled = is_double_encryption_enabled + self.is_availability_zones_enabled = is_availability_zones_enabled + self.billing_type = billing_type + self.key_vault_properties = key_vault_properties + self.last_modified_date = None + self.created_date = None + self.associated_workspaces = associated_workspaces + self.capacity_reservation_properties = capacity_reservation_properties + + +class ClusterListResult(msrest.serialization.Model): + """The list clusters operation response. + + :param next_link: The link used to get the next page of recommendations. + :type next_link: str + :param value: A list of Log Analytics clusters. 
+ :type value: list[~operational_insights_management_client.models.Cluster] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[Cluster]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["Cluster"]] = None, + **kwargs + ): + super(ClusterListResult, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class ClusterPatch(msrest.serialization.Model): + """The top level Log Analytics cluster resource container. + + :param identity: The identity of the resource. + :type identity: ~operational_insights_management_client.models.Identity + :param sku: The sku properties. + :type sku: ~operational_insights_management_client.models.ClusterSku + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param key_vault_properties: The associated key properties. + :type key_vault_properties: ~operational_insights_management_client.models.KeyVaultProperties + :param billing_type: The cluster's billing type. Possible values include: "Cluster", + "Workspaces". + :type billing_type: str or ~operational_insights_management_client.models.BillingType + """ + + _attribute_map = { + 'identity': {'key': 'identity', 'type': 'Identity'}, + 'sku': {'key': 'sku', 'type': 'ClusterSku'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'key_vault_properties': {'key': 'properties.keyVaultProperties', 'type': 'KeyVaultProperties'}, + 'billing_type': {'key': 'properties.billingType', 'type': 'str'}, + } + + def __init__( + self, + *, + identity: Optional["Identity"] = None, + sku: Optional["ClusterSku"] = None, + tags: Optional[Dict[str, str]] = None, + key_vault_properties: Optional["KeyVaultProperties"] = None, + billing_type: Optional[Union[str, "BillingType"]] = None, + **kwargs + ): + super(ClusterPatch, self).__init__(**kwargs) + self.identity = identity + self.sku = sku + self.tags = tags + self.key_vault_properties = key_vault_properties + self.billing_type = billing_type + + +class ClusterSku(msrest.serialization.Model): + """The cluster sku definition. + + :param capacity: The capacity value. + :type capacity: long + :param name: The name of the SKU. Possible values include: "CapacityReservation". + :type name: str or ~operational_insights_management_client.models.ClusterSkuNameEnum + """ + + _attribute_map = { + 'capacity': {'key': 'capacity', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + *, + capacity: Optional[int] = None, + name: Optional[Union[str, "ClusterSkuNameEnum"]] = None, + **kwargs + ): + super(ClusterSku, self).__init__(**kwargs) + self.capacity = capacity + self.name = name + + +class CoreSummary(msrest.serialization.Model): + """The core summary of a search. + + All required parameters must be populated in order to send to Azure. + + :param status: The status of a core summary. + :type status: str + :param number_of_documents: Required. The number of documents of a core summary. 
+ :type number_of_documents: long + """ + + _validation = { + 'number_of_documents': {'required': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'number_of_documents': {'key': 'numberOfDocuments', 'type': 'long'}, + } + + def __init__( + self, + *, + number_of_documents: int, + status: Optional[str] = None, + **kwargs + ): + super(CoreSummary, self).__init__(**kwargs) + self.status = status + self.number_of_documents = number_of_documents + + +class DataExport(Resource): + """The top level data export resource container. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param data_export_id: The data export rule ID. + :type data_export_id: str + :param table_names: An array of tables to export, for example: ["Heartbeat", "SecurityEvent"]. + :type table_names: list[str] + :param enable: Active when enabled. + :type enable: bool + :param created_date: The latest data export rule modification time. + :type created_date: str + :param last_modified_date: Date and time when the export was last modified. + :type last_modified_date: str + :param resource_id: The destination resource ID. This can be copied from the Properties entry + of the destination resource in Azure. + :type resource_id: str + :ivar type_properties_destination_type: The type of the destination resource. Possible values + include: "StorageAccount", "EventHub". + :vartype type_properties_destination_type: str or + ~operational_insights_management_client.models.Type + :param event_hub_name: Optional. Allows you to define an Event Hub name. Not applicable when the + destination is a Storage Account.
+ :type event_hub_name: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'type_properties_destination_type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'data_export_id': {'key': 'properties.dataExportId', 'type': 'str'}, + 'table_names': {'key': 'properties.tableNames', 'type': '[str]'}, + 'enable': {'key': 'properties.enable', 'type': 'bool'}, + 'created_date': {'key': 'properties.createdDate', 'type': 'str'}, + 'last_modified_date': {'key': 'properties.lastModifiedDate', 'type': 'str'}, + 'resource_id': {'key': 'properties.destination.resourceId', 'type': 'str'}, + 'type_properties_destination_type': {'key': 'properties.destination.type', 'type': 'str'}, + 'event_hub_name': {'key': 'properties.destination.metaData.eventHubName', 'type': 'str'}, + } + + def __init__( + self, + *, + data_export_id: Optional[str] = None, + table_names: Optional[List[str]] = None, + enable: Optional[bool] = None, + created_date: Optional[str] = None, + last_modified_date: Optional[str] = None, + resource_id: Optional[str] = None, + event_hub_name: Optional[str] = None, + **kwargs + ): + super(DataExport, self).__init__(**kwargs) + self.data_export_id = data_export_id + self.table_names = table_names + self.enable = enable + self.created_date = created_date + self.last_modified_date = last_modified_date + self.resource_id = resource_id + self.type_properties_destination_type = None + self.event_hub_name = event_hub_name + + +class DataExportListResult(msrest.serialization.Model): + """Result of the request to list data exports. + + :param value: List of data export instances within a workspace. + :type value: list[~operational_insights_management_client.models.DataExport] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DataExport]'}, + } + + def __init__( + self, + *, + value: Optional[List["DataExport"]] = None, + **kwargs + ): + super(DataExportListResult, self).__init__(**kwargs) + self.value = value + + +class DataSource(Resource): + """Datasources under OMS Workspace. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param properties: Required. The data source properties in raw JSON format; each kind of data + source has its own schema. + :type properties: object + :param etag: The ETag of the data source. + :type etag: str + :param kind: Required. The kind of the DataSource.
Possible values include: "WindowsEvent", + "WindowsPerformanceCounter", "IISLogs", "LinuxSyslog", "LinuxSyslogCollection", + "LinuxPerformanceObject", "LinuxPerformanceCollection", "CustomLog", "CustomLogCollection", + "AzureAuditLog", "AzureActivityLog", "GenericDataSource", "ChangeTrackingCustomPath", + "ChangeTrackingPath", "ChangeTrackingServices", "ChangeTrackingDataTypeConfiguration", + "ChangeTrackingDefaultRegistry", "ChangeTrackingRegistry", "ChangeTrackingLinuxPath", + "LinuxChangeTrackingPath", "ChangeTrackingContentLocation", "WindowsTelemetry", "Office365", + "SecurityWindowsBaselineConfiguration", "SecurityCenterSecurityWindowsBaselineConfiguration", + "SecurityEventCollectionConfiguration", "SecurityInsightsSecurityEventCollectionConfiguration", + "ImportComputerGroup", "NetworkMonitoring", "Itsm", "DnsAnalytics", "ApplicationInsights", + "SqlDataClassification". + :type kind: str or ~operational_insights_management_client.models.DataSourceKind + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + 'kind': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'object'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + } + + def __init__( + self, + *, + properties: object, + kind: Union[str, "DataSourceKind"], + etag: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + **kwargs + ): + super(DataSource, self).__init__(**kwargs) + self.properties = properties + self.etag = etag + self.kind = kind + self.tags = tags + + +class DataSourceFilter(msrest.serialization.Model): + """DataSource filter. Right now, only filter by kind is supported. + + :param kind: The kind of the DataSource. Possible values include: "WindowsEvent", + "WindowsPerformanceCounter", "IISLogs", "LinuxSyslog", "LinuxSyslogCollection", + "LinuxPerformanceObject", "LinuxPerformanceCollection", "CustomLog", "CustomLogCollection", + "AzureAuditLog", "AzureActivityLog", "GenericDataSource", "ChangeTrackingCustomPath", + "ChangeTrackingPath", "ChangeTrackingServices", "ChangeTrackingDataTypeConfiguration", + "ChangeTrackingDefaultRegistry", "ChangeTrackingRegistry", "ChangeTrackingLinuxPath", + "LinuxChangeTrackingPath", "ChangeTrackingContentLocation", "WindowsTelemetry", "Office365", + "SecurityWindowsBaselineConfiguration", "SecurityCenterSecurityWindowsBaselineConfiguration", + "SecurityEventCollectionConfiguration", "SecurityInsightsSecurityEventCollectionConfiguration", + "ImportComputerGroup", "NetworkMonitoring", "Itsm", "DnsAnalytics", "ApplicationInsights", + "SqlDataClassification". + :type kind: str or ~operational_insights_management_client.models.DataSourceKind + """ + + _attribute_map = { + 'kind': {'key': 'kind', 'type': 'str'}, + } + + def __init__( + self, + *, + kind: Optional[Union[str, "DataSourceKind"]] = None, + **kwargs + ): + super(DataSourceFilter, self).__init__(**kwargs) + self.kind = kind + + +class DataSourceListResult(msrest.serialization.Model): + """The list data source by workspace operation response. + + :param value: A list of datasources. 
+ :type value: list[~operational_insights_management_client.models.DataSource] + :param next_link: The link (url) to the next page of datasources. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DataSource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["DataSource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(DataSourceListResult, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class ErrorAdditionalInfo(msrest.serialization.Model): + """The resource management error additional info. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar type: The additional info type. + :vartype type: str + :ivar info: The additional info. + :vartype info: object + """ + + _validation = { + 'type': {'readonly': True}, + 'info': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'info': {'key': 'info', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorAdditionalInfo, self).__init__(**kwargs) + self.type = None + self.info = None + + +class ErrorDetail(msrest.serialization.Model): + """The error detail. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. + :vartype message: str + :ivar target: The error target. + :vartype target: str + :ivar details: The error details. + :vartype details: list[~operational_insights_management_client.models.ErrorDetail] + :ivar additional_info: The error additional info. + :vartype additional_info: + list[~operational_insights_management_client.models.ErrorAdditionalInfo] + """ + + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + 'target': {'readonly': True}, + 'details': {'readonly': True}, + 'additional_info': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDetail]'}, + 'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorDetail, self).__init__(**kwargs) + self.code = None + self.message = None + self.target = None + self.details = None + self.additional_info = None + + +class ErrorResponse(msrest.serialization.Model): + """Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.). + + :param error: The error object. + :type error: ~operational_insights_management_client.models.ErrorDetail + """ + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorDetail'}, + } + + def __init__( + self, + *, + error: Optional["ErrorDetail"] = None, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.error = error + + +class Identity(msrest.serialization.Model): + """Identity for the resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar principal_id: The principal ID of resource identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of resource. + :vartype tenant_id: str + :param type: Required. 
Type of managed service identity. Possible values include: + "SystemAssigned", "UserAssigned", "None". + :type type: str or ~operational_insights_management_client.models.IdentityType + :param user_assigned_identities: The list of user identities associated with the resource. The + user identity dictionary key references will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. + :type user_assigned_identities: dict[str, + ~operational_insights_management_client.models.UserIdentityProperties] + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserIdentityProperties}'}, + } + + def __init__( + self, + *, + type: Union[str, "IdentityType"], + user_assigned_identities: Optional[Dict[str, "UserIdentityProperties"]] = None, + **kwargs + ): + super(Identity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = type + self.user_assigned_identities = user_assigned_identities + + +class IntelligencePack(msrest.serialization.Model): + """Intelligence Pack containing a string name and boolean indicating if it's enabled. + + :param name: The name of the intelligence pack. + :type name: str + :param enabled: The enabled boolean for the intelligence pack. + :type enabled: bool + :param display_name: The display name of the intelligence pack. + :type display_name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'enabled': {'key': 'enabled', 'type': 'bool'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + enabled: Optional[bool] = None, + display_name: Optional[str] = None, + **kwargs + ): + super(IntelligencePack, self).__init__(**kwargs) + self.name = name + self.enabled = enabled + self.display_name = display_name + + +class KeyVaultProperties(msrest.serialization.Model): + """The key vault properties. + + :param key_vault_uri: The Key Vault URI which holds the key associated with the Log Analytics + cluster. + :type key_vault_uri: str + :param key_name: The name of the key associated with the Log Analytics cluster. + :type key_name: str + :param key_version: The version of the key associated with the Log Analytics cluster. + :type key_version: str + :param key_rsa_size: Selected key minimum required size. + :type key_rsa_size: int + """ + + _attribute_map = { + 'key_vault_uri': {'key': 'keyVaultUri', 'type': 'str'}, + 'key_name': {'key': 'keyName', 'type': 'str'}, + 'key_version': {'key': 'keyVersion', 'type': 'str'}, + 'key_rsa_size': {'key': 'keyRsaSize', 'type': 'int'}, + } + + def __init__( + self, + *, + key_vault_uri: Optional[str] = None, + key_name: Optional[str] = None, + key_version: Optional[str] = None, + key_rsa_size: Optional[int] = None, + **kwargs + ): + super(KeyVaultProperties, self).__init__(**kwargs) + self.key_vault_uri = key_vault_uri + self.key_name = key_name + self.key_version = key_version + self.key_rsa_size = key_rsa_size + + +class LinkedService(Resource): + """The top level Linked service resource container.
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param resource_id: The resource id of the resource that will be linked to the workspace. This + should be used for linking resources which require read access. + :type resource_id: str + :param write_access_resource_id: The resource id of the resource that will be linked to the + workspace. This should be used for linking resources which require write access. + :type write_access_resource_id: str + :param provisioning_state: The provisioning state of the linked service. Possible values + include: "Succeeded", "Deleting", "ProvisioningAccount", "Updating". + :type provisioning_state: str or + ~operational_insights_management_client.models.LinkedServiceEntityStatus + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'resource_id': {'key': 'properties.resourceId', 'type': 'str'}, + 'write_access_resource_id': {'key': 'properties.writeAccessResourceId', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + resource_id: Optional[str] = None, + write_access_resource_id: Optional[str] = None, + provisioning_state: Optional[Union[str, "LinkedServiceEntityStatus"]] = None, + **kwargs + ): + super(LinkedService, self).__init__(**kwargs) + self.tags = tags + self.resource_id = resource_id + self.write_access_resource_id = write_access_resource_id + self.provisioning_state = provisioning_state + + +class LinkedServiceListResult(msrest.serialization.Model): + """The list linked service operation response. + + :param value: The list of linked service instances. + :type value: list[~operational_insights_management_client.models.LinkedService] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[LinkedService]'}, + } + + def __init__( + self, + *, + value: Optional[List["LinkedService"]] = None, + **kwargs + ): + super(LinkedServiceListResult, self).__init__(**kwargs) + self.value = value + + +class LinkedStorageAccountsListResult(msrest.serialization.Model): + """The list linked storage accounts service operation response. + + :param value: A list of linked storage accounts instances. + :type value: list[~operational_insights_management_client.models.LinkedStorageAccountsResource] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[LinkedStorageAccountsResource]'}, + } + + def __init__( + self, + *, + value: Optional[List["LinkedStorageAccountsResource"]] = None, + **kwargs + ): + super(LinkedStorageAccountsListResult, self).__init__(**kwargs) + self.value = value + + +class LinkedStorageAccountsResource(Resource): + """Linked storage accounts top level resource container. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar data_source_type: Linked storage accounts type. Possible values include: "CustomLogs", + "AzureWatson", "Query", "Alerts". + :vartype data_source_type: str or ~operational_insights_management_client.models.DataSourceType + :param storage_account_ids: Linked storage accounts resources ids. + :type storage_account_ids: list[str] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'data_source_type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'data_source_type': {'key': 'properties.dataSourceType', 'type': 'str'}, + 'storage_account_ids': {'key': 'properties.storageAccountIds', 'type': '[str]'}, + } + + def __init__( + self, + *, + storage_account_ids: Optional[List[str]] = None, + **kwargs + ): + super(LinkedStorageAccountsResource, self).__init__(**kwargs) + self.data_source_type = None + self.storage_account_ids = storage_account_ids + + +class ManagementGroup(msrest.serialization.Model): + """A management group that is connected to a workspace. + + :param server_count: The number of servers connected to the management group. + :type server_count: int + :param is_gateway: Gets or sets a value indicating whether the management group is a gateway. + :type is_gateway: bool + :param name: The name of the management group. + :type name: str + :param id: The unique ID of the management group. + :type id: str + :param created: The datetime that the management group was created. + :type created: ~datetime.datetime + :param data_received: The last datetime that the management group received data. + :type data_received: ~datetime.datetime + :param version: The version of System Center that is managing the management group. + :type version: str + :param sku: The SKU of System Center that is managing the management group. 
+ :type sku: str + """ + + _attribute_map = { + 'server_count': {'key': 'properties.serverCount', 'type': 'int'}, + 'is_gateway': {'key': 'properties.isGateway', 'type': 'bool'}, + 'name': {'key': 'properties.name', 'type': 'str'}, + 'id': {'key': 'properties.id', 'type': 'str'}, + 'created': {'key': 'properties.created', 'type': 'iso-8601'}, + 'data_received': {'key': 'properties.dataReceived', 'type': 'iso-8601'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'sku': {'key': 'properties.sku', 'type': 'str'}, + } + + def __init__( + self, + *, + server_count: Optional[int] = None, + is_gateway: Optional[bool] = None, + name: Optional[str] = None, + id: Optional[str] = None, + created: Optional[datetime.datetime] = None, + data_received: Optional[datetime.datetime] = None, + version: Optional[str] = None, + sku: Optional[str] = None, + **kwargs + ): + super(ManagementGroup, self).__init__(**kwargs) + self.server_count = server_count + self.is_gateway = is_gateway + self.name = name + self.id = id + self.created = created + self.data_received = data_received + self.version = version + self.sku = sku + + +class MetricName(msrest.serialization.Model): + """The name of a metric. + + :param value: The system name of the metric. + :type value: str + :param localized_value: The localized name of the metric. + :type localized_value: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': 'str'}, + 'localized_value': {'key': 'localizedValue', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[str] = None, + localized_value: Optional[str] = None, + **kwargs + ): + super(MetricName, self).__init__(**kwargs) + self.value = value + self.localized_value = localized_value + + +class Operation(msrest.serialization.Model): + """Supported operation of OperationalInsights resource provider. + + :param name: Operation name: {provider}/{resource}/{operation}. + :type name: str + :param display: Display metadata associated with the operation. + :type display: ~operational_insights_management_client.models.OperationDisplay + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + display: Optional["OperationDisplay"] = None, + **kwargs + ): + super(Operation, self).__init__(**kwargs) + self.name = name + self.display = display + + +class OperationDisplay(msrest.serialization.Model): + """Display metadata associated with the operation. + + :param provider: Service provider: Microsoft OperationsManagement. + :type provider: str + :param resource: Resource on which the operation is performed etc. + :type resource: str + :param operation: Type of operation: get, read, delete, etc. + :type operation: str + :param description: Description of operation. 
+ :type description: str + """ + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + *, + provider: Optional[str] = None, + resource: Optional[str] = None, + operation: Optional[str] = None, + description: Optional[str] = None, + **kwargs + ): + super(OperationDisplay, self).__init__(**kwargs) + self.provider = provider + self.resource = resource + self.operation = operation + self.description = description + + +class OperationListResult(msrest.serialization.Model): + """Result of the request to list solution operations. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param value: List of solution operations supported by the OperationsManagement resource + provider. + :type value: list[~operational_insights_management_client.models.Operation] + :ivar next_link: URL to get the next set of operation list results if there are any. + :vartype next_link: str + """ + + _validation = { + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Operation]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["Operation"]] = None, + **kwargs + ): + super(OperationListResult, self).__init__(**kwargs) + self.value = value + self.next_link = None + + +class OperationStatus(msrest.serialization.Model): + """The status of operation. + + :param id: The operation Id. + :type id: str + :param name: The operation name. + :type name: str + :param start_time: The start time of the operation. + :type start_time: str + :param end_time: The end time of the operation. + :type end_time: str + :param status: The status of the operation. + :type status: str + :param error: The error detail of the operation if any. + :type error: ~operational_insights_management_client.models.ErrorResponse + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'str'}, + 'end_time': {'key': 'endTime', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'error': {'key': 'error', 'type': 'ErrorResponse'}, + } + + def __init__( + self, + *, + id: Optional[str] = None, + name: Optional[str] = None, + start_time: Optional[str] = None, + end_time: Optional[str] = None, + status: Optional[str] = None, + error: Optional["ErrorResponse"] = None, + **kwargs + ): + super(OperationStatus, self).__init__(**kwargs) + self.id = id + self.name = name + self.start_time = start_time + self.end_time = end_time + self.status = status + self.error = error + + +class PrivateLinkScopedResource(msrest.serialization.Model): + """The private link scope resource reference. + + :param resource_id: The full resource Id of the private link scope resource. + :type resource_id: str + :param scope_id: The private link scope unique Identifier. 
+ :type scope_id: str + """ + + _attribute_map = { + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'scope_id': {'key': 'scopeId', 'type': 'str'}, + } + + def __init__( + self, + *, + resource_id: Optional[str] = None, + scope_id: Optional[str] = None, + **kwargs + ): + super(PrivateLinkScopedResource, self).__init__(**kwargs) + self.resource_id = resource_id + self.scope_id = scope_id + + +class ProxyResource(Resource): + """The resource model definition for an Azure Resource Manager proxy resource. It will not have tags and a location. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ProxyResource, self).__init__(**kwargs) + + +class SavedSearch(Resource): + """Value object for saved search results. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param etag: The ETag of the saved search. To override an existing saved search, use "*" or + specify the current Etag. + :type etag: str + :param category: Required. The category of the saved search. This helps the user to find a + saved search faster. + :type category: str + :param display_name: Required. Saved search display name. + :type display_name: str + :param query: Required. The query expression for the saved search. + :type query: str + :param function_alias: The function alias if the query serves as a function. + :type function_alias: str + :param function_parameters: The optional function parameters if the query serves as a function. + Value should be in the following format: 'param-name1:type1 = default_value1, param-name2:type2 + = default_value2'. For more examples and proper syntax please refer to + https://docs.microsoft.com/en-us/azure/kusto/query/functions/user-defined-functions. + :type function_parameters: str + :param version: The version number of the query language. The current version is 2 and is the + default. + :type version: long + :param tags: A set of tags. The tags attached to the saved search.
+ :type tags: list[~operational_insights_management_client.models.Tag] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'category': {'required': True}, + 'display_name': {'required': True}, + 'query': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'category': {'key': 'properties.category', 'type': 'str'}, + 'display_name': {'key': 'properties.displayName', 'type': 'str'}, + 'query': {'key': 'properties.query', 'type': 'str'}, + 'function_alias': {'key': 'properties.functionAlias', 'type': 'str'}, + 'function_parameters': {'key': 'properties.functionParameters', 'type': 'str'}, + 'version': {'key': 'properties.version', 'type': 'long'}, + 'tags': {'key': 'properties.tags', 'type': '[Tag]'}, + } + + def __init__( + self, + *, + category: str, + display_name: str, + query: str, + etag: Optional[str] = None, + function_alias: Optional[str] = None, + function_parameters: Optional[str] = None, + version: Optional[int] = None, + tags: Optional[List["Tag"]] = None, + **kwargs + ): + super(SavedSearch, self).__init__(**kwargs) + self.etag = etag + self.category = category + self.display_name = display_name + self.query = query + self.function_alias = function_alias + self.function_parameters = function_parameters + self.version = version + self.tags = tags + + +class SavedSearchesListResult(msrest.serialization.Model): + """The saved search list operation response. + + :param value: The array of result values. + :type value: list[~operational_insights_management_client.models.SavedSearch] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SavedSearch]'}, + } + + def __init__( + self, + *, + value: Optional[List["SavedSearch"]] = None, + **kwargs + ): + super(SavedSearchesListResult, self).__init__(**kwargs) + self.value = value + + +class SearchGetSchemaResponse(msrest.serialization.Model): + """The get schema operation response. + + :param metadata: The metadata from search results. + :type metadata: ~operational_insights_management_client.models.SearchMetadata + :param value: The array of result values. + :type value: list[~operational_insights_management_client.models.SearchSchemaValue] + """ + + _attribute_map = { + 'metadata': {'key': 'metadata', 'type': 'SearchMetadata'}, + 'value': {'key': 'value', 'type': '[SearchSchemaValue]'}, + } + + def __init__( + self, + *, + metadata: Optional["SearchMetadata"] = None, + value: Optional[List["SearchSchemaValue"]] = None, + **kwargs + ): + super(SearchGetSchemaResponse, self).__init__(**kwargs) + self.metadata = metadata + self.value = value + + +class SearchMetadata(msrest.serialization.Model): + """Metadata for search results. + + :param search_id: The request id of the search. + :type search_id: str + :param result_type: The search result type. + :type result_type: str + :param total: The total number of search results. + :type total: long + :param top: The number of top search results. + :type top: long + :param id: The id of the search results request. + :type id: str + :param core_summaries: The core summaries. + :type core_summaries: list[~operational_insights_management_client.models.CoreSummary] + :param status: The status of the search results. + :type status: str + :param start_time: The start time for the search. 
+ :type start_time: ~datetime.datetime + :param last_updated: The time of last update. + :type last_updated: ~datetime.datetime + :param e_tag: The ETag of the search results. + :type e_tag: str + :param sort: How the results are sorted. + :type sort: list[~operational_insights_management_client.models.SearchSort] + :param request_time: The request time. + :type request_time: long + :param aggregated_value_field: The aggregated value field. + :type aggregated_value_field: str + :param aggregated_grouping_fields: The aggregated grouping fields. + :type aggregated_grouping_fields: str + :param sum: The sum of all aggregates returned in the result set. + :type sum: long + :param max: The max of all aggregates returned in the result set. + :type max: long + :param schema: The schema. + :type schema: ~operational_insights_management_client.models.SearchMetadataSchema + """ + + _attribute_map = { + 'search_id': {'key': 'requestId', 'type': 'str'}, + 'result_type': {'key': 'resultType', 'type': 'str'}, + 'total': {'key': 'total', 'type': 'long'}, + 'top': {'key': 'top', 'type': 'long'}, + 'id': {'key': 'id', 'type': 'str'}, + 'core_summaries': {'key': 'coreSummaries', 'type': '[CoreSummary]'}, + 'status': {'key': 'status', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'last_updated': {'key': 'lastUpdated', 'type': 'iso-8601'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + 'sort': {'key': 'sort', 'type': '[SearchSort]'}, + 'request_time': {'key': 'requestTime', 'type': 'long'}, + 'aggregated_value_field': {'key': 'aggregatedValueField', 'type': 'str'}, + 'aggregated_grouping_fields': {'key': 'aggregatedGroupingFields', 'type': 'str'}, + 'sum': {'key': 'sum', 'type': 'long'}, + 'max': {'key': 'max', 'type': 'long'}, + 'schema': {'key': 'schema', 'type': 'SearchMetadataSchema'}, + } + + def __init__( + self, + *, + search_id: Optional[str] = None, + result_type: Optional[str] = None, + total: Optional[int] = None, + top: Optional[int] = None, + id: Optional[str] = None, + core_summaries: Optional[List["CoreSummary"]] = None, + status: Optional[str] = None, + start_time: Optional[datetime.datetime] = None, + last_updated: Optional[datetime.datetime] = None, + e_tag: Optional[str] = None, + sort: Optional[List["SearchSort"]] = None, + request_time: Optional[int] = None, + aggregated_value_field: Optional[str] = None, + aggregated_grouping_fields: Optional[str] = None, + sum: Optional[int] = None, + max: Optional[int] = None, + schema: Optional["SearchMetadataSchema"] = None, + **kwargs + ): + super(SearchMetadata, self).__init__(**kwargs) + self.search_id = search_id + self.result_type = result_type + self.total = total + self.top = top + self.id = id + self.core_summaries = core_summaries + self.status = status + self.start_time = start_time + self.last_updated = last_updated + self.e_tag = e_tag + self.sort = sort + self.request_time = request_time + self.aggregated_value_field = aggregated_value_field + self.aggregated_grouping_fields = aggregated_grouping_fields + self.sum = sum + self.max = max + self.schema = schema + + +class SearchMetadataSchema(msrest.serialization.Model): + """Schema metadata for search. + + :param name: The name of the metadata schema. + :type name: str + :param version: The version of the metadata schema. 
+ :type version: int + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'int'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + version: Optional[int] = None, + **kwargs + ): + super(SearchMetadataSchema, self).__init__(**kwargs) + self.name = name + self.version = version + + +class SearchSchemaValue(msrest.serialization.Model): + """Value object for schema results. + + All required parameters must be populated in order to send to Azure. + + :param name: The name of the schema. + :type name: str + :param display_name: The display name of the schema. + :type display_name: str + :param type: The type. + :type type: str + :param indexed: Required. The boolean that indicates the field is searchable as free text. + :type indexed: bool + :param stored: Required. The boolean that indicates whether or not the field is stored. + :type stored: bool + :param facet: Required. The boolean that indicates whether or not the field is a facet. + :type facet: bool + :param owner_type: The array of workflows containing the field. + :type owner_type: list[str] + """ + + _validation = { + 'indexed': {'required': True}, + 'stored': {'required': True}, + 'facet': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'indexed': {'key': 'indexed', 'type': 'bool'}, + 'stored': {'key': 'stored', 'type': 'bool'}, + 'facet': {'key': 'facet', 'type': 'bool'}, + 'owner_type': {'key': 'ownerType', 'type': '[str]'}, + } + + def __init__( + self, + *, + indexed: bool, + stored: bool, + facet: bool, + name: Optional[str] = None, + display_name: Optional[str] = None, + type: Optional[str] = None, + owner_type: Optional[List[str]] = None, + **kwargs + ): + super(SearchSchemaValue, self).__init__(**kwargs) + self.name = name + self.display_name = display_name + self.type = type + self.indexed = indexed + self.stored = stored + self.facet = facet + self.owner_type = owner_type + + +class SearchSort(msrest.serialization.Model): + """The sort parameters for search. + + :param name: The name of the field the search query is sorted on. + :type name: str + :param order: The sort order of the search. Possible values include: "asc", "desc". + :type order: str or ~operational_insights_management_client.models.SearchSortEnum + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'order': {'key': 'order', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + order: Optional[Union[str, "SearchSortEnum"]] = None, + **kwargs + ): + super(SearchSort, self).__init__(**kwargs) + self.name = name + self.order = order + + +class SharedKeys(msrest.serialization.Model): + """The shared keys for a workspace. + + :param primary_shared_key: The primary shared key of a workspace. + :type primary_shared_key: str + :param secondary_shared_key: The secondary shared key of a workspace. 
+ :type secondary_shared_key: str + """ + + _attribute_map = { + 'primary_shared_key': {'key': 'primarySharedKey', 'type': 'str'}, + 'secondary_shared_key': {'key': 'secondarySharedKey', 'type': 'str'}, + } + + def __init__( + self, + *, + primary_shared_key: Optional[str] = None, + secondary_shared_key: Optional[str] = None, + **kwargs + ): + super(SharedKeys, self).__init__(**kwargs) + self.primary_shared_key = primary_shared_key + self.secondary_shared_key = secondary_shared_key + + +class StorageAccount(msrest.serialization.Model): + """Describes a storage account connection. + + All required parameters must be populated in order to send to Azure. + + :param id: Required. The Azure Resource Manager ID of the storage account resource. + :type id: str + :param key: Required. The storage account key. + :type key: str + """ + + _validation = { + 'id': {'required': True}, + 'key': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'str'}, + } + + def __init__( + self, + *, + id: str, + key: str, + **kwargs + ): + super(StorageAccount, self).__init__(**kwargs) + self.id = id + self.key = key + + +class StorageInsight(Resource): + """The top level storage insight resource container. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param e_tag: The ETag of the storage insight. + :type e_tag: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param containers: The names of the blob containers that the workspace should read. + :type containers: list[str] + :param tables: The names of the Azure tables that the workspace should read. + :type tables: list[str] + :param storage_account: The storage account connection details. + :type storage_account: ~operational_insights_management_client.models.StorageAccount + :ivar status: The status of the storage insight. 
+ :vartype status: ~operational_insights_management_client.models.StorageInsightStatus + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'containers': {'key': 'properties.containers', 'type': '[str]'}, + 'tables': {'key': 'properties.tables', 'type': '[str]'}, + 'storage_account': {'key': 'properties.storageAccount', 'type': 'StorageAccount'}, + 'status': {'key': 'properties.status', 'type': 'StorageInsightStatus'}, + } + + def __init__( + self, + *, + e_tag: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + containers: Optional[List[str]] = None, + tables: Optional[List[str]] = None, + storage_account: Optional["StorageAccount"] = None, + **kwargs + ): + super(StorageInsight, self).__init__(**kwargs) + self.e_tag = e_tag + self.tags = tags + self.containers = containers + self.tables = tables + self.storage_account = storage_account + self.status = None + + +class StorageInsightListResult(msrest.serialization.Model): + """The list storage insights operation response. + + :param value: A list of storage insight items. + :type value: list[~operational_insights_management_client.models.StorageInsight] + :param odata_next_link: The link (url) to the next page of results. + :type odata_next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[StorageInsight]'}, + 'odata_next_link': {'key': '@odata\\.nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["StorageInsight"]] = None, + odata_next_link: Optional[str] = None, + **kwargs + ): + super(StorageInsightListResult, self).__init__(**kwargs) + self.value = value + self.odata_next_link = odata_next_link + + +class StorageInsightStatus(msrest.serialization.Model): + """The status of the storage insight. + + All required parameters must be populated in order to send to Azure. + + :param state: Required. The state of the storage insight connection to the workspace. Possible + values include: "OK", "ERROR". + :type state: str or ~operational_insights_management_client.models.StorageInsightState + :param description: Description of the state of the storage insight. + :type description: str + """ + + _validation = { + 'state': {'required': True}, + } + + _attribute_map = { + 'state': {'key': 'state', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + *, + state: Union[str, "StorageInsightState"], + description: Optional[str] = None, + **kwargs + ): + super(StorageInsightStatus, self).__init__(**kwargs) + self.state = state + self.description = description + + +class Table(Resource): + """Workspace data table definition. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". 
+ :vartype type: str + :param retention_in_days: The data table data retention in days, between 30 and 730. Setting + this property to null will default to the workspace retention. + :type retention_in_days: int + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'retention_in_days': {'maximum': 730, 'minimum': 30}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'retention_in_days': {'key': 'properties.retentionInDays', 'type': 'int'}, + } + + def __init__( + self, + *, + retention_in_days: Optional[int] = None, + **kwargs + ): + super(Table, self).__init__(**kwargs) + self.retention_in_days = retention_in_days + + +class TablesListResult(msrest.serialization.Model): + """The list tables operation response. + + :param value: A list of data tables. + :type value: list[~operational_insights_management_client.models.Table] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Table]'}, + } + + def __init__( + self, + *, + value: Optional[List["Table"]] = None, + **kwargs + ): + super(TablesListResult, self).__init__(**kwargs) + self.value = value + + +class Tag(msrest.serialization.Model): + """A tag of a saved search. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The tag name. + :type name: str + :param value: Required. The tag value. + :type value: str + """ + + _validation = { + 'name': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + *, + name: str, + value: str, + **kwargs + ): + super(Tag, self).__init__(**kwargs) + self.name = name + self.value = value + + +class UsageMetric(msrest.serialization.Model): + """A metric describing the usage of a resource. + + :param name: The name of the metric. + :type name: ~operational_insights_management_client.models.MetricName + :param unit: The units used for the metric. + :type unit: str + :param current_value: The current value of the metric. + :type current_value: float + :param limit: The quota limit for the metric. + :type limit: float + :param next_reset_time: The time that the metric's value will reset. + :type next_reset_time: ~datetime.datetime + :param quota_period: The quota period that determines the length of time between value resets. + :type quota_period: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'MetricName'}, + 'unit': {'key': 'unit', 'type': 'str'}, + 'current_value': {'key': 'currentValue', 'type': 'float'}, + 'limit': {'key': 'limit', 'type': 'float'}, + 'next_reset_time': {'key': 'nextResetTime', 'type': 'iso-8601'}, + 'quota_period': {'key': 'quotaPeriod', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional["MetricName"] = None, + unit: Optional[str] = None, + current_value: Optional[float] = None, + limit: Optional[float] = None, + next_reset_time: Optional[datetime.datetime] = None, + quota_period: Optional[str] = None, + **kwargs + ): + super(UsageMetric, self).__init__(**kwargs) + self.name = name + self.unit = unit + self.current_value = current_value + self.limit = limit + self.next_reset_time = next_reset_time + self.quota_period = quota_period + + +class UserIdentityProperties(msrest.serialization.Model): + """User assigned identity properties. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal id of user assigned identity. + :vartype principal_id: str + :ivar client_id: The client id of user assigned identity. + :vartype client_id: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'client_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UserIdentityProperties, self).__init__(**kwargs) + self.principal_id = None + self.client_id = None + + +class Workspace(TrackedResource): + """The top level Workspace resource container. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :param tags: A set of tags. Resource tags. + :type tags: dict[str, str] + :param location: Required. The geo-location where the resource lives. + :type location: str + :param e_tag: The ETag of the workspace. + :type e_tag: str + :param provisioning_state: The provisioning state of the workspace. Possible values include: + "Creating", "Succeeded", "Failed", "Canceled", "Deleting", "ProvisioningAccount", "Updating". + :type provisioning_state: str or + ~operational_insights_management_client.models.WorkspaceEntityStatus + :ivar customer_id: This is a read-only property. Represents the ID associated with the + workspace. + :vartype customer_id: str + :param sku: The SKU of the workspace. + :type sku: ~operational_insights_management_client.models.WorkspaceSku + :param retention_in_days: The workspace data retention in days. Allowed values are per pricing + plan. See pricing tiers documentation for details. + :type retention_in_days: int + :param workspace_capping: The daily volume cap for ingestion. + :type workspace_capping: ~operational_insights_management_client.models.WorkspaceCapping + :ivar created_date: Workspace creation date. + :vartype created_date: str + :ivar modified_date: Workspace modification date. + :vartype modified_date: str + :param public_network_access_for_ingestion: The network access type for accessing Log Analytics + ingestion. Possible values include: "Enabled", "Disabled". Default value: "Enabled". + :type public_network_access_for_ingestion: str or + ~operational_insights_management_client.models.PublicNetworkAccessType + :param public_network_access_for_query: The network access type for accessing Log Analytics + query. Possible values include: "Enabled", "Disabled". Default value: "Enabled". + :type public_network_access_for_query: str or + ~operational_insights_management_client.models.PublicNetworkAccessType + :param force_cmk_for_query: Indicates whether customer managed storage is mandatory for query + management. + :type force_cmk_for_query: bool + :ivar private_link_scoped_resources: List of linked private link scope resources. 
+ :vartype private_link_scoped_resources: + list[~operational_insights_management_client.models.PrivateLinkScopedResource] + :param features: Workspace features. + :type features: ~operational_insights_management_client.models.WorkspaceFeatures + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'location': {'required': True}, + 'customer_id': {'readonly': True}, + 'created_date': {'readonly': True}, + 'modified_date': {'readonly': True}, + 'private_link_scoped_resources': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'customer_id': {'key': 'properties.customerId', 'type': 'str'}, + 'sku': {'key': 'properties.sku', 'type': 'WorkspaceSku'}, + 'retention_in_days': {'key': 'properties.retentionInDays', 'type': 'int'}, + 'workspace_capping': {'key': 'properties.workspaceCapping', 'type': 'WorkspaceCapping'}, + 'created_date': {'key': 'properties.createdDate', 'type': 'str'}, + 'modified_date': {'key': 'properties.modifiedDate', 'type': 'str'}, + 'public_network_access_for_ingestion': {'key': 'properties.publicNetworkAccessForIngestion', 'type': 'str'}, + 'public_network_access_for_query': {'key': 'properties.publicNetworkAccessForQuery', 'type': 'str'}, + 'force_cmk_for_query': {'key': 'properties.forceCmkForQuery', 'type': 'bool'}, + 'private_link_scoped_resources': {'key': 'properties.privateLinkScopedResources', 'type': '[PrivateLinkScopedResource]'}, + 'features': {'key': 'properties.features', 'type': 'WorkspaceFeatures'}, + } + + def __init__( + self, + *, + location: str, + tags: Optional[Dict[str, str]] = None, + e_tag: Optional[str] = None, + provisioning_state: Optional[Union[str, "WorkspaceEntityStatus"]] = None, + sku: Optional["WorkspaceSku"] = None, + retention_in_days: Optional[int] = None, + workspace_capping: Optional["WorkspaceCapping"] = None, + public_network_access_for_ingestion: Optional[Union[str, "PublicNetworkAccessType"]] = "Enabled", + public_network_access_for_query: Optional[Union[str, "PublicNetworkAccessType"]] = "Enabled", + force_cmk_for_query: Optional[bool] = None, + features: Optional["WorkspaceFeatures"] = None, + **kwargs + ): + super(Workspace, self).__init__(tags=tags, location=location, **kwargs) + self.e_tag = e_tag + self.provisioning_state = provisioning_state + self.customer_id = None + self.sku = sku + self.retention_in_days = retention_in_days + self.workspace_capping = workspace_capping + self.created_date = None + self.modified_date = None + self.public_network_access_for_ingestion = public_network_access_for_ingestion + self.public_network_access_for_query = public_network_access_for_query + self.force_cmk_for_query = force_cmk_for_query + self.private_link_scoped_resources = None + self.features = features + + +class WorkspaceCapping(msrest.serialization.Model): + """The daily volume cap for ingestion. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param daily_quota_gb: The workspace daily quota for ingestion. + :type daily_quota_gb: float + :ivar quota_next_reset_time: The time when the quota will be rest. 
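As a hedged usage sketch of the `Workspace` model defined above: only `location` is required, read-only fields such as `customer_id` and `created_date` stay `None` until the service fills them in, and the two public-network-access parameters default to `"Enabled"`. `WorkspaceSku` and `WorkspaceCapping` are defined further down in this module; the import path is assumed from the file layout.
```
# Hypothetical construction of a Workspace model.
from azext_loganalytics.vendored_sdks.loganalytics import models  # assumed path

ws = models.Workspace(
    location="eastus",                                    # required
    tags={"env": "test"},
    sku=models.WorkspaceSku(name="PerGB2018"),            # SKU name is required
    retention_in_days=30,
    workspace_capping=models.WorkspaceCapping(daily_quota_gb=1.0),
)
assert ws.customer_id is None                             # read-only, server-populated
assert ws.public_network_access_for_ingestion == "Enabled"  # default value
```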
+ :vartype quota_next_reset_time: str + :ivar data_ingestion_status: The status of data ingestion for this workspace. Possible values + include: "RespectQuota", "ForceOn", "ForceOff", "OverQuota", "SubscriptionSuspended", + "ApproachingQuota". + :vartype data_ingestion_status: str or + ~operational_insights_management_client.models.DataIngestionStatus + """ + + _validation = { + 'quota_next_reset_time': {'readonly': True}, + 'data_ingestion_status': {'readonly': True}, + } + + _attribute_map = { + 'daily_quota_gb': {'key': 'dailyQuotaGb', 'type': 'float'}, + 'quota_next_reset_time': {'key': 'quotaNextResetTime', 'type': 'str'}, + 'data_ingestion_status': {'key': 'dataIngestionStatus', 'type': 'str'}, + } + + def __init__( + self, + *, + daily_quota_gb: Optional[float] = None, + **kwargs + ): + super(WorkspaceCapping, self).__init__(**kwargs) + self.daily_quota_gb = daily_quota_gb + self.quota_next_reset_time = None + self.data_ingestion_status = None + + +class WorkspaceFeatures(msrest.serialization.Model): + """Workspace features. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param enable_data_export: Flag that indicate if data should be exported. + :type enable_data_export: bool + :param immediate_purge_data_on30_days: Flag that describes if we want to remove the data after + 30 days. + :type immediate_purge_data_on30_days: bool + :param enable_log_access_using_only_resource_permissions: Flag that indicate which permission + to use - resource or workspace or both. + :type enable_log_access_using_only_resource_permissions: bool + :param cluster_resource_id: Dedicated LA cluster resourceId that is linked to the workspaces. + :type cluster_resource_id: str + :param disable_local_auth: Disable Non-AAD based Auth. + :type disable_local_auth: bool + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'enable_data_export': {'key': 'enableDataExport', 'type': 'bool'}, + 'immediate_purge_data_on30_days': {'key': 'immediatePurgeDataOn30Days', 'type': 'bool'}, + 'enable_log_access_using_only_resource_permissions': {'key': 'enableLogAccessUsingOnlyResourcePermissions', 'type': 'bool'}, + 'cluster_resource_id': {'key': 'clusterResourceId', 'type': 'str'}, + 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + enable_data_export: Optional[bool] = None, + immediate_purge_data_on30_days: Optional[bool] = None, + enable_log_access_using_only_resource_permissions: Optional[bool] = None, + cluster_resource_id: Optional[str] = None, + disable_local_auth: Optional[bool] = None, + **kwargs + ): + super(WorkspaceFeatures, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.enable_data_export = enable_data_export + self.immediate_purge_data_on30_days = immediate_purge_data_on30_days + self.enable_log_access_using_only_resource_permissions = enable_log_access_using_only_resource_permissions + self.cluster_resource_id = cluster_resource_id + self.disable_local_auth = disable_local_auth + + +class WorkspaceListManagementGroupsResult(msrest.serialization.Model): + """The list workspace management groups operation response. + + :param value: Gets or sets a list of management groups attached to the workspace. 
+ :type value: list[~operational_insights_management_client.models.ManagementGroup] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ManagementGroup]'}, + } + + def __init__( + self, + *, + value: Optional[List["ManagementGroup"]] = None, + **kwargs + ): + super(WorkspaceListManagementGroupsResult, self).__init__(**kwargs) + self.value = value + + +class WorkspaceListResult(msrest.serialization.Model): + """The list workspaces operation response. + + :param value: A list of workspaces. + :type value: list[~operational_insights_management_client.models.Workspace] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Workspace]'}, + } + + def __init__( + self, + *, + value: Optional[List["Workspace"]] = None, + **kwargs + ): + super(WorkspaceListResult, self).__init__(**kwargs) + self.value = value + + +class WorkspaceListUsagesResult(msrest.serialization.Model): + """The list workspace usages operation response. + + :param value: Gets or sets a list of usage metrics for a workspace. + :type value: list[~operational_insights_management_client.models.UsageMetric] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[UsageMetric]'}, + } + + def __init__( + self, + *, + value: Optional[List["UsageMetric"]] = None, + **kwargs + ): + super(WorkspaceListUsagesResult, self).__init__(**kwargs) + self.value = value + + +class WorkspacePatch(AzureEntityResource): + """The top level Workspace resource container. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar etag: Resource Etag. + :vartype etag: str + :param tags: A set of tags. Resource tags. Optional. + :type tags: dict[str, str] + :param provisioning_state: The provisioning state of the workspace. Possible values include: + "Creating", "Succeeded", "Failed", "Canceled", "Deleting", "ProvisioningAccount", "Updating". + :type provisioning_state: str or + ~operational_insights_management_client.models.WorkspaceEntityStatus + :ivar customer_id: This is a read-only property. Represents the ID associated with the + workspace. + :vartype customer_id: str + :param sku: The SKU of the workspace. + :type sku: ~operational_insights_management_client.models.WorkspaceSku + :param retention_in_days: The workspace data retention in days. Allowed values are per pricing + plan. See pricing tiers documentation for details. + :type retention_in_days: int + :param workspace_capping: The daily volume cap for ingestion. + :type workspace_capping: ~operational_insights_management_client.models.WorkspaceCapping + :ivar created_date: Workspace creation date. + :vartype created_date: str + :ivar modified_date: Workspace modification date. + :vartype modified_date: str + :param public_network_access_for_ingestion: The network access type for accessing Log Analytics + ingestion. Possible values include: "Enabled", "Disabled". Default value: "Enabled". 
+ :type public_network_access_for_ingestion: str or + ~operational_insights_management_client.models.PublicNetworkAccessType + :param public_network_access_for_query: The network access type for accessing Log Analytics + query. Possible values include: "Enabled", "Disabled". Default value: "Enabled". + :type public_network_access_for_query: str or + ~operational_insights_management_client.models.PublicNetworkAccessType + :param force_cmk_for_query: Indicates whether customer managed storage is mandatory for query + management. + :type force_cmk_for_query: bool + :ivar private_link_scoped_resources: List of linked private link scope resources. + :vartype private_link_scoped_resources: + list[~operational_insights_management_client.models.PrivateLinkScopedResource] + :param features: Workspace features. + :type features: ~operational_insights_management_client.models.WorkspaceFeatures + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'customer_id': {'readonly': True}, + 'created_date': {'readonly': True}, + 'modified_date': {'readonly': True}, + 'private_link_scoped_resources': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'customer_id': {'key': 'properties.customerId', 'type': 'str'}, + 'sku': {'key': 'properties.sku', 'type': 'WorkspaceSku'}, + 'retention_in_days': {'key': 'properties.retentionInDays', 'type': 'int'}, + 'workspace_capping': {'key': 'properties.workspaceCapping', 'type': 'WorkspaceCapping'}, + 'created_date': {'key': 'properties.createdDate', 'type': 'str'}, + 'modified_date': {'key': 'properties.modifiedDate', 'type': 'str'}, + 'public_network_access_for_ingestion': {'key': 'properties.publicNetworkAccessForIngestion', 'type': 'str'}, + 'public_network_access_for_query': {'key': 'properties.publicNetworkAccessForQuery', 'type': 'str'}, + 'force_cmk_for_query': {'key': 'properties.forceCmkForQuery', 'type': 'bool'}, + 'private_link_scoped_resources': {'key': 'properties.privateLinkScopedResources', 'type': '[PrivateLinkScopedResource]'}, + 'features': {'key': 'properties.features', 'type': 'WorkspaceFeatures'}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + provisioning_state: Optional[Union[str, "WorkspaceEntityStatus"]] = None, + sku: Optional["WorkspaceSku"] = None, + retention_in_days: Optional[int] = None, + workspace_capping: Optional["WorkspaceCapping"] = None, + public_network_access_for_ingestion: Optional[Union[str, "PublicNetworkAccessType"]] = "Enabled", + public_network_access_for_query: Optional[Union[str, "PublicNetworkAccessType"]] = "Enabled", + force_cmk_for_query: Optional[bool] = None, + features: Optional["WorkspaceFeatures"] = None, + **kwargs + ): + super(WorkspacePatch, self).__init__(**kwargs) + self.tags = tags + self.provisioning_state = provisioning_state + self.customer_id = None + self.sku = sku + self.retention_in_days = retention_in_days + self.workspace_capping = workspace_capping + self.created_date = None + self.modified_date = None + self.public_network_access_for_ingestion = public_network_access_for_ingestion + self.public_network_access_for_query = public_network_access_for_query + self.force_cmk_for_query = 
force_cmk_for_query + self.private_link_scoped_resources = None + self.features = features + + +class WorkspacePurgeBody(msrest.serialization.Model): + """Describes the body of a purge request for an App Insights Workspace. + + All required parameters must be populated in order to send to Azure. + + :param table: Required. Table from which to purge data. + :type table: str + :param filters: Required. The set of columns and filters (queries) to run over them to purge + the resulting data. + :type filters: list[~operational_insights_management_client.models.WorkspacePurgeBodyFilters] + """ + + _validation = { + 'table': {'required': True}, + 'filters': {'required': True}, + } + + _attribute_map = { + 'table': {'key': 'table', 'type': 'str'}, + 'filters': {'key': 'filters', 'type': '[WorkspacePurgeBodyFilters]'}, + } + + def __init__( + self, + *, + table: str, + filters: List["WorkspacePurgeBodyFilters"], + **kwargs + ): + super(WorkspacePurgeBody, self).__init__(**kwargs) + self.table = table + self.filters = filters + + +class WorkspacePurgeBodyFilters(msrest.serialization.Model): + """User-defined filters to return data which will be purged from the table. + + :param column: The column of the table over which the given query should run. + :type column: str + :param operator: A query operator to evaluate over the provided column and value(s). Supported + operators are ==, =~, in, in~, >, >=, <, <=, between, and have the same behavior as they would + in a KQL query. + :type operator: str + :param value: the value for the operator to function over. This can be a number (e.g., > 100), + a string (timestamp >= '2017-09-01') or array of values. + :type value: object + :param key: When filtering over custom dimensions, this key will be used as the name of the + custom dimension. + :type key: str + """ + + _attribute_map = { + 'column': {'key': 'column', 'type': 'str'}, + 'operator': {'key': 'operator', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'object'}, + 'key': {'key': 'key', 'type': 'str'}, + } + + def __init__( + self, + *, + column: Optional[str] = None, + operator: Optional[str] = None, + value: Optional[object] = None, + key: Optional[str] = None, + **kwargs + ): + super(WorkspacePurgeBodyFilters, self).__init__(**kwargs) + self.column = column + self.operator = operator + self.value = value + self.key = key + + +class WorkspacePurgeResponse(msrest.serialization.Model): + """Response containing operationId for a specific purge action. + + All required parameters must be populated in order to send to Azure. + + :param operation_id: Required. Id to use when querying for status for a particular purge + operation. + :type operation_id: str + """ + + _validation = { + 'operation_id': {'required': True}, + } + + _attribute_map = { + 'operation_id': {'key': 'operationId', 'type': 'str'}, + } + + def __init__( + self, + *, + operation_id: str, + **kwargs + ): + super(WorkspacePurgeResponse, self).__init__(**kwargs) + self.operation_id = operation_id + + +class WorkspacePurgeStatusResponse(msrest.serialization.Model): + """Response containing status for a specific purge operation. + + All required parameters must be populated in order to send to Azure. + + :param status: Required. Status of the operation represented by the requested Id. Possible + values include: "pending", "completed". 
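A short, hypothetical sketch of assembling the purge request models above: one `WorkspacePurgeBody` wraps a required table name and a required list of `WorkspacePurgeBodyFilters`, each filter mirroring a KQL-style predicate over a column.
```
# Hypothetical purge request body (illustrative only).
from azext_loganalytics.vendored_sdks.loganalytics import models  # assumed path

purge_body = models.WorkspacePurgeBody(
    table="Heartbeat",                         # required: table to purge from
    filters=[
        models.WorkspacePurgeBodyFilters(
            column="TimeGenerated",
            operator="<",                      # KQL-style operator
            value="2017-09-01",                # string, number, or array of values
        )
    ],
)
```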
+ :type status: str or ~operational_insights_management_client.models.PurgeState + """ + + _validation = { + 'status': {'required': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + *, + status: Union[str, "PurgeState"], + **kwargs + ): + super(WorkspacePurgeStatusResponse, self).__init__(**kwargs) + self.status = status + + +class WorkspaceSku(msrest.serialization.Model): + """The SKU (tier) of a workspace. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The name of the SKU. Possible values include: "Free", "Standard", + "Premium", "PerNode", "PerGB2018", "Standalone", "CapacityReservation", "LACluster". + :type name: str or ~operational_insights_management_client.models.WorkspaceSkuNameEnum + :param capacity_reservation_level: The capacity reservation level for this workspace, when + CapacityReservation sku is selected. + :type capacity_reservation_level: int + :ivar last_sku_update: The last time when the sku was updated. + :vartype last_sku_update: str + """ + + _validation = { + 'name': {'required': True}, + 'last_sku_update': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'capacity_reservation_level': {'key': 'capacityReservationLevel', 'type': 'int'}, + 'last_sku_update': {'key': 'lastSkuUpdate', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Union[str, "WorkspaceSkuNameEnum"], + capacity_reservation_level: Optional[int] = None, + **kwargs + ): + super(WorkspaceSku, self).__init__(**kwargs) + self.name = name + self.capacity_reservation_level = capacity_reservation_level + self.last_sku_update = None diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/models/_operational_insights_management_client_enums.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/models/_operational_insights_management_client_enums.py new file mode 100644 index 00000000000..ed6292ec258 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/models/_operational_insights_management_client_enums.py @@ -0,0 +1,201 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum, EnumMeta +from six import with_metaclass + +class _CaseInsensitiveEnumMeta(EnumMeta): + def __getitem__(self, name): + return super().__getitem__(name.upper()) + + def __getattr__(cls, name): + """Return the enum member matching `name` + We use __getattr__ instead of descriptors or inserting into the enum + class' __dict__ in order to support `name` and `value` being both + properties for enum members (which live in the class' __dict__) and + enum members themselves. + """ + try: + return cls._member_map_[name.upper()] + except KeyError: + raise AttributeError(name) + + +class BillingType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Configures whether billing will be only on the cluster or each workspace will be billed by its + proportional use. 
This does not change the overall billing, only how it will be distributed. + Default value is 'Cluster' + """ + + CLUSTER = "Cluster" + WORKSPACES = "Workspaces" + +class ClusterEntityStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The provisioning state of the cluster. + """ + + CREATING = "Creating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + DELETING = "Deleting" + PROVISIONING_ACCOUNT = "ProvisioningAccount" + UPDATING = "Updating" + +class ClusterSkuNameEnum(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The name of the SKU. + """ + + CAPACITY_RESERVATION = "CapacityReservation" + +class DataIngestionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The status of data ingestion for this workspace. + """ + + RESPECT_QUOTA = "RespectQuota" #: Ingestion enabled following daily cap quota reset, or subscription enablement. + FORCE_ON = "ForceOn" #: Ingestion started following service setting change. + FORCE_OFF = "ForceOff" #: Ingestion stopped following service setting change. + OVER_QUOTA = "OverQuota" #: Reached daily cap quota, ingestion stopped. + SUBSCRIPTION_SUSPENDED = "SubscriptionSuspended" #: Ingestion stopped following suspended subscription. + APPROACHING_QUOTA = "ApproachingQuota" #: 80% of daily cap quota reached. + +class DataSourceKind(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The kind of the DataSource. + """ + + WINDOWS_EVENT = "WindowsEvent" + WINDOWS_PERFORMANCE_COUNTER = "WindowsPerformanceCounter" + IIS_LOGS = "IISLogs" + LINUX_SYSLOG = "LinuxSyslog" + LINUX_SYSLOG_COLLECTION = "LinuxSyslogCollection" + LINUX_PERFORMANCE_OBJECT = "LinuxPerformanceObject" + LINUX_PERFORMANCE_COLLECTION = "LinuxPerformanceCollection" + CUSTOM_LOG = "CustomLog" + CUSTOM_LOG_COLLECTION = "CustomLogCollection" + AZURE_AUDIT_LOG = "AzureAuditLog" + AZURE_ACTIVITY_LOG = "AzureActivityLog" + GENERIC_DATA_SOURCE = "GenericDataSource" + CHANGE_TRACKING_CUSTOM_PATH = "ChangeTrackingCustomPath" + CHANGE_TRACKING_PATH = "ChangeTrackingPath" + CHANGE_TRACKING_SERVICES = "ChangeTrackingServices" + CHANGE_TRACKING_DATA_TYPE_CONFIGURATION = "ChangeTrackingDataTypeConfiguration" + CHANGE_TRACKING_DEFAULT_REGISTRY = "ChangeTrackingDefaultRegistry" + CHANGE_TRACKING_REGISTRY = "ChangeTrackingRegistry" + CHANGE_TRACKING_LINUX_PATH = "ChangeTrackingLinuxPath" + LINUX_CHANGE_TRACKING_PATH = "LinuxChangeTrackingPath" + CHANGE_TRACKING_CONTENT_LOCATION = "ChangeTrackingContentLocation" + WINDOWS_TELEMETRY = "WindowsTelemetry" + OFFICE365 = "Office365" + SECURITY_WINDOWS_BASELINE_CONFIGURATION = "SecurityWindowsBaselineConfiguration" + SECURITY_CENTER_SECURITY_WINDOWS_BASELINE_CONFIGURATION = "SecurityCenterSecurityWindowsBaselineConfiguration" + SECURITY_EVENT_COLLECTION_CONFIGURATION = "SecurityEventCollectionConfiguration" + SECURITY_INSIGHTS_SECURITY_EVENT_COLLECTION_CONFIGURATION = "SecurityInsightsSecurityEventCollectionConfiguration" + IMPORT_COMPUTER_GROUP = "ImportComputerGroup" + NETWORK_MONITORING = "NetworkMonitoring" + ITSM = "Itsm" + DNS_ANALYTICS = "DnsAnalytics" + APPLICATION_INSIGHTS = "ApplicationInsights" + SQL_DATA_CLASSIFICATION = "SqlDataClassification" + +class DataSourceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Linked storage accounts type. + """ + + CUSTOM_LOGS = "CustomLogs" + AZURE_WATSON = "AzureWatson" + QUERY = "Query" + ALERTS = "Alerts" + +class IdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Type of managed service identity. 
+ """ + + SYSTEM_ASSIGNED = "SystemAssigned" + USER_ASSIGNED = "UserAssigned" + NONE = "None" + +class LinkedServiceEntityStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The provisioning state of the linked service. + """ + + SUCCEEDED = "Succeeded" + DELETING = "Deleting" + PROVISIONING_ACCOUNT = "ProvisioningAccount" + UPDATING = "Updating" + +class PublicNetworkAccessType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The network access type for operating on the Log Analytics Workspace. By default it is Enabled + """ + + ENABLED = "Enabled" #: Enables connectivity to Log Analytics through public DNS. + DISABLED = "Disabled" #: Disables public connectivity to Log Analytics through public DNS. + +class PurgeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Status of the operation represented by the requested Id. + """ + + PENDING = "pending" + COMPLETED = "completed" + +class SearchSortEnum(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The sort order of the search. + """ + + ASC = "asc" + DESC = "desc" + +class SkuNameEnum(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The name of the Service Tier. + """ + + FREE = "Free" + STANDARD = "Standard" + PREMIUM = "Premium" + PER_NODE = "PerNode" + PER_GB2018 = "PerGB2018" + STANDALONE = "Standalone" + CAPACITY_RESERVATION = "CapacityReservation" + +class StorageInsightState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The state of the storage insight connection to the workspace + """ + + OK = "OK" + ERROR = "ERROR" + +class Type(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of the destination resource + """ + + STORAGE_ACCOUNT = "StorageAccount" + EVENT_HUB = "EventHub" + +class WorkspaceEntityStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The provisioning state of the workspace. + """ + + CREATING = "Creating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + DELETING = "Deleting" + PROVISIONING_ACCOUNT = "ProvisioningAccount" + UPDATING = "Updating" + +class WorkspaceSkuNameEnum(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The name of the SKU. + """ + + FREE = "Free" + STANDARD = "Standard" + PREMIUM = "Premium" + PER_NODE = "PerNode" + PER_GB2018 = "PerGB2018" + STANDALONE = "Standalone" + CAPACITY_RESERVATION = "CapacityReservation" + LA_CLUSTER = "LACluster" diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/__init__.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/__init__.py new file mode 100644 index 00000000000..0ef7b90f330 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/__init__.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from ._data_exports_operations import DataExportsOperations +from ._data_sources_operations import DataSourcesOperations +from ._intelligence_packs_operations import IntelligencePacksOperations +from ._linked_services_operations import LinkedServicesOperations +from ._linked_storage_accounts_operations import LinkedStorageAccountsOperations +from ._management_groups_operations import ManagementGroupsOperations +from ._operation_statuses_operations import OperationStatusesOperations +from ._shared_keys_operations import SharedKeysOperations +from ._usages_operations import UsagesOperations +from ._storage_insight_configs_operations import StorageInsightConfigsOperations +from ._saved_searches_operations import SavedSearchesOperations +from ._available_service_tiers_operations import AvailableServiceTiersOperations +from ._gateways_operations import GatewaysOperations +from ._schema_operations import SchemaOperations +from ._workspace_purge_operations import WorkspacePurgeOperations +from ._tables_operations import TablesOperations +from ._clusters_operations import ClustersOperations +from ._operations import Operations +from ._workspaces_operations import WorkspacesOperations +from ._deleted_workspaces_operations import DeletedWorkspacesOperations + +__all__ = [ + 'DataExportsOperations', + 'DataSourcesOperations', + 'IntelligencePacksOperations', + 'LinkedServicesOperations', + 'LinkedStorageAccountsOperations', + 'ManagementGroupsOperations', + 'OperationStatusesOperations', + 'SharedKeysOperations', + 'UsagesOperations', + 'StorageInsightConfigsOperations', + 'SavedSearchesOperations', + 'AvailableServiceTiersOperations', + 'GatewaysOperations', + 'SchemaOperations', + 'WorkspacePurgeOperations', + 'TablesOperations', + 'ClustersOperations', + 'Operations', + 'WorkspacesOperations', + 'DeletedWorkspacesOperations', +] diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_available_service_tiers_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_available_service_tiers_operations.py new file mode 100644 index 00000000000..ec3fb650497 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_available_service_tiers_operations.py @@ -0,0 +1,104 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class AvailableServiceTiersOperations(object): + """AvailableServiceTiersOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_workspace( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> List["models.AvailableServiceTier"] + """Gets the available service tiers for the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: list of AvailableServiceTier, or the result of cls(response) + :rtype: list[~operational_insights_management_client.models.AvailableServiceTier] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[List["models.AvailableServiceTier"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('[AvailableServiceTier]', pipeline_response) + + if cls: + return 
cls(pipeline_response, deserialized, {}) + + return deserialized + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/availableServiceTiers'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_clusters_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_clusters_operations.py new file mode 100644 index 00000000000..fc63a71c5b6 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_clusters_operations.py @@ -0,0 +1,559 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class ClustersOperations(object): + """ClustersOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_resource_group( + self, + resource_group_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.ClusterListResult"] + """Gets Log Analytics clusters in a resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ClusterListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~operational_insights_management_client.models.ClusterListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ClusterListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters'} # type: ignore + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.ClusterListResult"] + """Gets the Log Analytics clusters in a subscription. 
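The list operations above return an `ItemPaged` iterator that lazily follows `next_link`, yielding `Cluster` models rather than raw pages. A hedged consumption sketch; `client` stands for an instantiated management client (its name and the `clusters` attribute are assumptions inferred from the `operational_insights_management_client` namespace and the `ClustersOperations` class name):
```
# Hypothetical paging consumption.
for cluster in client.clusters.list_by_resource_group("my-resource-group"):
    print(cluster.name)

# Listing across the whole subscription follows the same paging protocol.
all_clusters = list(client.clusters.list())
```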
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ClusterListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~operational_insights_management_client.models.ClusterListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ClusterListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ClusterListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/clusters'} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name, # type: str + cluster_name, # type: str + parameters, # type: "models.Cluster" + **kwargs # type: Any + ): + # type: (...) 
-> Optional["models.Cluster"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Cluster"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_or_update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'Cluster') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Cluster', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}'} # type: ignore + + def begin_create_or_update( + self, + resource_group_name, # type: str + cluster_name, # type: str + parameters, # type: "models.Cluster" + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.Cluster"] + """Create or update a Log Analytics cluster. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: The name of the Log Analytics cluster. + :type cluster_name: str + :param parameters: The parameters required to create or update a Log Analytics cluster. + :type parameters: ~operational_insights_management_client.models.Cluster + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either Cluster or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~operational_insights_management_client.models.Cluster] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + parameters=parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}'} # type: ignore + + def _delete_initial( + self, + resource_group_name, # type: str + cluster_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}'} # type: ignore + + def begin_delete( + self, + resource_group_name, # type: str + cluster_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a cluster instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: Name of the Log Analytics Cluster. + :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + cluster_name=cluster_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + cluster_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.Cluster" + """Gets a Log Analytics cluster instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: Name of the Log Analytics Cluster. 
+ :type cluster_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Cluster, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.Cluster + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}'} # type: ignore + + def update( + self, + resource_group_name, # type: str + cluster_name, # type: str + parameters, # type: "models.ClusterPatch" + **kwargs # type: Any + ): + # type: (...) -> "models.Cluster" + """Updates a Log Analytics cluster. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param cluster_name: Name of the Log Analytics Cluster. + :type cluster_name: str + :param parameters: The parameters required to patch a Log Analytics cluster. 
+ :type parameters: ~operational_insights_management_client.models.ClusterPatch + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Cluster, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.Cluster + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Cluster"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'clusterName': self._serialize.url("cluster_name", cluster_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'ClusterPatch') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Cluster', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/clusters/{clusterName}'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_data_exports_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_data_exports_operations.py new file mode 100644 index 00000000000..053756099e3 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_data_exports_operations.py @@ -0,0 +1,323 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class DataExportsOperations(object): + """DataExportsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_workspace( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.DataExportListResult"] + """Lists the data export instances within a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DataExportListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~operational_insights_management_client.models.DataExportListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DataExportListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('DataExportListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + workspace_name, # type: str + data_export_name, # type: str + parameters, # type: "models.DataExport" + **kwargs # type: Any + ): + # type: (...) -> "models.DataExport" + """Create or update a data export. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param data_export_name: The data export rule name. + :type data_export_name: str + :param parameters: The parameters required to create or update a data export. 
+ :type parameters: ~operational_insights_management_client.models.DataExport + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataExport, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.DataExport + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DataExport"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'dataExportName': self._serialize.url("data_export_name", data_export_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z][A-Za-z0-9-]+[A-Za-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'DataExport') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('DataExport', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('DataExport', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports/{dataExportName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + workspace_name, # type: str + data_export_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.DataExport" + """Gets a data export instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param data_export_name: The data export rule name. 
+ :type data_export_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataExport, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.DataExport + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DataExport"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'dataExportName': self._serialize.url("data_export_name", data_export_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DataExport', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports/{dataExportName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + workspace_name, # type: str + data_export_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes the specified data export in a given workspace.. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param data_export_name: The data export rule name. 
+ :type data_export_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'dataExportName': self._serialize.url("data_export_name", data_export_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 404]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataExports/{dataExportName}'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_data_sources_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_data_sources_operations.py new file mode 100644 index 00000000000..37176e7a012 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_data_sources_operations.py @@ -0,0 +1,326 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class DataSourcesOperations(object): + """DataSourcesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def create_or_update( + self, + resource_group_name, # type: str + workspace_name, # type: str + data_source_name, # type: str + parameters, # type: "models.DataSource" + **kwargs # type: Any + ): + # type: (...) -> "models.DataSource" + """Create or update a data source. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param data_source_name: The name of the datasource resource. + :type data_source_name: str + :param parameters: The parameters required to create or update a datasource. + :type parameters: ~operational_insights_management_client.models.DataSource + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataSource, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.DataSource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DataSource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'dataSourceName': self._serialize.url("data_source_name", data_source_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: 
Dict[str, Any] + body_content = self._serialize.body(parameters, 'DataSource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('DataSource', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('DataSource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources/{dataSourceName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + workspace_name, # type: str + data_source_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes a data source instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param data_source_name: Name of the datasource. + :type data_source_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'dataSourceName': self._serialize.url("data_source_name", data_source_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources/{dataSourceName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + workspace_name, # type: str + data_source_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.DataSource" + """Gets a datasource instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param data_source_name: Name of the datasource. + :type data_source_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataSource, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.DataSource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DataSource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'dataSourceName': self._serialize.url("data_source_name", data_source_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('DataSource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources/{dataSourceName}'} # type: ignore + + def list_by_workspace( + self, + resource_group_name, # type: str + workspace_name, # type: str + filter, # type: str + skiptoken=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.DataSourceListResult"] + """Gets the first page of data source instances in a workspace with the link to the next page. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :param filter: The filter to apply on the operation. + :type filter: str + :param skiptoken: Starting point of the collection of data source instances. + :type skiptoken: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either DataSourceListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~operational_insights_management_client.models.DataSourceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.DataSourceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + if skiptoken is not None: + query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('DataSourceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/dataSources'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_deleted_workspaces_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_deleted_workspaces_operations.py new file mode 100644 index 00000000000..f594994c778 --- /dev/null +++ 
b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_deleted_workspaces_operations.py @@ -0,0 +1,186 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class DeletedWorkspacesOperations(object): + """DeletedWorkspacesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.WorkspaceListResult"] + """Gets recently deleted workspaces in a subscription, available for recovery. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~operational_insights_management_client.models.WorkspaceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/deletedWorkspaces'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.WorkspaceListResult"] + """Gets recently deleted workspaces in a resource group, available for recovery. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~operational_insights_management_client.models.WorkspaceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/deletedWorkspaces'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_gateways_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_gateways_operations.py new file mode 100644 index 00000000000..bc734ada2a6 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_gateways_operations.py @@ -0,0 +1,103 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
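+#
+# Editorial usage note (not part of the generated code): this module defines a
+# single delete operation for removing a Log Analytics gateway record from a
+# workspace. A minimal sketch, assuming the generated client exposes the group
+# as `gateways` and that the argument values are placeholders, not taken from
+# this change:
+#
+#     client.gateways.delete(resource_group, workspace, gateway_id)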
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class GatewaysOperations(object): + """GatewaysOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def delete( + self, + resource_group_name, # type: str + workspace_name, # type: str + gateway_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Delete a Log Analytics gateway. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param gateway_id: The Log Analytics gateway Id. 
+ :type gateway_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'gatewayId': self._serialize.url("gateway_id", gateway_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/gateways/{gatewayId}'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_intelligence_packs_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_intelligence_packs_operations.py new file mode 100644 index 00000000000..fa655fdce44 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_intelligence_packs_operations.py @@ -0,0 +1,221 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class IntelligencePacksOperations(object): + """IntelligencePacksOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def disable( + self, + resource_group_name, # type: str + workspace_name, # type: str + intelligence_pack_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Disables an intelligence pack for a given workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param intelligence_pack_name: The name of the intelligence pack to be disabled. + :type intelligence_pack_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + + # Construct URL + url = self.disable.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'intelligencePackName': self._serialize.url("intelligence_pack_name", intelligence_pack_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + disable.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/intelligencePacks/{intelligencePackName}/Disable'} # type: ignore + + def enable( + self, + resource_group_name, # type: str + workspace_name, # type: str + intelligence_pack_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Enables an intelligence pack for a given workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param intelligence_pack_name: The name of the intelligence pack to be enabled. + :type intelligence_pack_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + + # Construct URL + url = self.enable.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'intelligencePackName': self._serialize.url("intelligence_pack_name", intelligence_pack_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + enable.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/intelligencePacks/{intelligencePackName}/Enable'} # type: ignore + + def list( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> List["models.IntelligencePack"] + """Lists all the intelligence packs possible and whether they are enabled or disabled for a given + workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: list of IntelligencePack, or the result of cls(response) + :rtype: list[~operational_insights_management_client.models.IntelligencePack] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[List["models.IntelligencePack"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('[IntelligencePack]', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/intelligencePacks'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_linked_services_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_linked_services_operations.py new file mode 100644 index 00000000000..b585dab4d19 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_linked_services_operations.py @@ -0,0 +1,448 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
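+#
+# Editorial usage note (not part of the generated code): the create and delete
+# operations in this module are long-running and return an LROPoller; callers
+# typically block on the poller for the final resource. A minimal sketch,
+# assuming the generated client exposes this group as `linked_services` and
+# that the argument values are placeholders, not taken from this change:
+#
+#     poller = client.linked_services.begin_create_or_update(
+#         resource_group, workspace, linked_service_name, parameters)
+#     linked_service = poller.result()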
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class LinkedServicesOperations(object): + """LinkedServicesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def _create_or_update_initial( + self, + resource_group_name, # type: str + workspace_name, # type: str + linked_service_name, # type: str + parameters, # type: "models.LinkedService" + **kwargs # type: Any + ): + # type: (...) 
-> "models.LinkedService" + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedService"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_or_update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'LinkedService') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('LinkedService', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('LinkedService', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}'} # type: ignore + + def begin_create_or_update( + self, + resource_group_name, # type: str + workspace_name, # type: str + linked_service_name, # type: str + parameters, # type: "models.LinkedService" + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.LinkedService"] + """Create or update a linked service. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param linked_service_name: Name of the linkedServices resource. + :type linked_service_name: str + :param parameters: The parameters required to create or update a linked service. 
+ :type parameters: ~operational_insights_management_client.models.LinkedService + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either LinkedService or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~operational_insights_management_client.models.LinkedService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedService"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + linked_service_name=linked_service_name, + parameters=parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('LinkedService', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}'} # type: ignore + + def _delete_initial( + self, + resource_group_name, # type: str + workspace_name, # type: str + linked_service_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> Optional["models.LinkedService"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedService"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('LinkedService', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}'} # type: ignore + + def begin_delete( + self, + resource_group_name, # type: str + workspace_name, # type: str + linked_service_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.LinkedService"] + """Deletes a linked service instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param linked_service_name: Name of the linked service. + :type linked_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either LinkedService or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~operational_insights_management_client.models.LinkedService] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedService"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + linked_service_name=linked_service_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('LinkedService', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + workspace_name, # type: str + linked_service_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.LinkedService" + """Gets a linked service instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param linked_service_name: Name of the linked service. 
+ :type linked_service_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LinkedService, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.LinkedService + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedService"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('LinkedService', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}'} # type: ignore + + def list_by_workspace( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.LinkedServiceListResult"] + """Gets the linked services instances in a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either LinkedServiceListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~operational_insights_management_client.models.LinkedServiceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('LinkedServiceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_linked_storage_accounts_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_linked_storage_accounts_operations.py new file mode 100644 index 00000000000..5074d3a52d0 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_linked_storage_accounts_operations.py @@ -0,0 +1,317 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
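The linked-service operations above wrap the initial PUT/DELETE in an azure.core LROPoller instead of returning the final model directly. A minimal sketch of driving that poller follows; the OperationalInsightsManagementClient class name, the DefaultAzureCredential usage, and the LinkedService field are assumptions inferred from the package name and the extension README, not guaranteed by this diff.
```python
# Minimal sketch, assuming the vendored client is exposed as
# OperationalInsightsManagementClient and accepts (credential, subscription_id).
from azure.identity import DefaultAzureCredential
from operational_insights_management_client import OperationalInsightsManagementClient
from operational_insights_management_client.models import LinkedService

client = OperationalInsightsManagementClient(
    DefaultAzureCredential(),
    "00000000-0000-0000-0000-000000000000",  # subscription id (placeholder)
)

poller = client.linked_services.begin_create_or_update(
    resource_group_name="mms-eus",
    workspace_name="TestLinkWS",
    linked_service_name="Cluster",
    parameters=LinkedService(
        # field name assumed from the CLI's --write-access-resource-id option
        write_access_resource_id=(
            "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/"
            "mms-eus/providers/Microsoft.OperationalInsights/clusters/testcluster"
        )
    ),
)
linked_service = poller.result()  # blocks until the ARM long-running operation finishes
print(linked_service.name)
```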
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class LinkedStorageAccountsOperations(object): + """LinkedStorageAccountsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def create_or_update( + self, + resource_group_name, # type: str + workspace_name, # type: str + data_source_type, # type: Union[str, "models.DataSourceType"] + parameters, # type: "models.LinkedStorageAccountsResource" + **kwargs # type: Any + ): + # type: (...) -> "models.LinkedStorageAccountsResource" + """Create or Update a link relation between current workspace and a group of storage accounts of a + specific data source type. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param data_source_type: Linked storage accounts type. + :type data_source_type: str or ~operational_insights_management_client.models.DataSourceType + :param parameters: The parameters required to create or update linked storage accounts. 
+ :type parameters: ~operational_insights_management_client.models.LinkedStorageAccountsResource + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LinkedStorageAccountsResource, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.LinkedStorageAccountsResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedStorageAccountsResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'dataSourceType': self._serialize.url("data_source_type", data_source_type, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'LinkedStorageAccountsResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('LinkedStorageAccountsResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts/{dataSourceType}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + workspace_name, # type: str + data_source_type, # type: Union[str, "models.DataSourceType"] + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes all linked storage accounts of a specific data source type associated with the + specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param data_source_type: Linked storage accounts type. 
+ :type data_source_type: str or ~operational_insights_management_client.models.DataSourceType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'dataSourceType': self._serialize.url("data_source_type", data_source_type, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts/{dataSourceType}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + workspace_name, # type: str + data_source_type, # type: Union[str, "models.DataSourceType"] + **kwargs # type: Any + ): + # type: (...) -> "models.LinkedStorageAccountsResource" + """Gets all linked storage account of a specific data source type associated with the specified + workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param data_source_type: Linked storage accounts type. 
+ :type data_source_type: str or ~operational_insights_management_client.models.DataSourceType + :keyword callable cls: A custom type or function that will be passed the direct response + :return: LinkedStorageAccountsResource, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.LinkedStorageAccountsResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedStorageAccountsResource"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'dataSourceType': self._serialize.url("data_source_type", data_source_type, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('LinkedStorageAccountsResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts/{dataSourceType}'} # type: ignore + + def list_by_workspace( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.LinkedStorageAccountsListResult"] + """Gets all linked storage accounts associated with the specified workspace, storage accounts will + be sorted by their data source type. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either LinkedStorageAccountsListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~operational_insights_management_client.models.LinkedStorageAccountsListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedStorageAccountsListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('LinkedStorageAccountsListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedStorageAccounts'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_management_groups_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_management_groups_operations.py new file mode 100644 index 00000000000..29be064a1cf --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_management_groups_operations.py @@ -0,0 +1,121 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
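The list_by_workspace methods above return an azure.core.paging.ItemPaged built from the get_next/extract_data closures, so HTTP requests are issued lazily as the iterator is consumed. A minimal sketch of consuming one, reusing the assumed client from the earlier sketch; the storage_account_ids attribute is inferred from the CLI's --storage-account-ids parameter and is an assumption.
```python
# Minimal sketch: pages are fetched lazily as the ItemPaged is iterated.
for account_link in client.linked_storage_accounts.list_by_workspace(
    resource_group_name="mms-eus",
    workspace_name="testLinkStorageAccountsWS",
):
    # "storage_account_ids" mirrors the CLI parameter; assumed model attribute.
    print(account_link.name, account_link.storage_account_ids)
```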
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class ManagementGroupsOperations(object): + """ManagementGroupsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.WorkspaceListManagementGroupsResult"] + """Gets a list of management groups connected to a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceListManagementGroupsResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~operational_insights_management_client.models.WorkspaceListManagementGroupsResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListManagementGroupsResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('WorkspaceListManagementGroupsResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/managementGroups'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_operation_statuses_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_operation_statuses_operations.py new file mode 100644 index 00000000000..e2e5ecf203c --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_operation_statuses_operations.py @@ -0,0 +1,104 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
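Every operation in these generated classes accepts the cls keyword documented in the docstrings; for non-paging calls it is invoked as cls(pipeline_response, deserialized, {}), which lets a caller see the raw HTTP response alongside the deserialized model. A minimal sketch, again reusing the assumed client object:
```python
# Minimal sketch of the cls callback on a non-paging operation. For these
# generated methods cls receives (pipeline_response, deserialized_model, headers).
def with_status(pipeline_response, deserialized, headers):
    return deserialized, pipeline_response.http_response.status_code

service, status_code = client.linked_services.get(
    resource_group_name="mms-eus",
    workspace_name="TestLinkWS",
    linked_service_name="Cluster",
    cls=with_status,
)
print(status_code)
```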
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class OperationStatusesOperations(object): + """OperationStatusesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def get( + self, + location, # type: str + async_operation_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.OperationStatus" + """Get the status of a long running azure asynchronous operation. + + :param location: The region name of operation. + :type location: str + :param async_operation_id: The operation Id. 
+ :type async_operation_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: OperationStatus, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.OperationStatus + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.OperationStatus"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str'), + 'asyncOperationId': self._serialize.url("async_operation_id", async_operation_id, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('OperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/locations/{location}/operationStatuses/{asyncOperationId}'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_operations.py new file mode 100644 index 00000000000..b5080962d5b --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_operations.py @@ -0,0 +1,110 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class Operations(object): + """Operations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.OperationListResult"] + """Lists all of the available OperationalInsights Rest API operations. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~operational_insights_management_client.models.OperationListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('OperationListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.OperationalInsights/operations'} # type: ignore diff --git 
a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_saved_searches_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_saved_searches_operations.py new file mode 100644 index 00000000000..0329f7f0a8a --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_saved_searches_operations.py @@ -0,0 +1,296 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class SavedSearchesOperations(object): + """SavedSearchesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def delete( + self, + resource_group_name, # type: str + workspace_name, # type: str + saved_search_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes the specified saved search in a given workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param saved_search_id: The id of the saved search. 
+ :type saved_search_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'savedSearchId': self._serialize.url("saved_search_id", saved_search_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches/{savedSearchId}'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + workspace_name, # type: str + saved_search_id, # type: str + parameters, # type: "models.SavedSearch" + **kwargs # type: Any + ): + # type: (...) -> "models.SavedSearch" + """Creates or updates a saved search for a given workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param saved_search_id: The id of the saved search. + :type saved_search_id: str + :param parameters: The parameters required to save a search. 
+ :type parameters: ~operational_insights_management_client.models.SavedSearch + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SavedSearch, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.SavedSearch + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SavedSearch"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'savedSearchId': self._serialize.url("saved_search_id", saved_search_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'SavedSearch') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SavedSearch', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches/{savedSearchId}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + workspace_name, # type: str + saved_search_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.SavedSearch" + """Gets the specified saved search for a given workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param saved_search_id: The id of the saved search. 
+ :type saved_search_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SavedSearch, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.SavedSearch + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SavedSearch"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'savedSearchId': self._serialize.url("saved_search_id", saved_search_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SavedSearch', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches/{savedSearchId}'} # type: ignore + + def list_by_workspace( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.SavedSearchesListResult" + """Gets the saved searches for a given Log Analytics Workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SavedSearchesListResult, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.SavedSearchesListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SavedSearchesListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SavedSearchesListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/savedSearches'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_schema_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_schema_operations.py new file mode 100644 index 00000000000..c9652c381af --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_schema_operations.py @@ -0,0 +1,104 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class SchemaOperations(object): + """SchemaOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def get( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.SearchGetSchemaResponse" + """Gets the schema for a given workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SearchGetSchemaResponse, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.SearchGetSchemaResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SearchGetSchemaResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SearchGetSchemaResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/schema'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_shared_keys_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_shared_keys_operations.py new file mode 100644 index 00000000000..83f4f869bff --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_shared_keys_operations.py @@ -0,0 +1,164 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class SharedKeysOperations(object): + """SharedKeysOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def get_shared_keys( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.SharedKeys" + """Gets the shared keys for a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SharedKeys, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.SharedKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SharedKeys"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get_shared_keys.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SharedKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_shared_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/sharedKeys'} # type: ignore + + def regenerate( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.SharedKeys" + """Regenerates the shared keys for a Log Analytics Workspace. These keys are used to connect + Microsoft Operational Insights agents to the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: SharedKeys, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.SharedKeys + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SharedKeys"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.regenerate.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('SharedKeys', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + regenerate.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/regenerateSharedKey'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_storage_insight_configs_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_storage_insight_configs_operations.py new file mode 100644 index 00000000000..060acc9cb18 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_storage_insight_configs_operations.py @@ -0,0 +1,317 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class StorageInsightConfigsOperations(object): + """StorageInsightConfigsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def create_or_update( + self, + resource_group_name, # type: str + workspace_name, # type: str + storage_insight_name, # type: str + parameters, # type: "models.StorageInsight" + **kwargs # type: Any + ): + # type: (...) -> "models.StorageInsight" + """Create or update a storage insight. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param storage_insight_name: Name of the storageInsightsConfigs resource. + :type storage_insight_name: str + :param parameters: The parameters required to create or update a storage insight. 
+ :type parameters: ~operational_insights_management_client.models.StorageInsight + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageInsight, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.StorageInsight + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.StorageInsight"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'storageInsightName': self._serialize.url("storage_insight_name", storage_insight_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'StorageInsight') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('StorageInsight', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('StorageInsight', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs/{storageInsightName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + workspace_name, # type: str + storage_insight_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.StorageInsight" + """Gets a storage insight instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param storage_insight_name: Name of the storageInsightsConfigs resource. 
+ :type storage_insight_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: StorageInsight, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.StorageInsight + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.StorageInsight"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'storageInsightName': self._serialize.url("storage_insight_name", storage_insight_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('StorageInsight', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs/{storageInsightName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + workspace_name, # type: str + storage_insight_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes a storageInsightsConfigs resource. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param storage_insight_name: Name of the storageInsightsConfigs resource. 
+ :type storage_insight_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'storageInsightName': self._serialize.url("storage_insight_name", storage_insight_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs/{storageInsightName}'} # type: ignore + + def list_by_workspace( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.StorageInsightListResult"] + """Lists the storage insight instances within a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either StorageInsightListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~operational_insights_management_client.models.StorageInsightListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.StorageInsightListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('StorageInsightListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.odata_next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/storageInsightConfigs'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_tables_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_tables_operations.py new file mode 100644 index 00000000000..1105dfcd35a --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_tables_operations.py @@ -0,0 +1,258 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class TablesOperations(object): + """TablesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_workspace( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.TablesListResult"] + """Gets all the tables for the specified Log Analytics workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either TablesListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~operational_insights_management_client.models.TablesListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TablesListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('TablesListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables'} # type: ignore + + def update( + self, + resource_group_name, # type: str + workspace_name, # type: str + table_name, # type: str + parameters, # type: "models.Table" + **kwargs # type: Any + ): + # type: (...) -> "models.Table" + """Updates a Log Analytics workspace table properties. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param table_name: The name of the table. + :type table_name: str + :param parameters: The parameters required to update table properties. 
+ :type parameters: ~operational_insights_management_client.models.Table + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Table, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.Table + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Table"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'tableName': self._serialize.url("table_name", table_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'Table') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Table', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + workspace_name, # type: str + table_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.Table" + """Gets a Log Analytics workspace table. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param table_name: The name of the table. 
+ :type table_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Table, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.Table + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Table"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'tableName': self._serialize.url("table_name", table_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Table', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/tables/{tableName}'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_usages_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_usages_operations.py new file mode 100644 index 00000000000..e67ecc19264 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_usages_operations.py @@ -0,0 +1,121 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class UsagesOperations(object): + """UsagesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.WorkspaceListUsagesResult"] + """Gets a list of usage metrics for a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceListUsagesResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~operational_insights_management_client.models.WorkspaceListUsagesResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListUsagesResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('WorkspaceListUsagesResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/usages'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_workspace_purge_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_workspace_purge_operations.py new file mode 100644 index 00000000000..fe3e7abb7b5 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_workspace_purge_operations.py @@ -0,0 +1,185 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class WorkspacePurgeOperations(object): + """WorkspacePurgeOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def purge( + self, + resource_group_name, # type: str + workspace_name, # type: str + body, # type: "models.WorkspacePurgeBody" + **kwargs # type: Any + ): + # type: (...) -> "models.WorkspacePurgeResponse" + """Purges data in a Log Analytics workspace by a set of user-defined filters. + + In order to manage system resources, purge requests are throttled at 50 requests per hour. You + should batch the execution of purge requests by sending a single command whose predicate + includes all user identities that require purging. Use the in operator to specify multiple + identities. You should run the query prior to using it in a purge request to verify that the + results are expected. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param body: Describes the body of a request to purge data in a single table of a Log + Analytics Workspace. 
+ :type body: ~operational_insights_management_client.models.WorkspacePurgeBody + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspacePurgeResponse, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.WorkspacePurgeResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspacePurgeResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.purge.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(body, 'WorkspacePurgeBody') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + response_headers = {} + response_headers['x-ms-status-location']=self._deserialize('str', response.headers.get('x-ms-status-location')) + deserialized = self._deserialize('WorkspacePurgeResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + purge.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/purge'} # type: ignore + + def get_purge_status( + self, + resource_group_name, # type: str + workspace_name, # type: str + purge_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.WorkspacePurgeStatusResponse" + """Gets status of an ongoing purge operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param purge_id: In a purge status request, this is the Id of the operation the status of which + is returned. 
+ :type purge_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspacePurgeStatusResponse, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.WorkspacePurgeStatusResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspacePurgeStatusResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-08-01" + accept = "application/json" + + # Construct URL + url = self.get_purge_status.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'purgeId': self._serialize.url("purge_id", purge_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('WorkspacePurgeStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_purge_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/operations/{purgeId}'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_workspaces_operations.py b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_workspaces_operations.py new file mode 100644 index 00000000000..3ed852e9b53 --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/operations/_workspaces_operations.py @@ -0,0 +1,570 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class WorkspacesOperations(object): + """WorkspacesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~operational_insights_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.WorkspaceListResult"] + """Gets the workspaces in a subscription. 
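+
+ A minimal usage sketch (hypothetical, not produced by the code generator): it assumes an instance of the vendored management client has already been constructed as ``client`` and that this operation group is attached to it as ``client.workspaces``:: +
+ for workspace in client.workspaces.list(): + print(workspace.name)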
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~operational_insights_management_client.models.WorkspaceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.OperationalInsights/workspaces'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.WorkspaceListResult"] + """Gets workspaces in a resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~operational_insights_management_client.models.WorkspaceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces'} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name, # type: str + workspace_name, # type: str + parameters, # type: "models.Workspace" + **kwargs # type: Any + ): + # type: (...) 
-> Optional["models.Workspace"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Workspace"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_or_update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'Workspace') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Workspace', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'} # type: ignore + + def begin_create_or_update( + self, + resource_group_name, # type: str + workspace_name, # type: str + parameters, # type: "models.Workspace" + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.Workspace"] + """Create or update a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param parameters: The parameters required to create or update a workspace. + :type parameters: ~operational_insights_management_client.models.Workspace + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either Workspace or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~operational_insights_management_client.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + parameters=parameters, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'} # type: ignore + + def _delete_initial( + self, + resource_group_name, # type: str + workspace_name, # type: str + force=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if force is not None: + query_parameters['force'] = self._serialize.query("force", force, 'bool') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'} # type: ignore + + def begin_delete( + self, + resource_group_name, # type: str + workspace_name, # type: str + force=None, # type: Optional[bool] + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a workspace resource. To recover the workspace, create it again with the same name, in + the same subscription, resource group and location. The name is kept for 14 days and cannot be + used for another workspace. To remove the workspace completely and release the name, use the + force flag. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param force: Deletes the workspace without the recovery option. A workspace that was deleted + with this flag cannot be recovered. + :type force: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + force=force, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.Workspace" + """Gets a workspace instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Workspace, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.Workspace + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'} # type: ignore + + def update( + self, + resource_group_name, # type: str + workspace_name, # type: str + parameters, # type: "models.WorkspacePatch" + **kwargs # type: Any + ): + # type: (...) -> "models.Workspace" + """Updates a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param parameters: The parameters required to patch a workspace. 
+ :type parameters: ~operational_insights_management_client.models.WorkspacePatch + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Workspace, or the result of cls(response) + :rtype: ~operational_insights_management_client.models.Workspace + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-10-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=63, min_length=4, pattern=r'^[A-Za-z0-9][A-Za-z0-9-]+[A-Za-z0-9]$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(parameters, 'WorkspacePatch') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}'} # type: ignore diff --git a/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/py.typed b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/py.typed new file mode 100644 index 00000000000..e5aff4f83af --- /dev/null +++ b/src/loganalytics/azext_loganalytics/vendored_sdks/loganalytics/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. 
\ No newline at end of file diff --git a/src/loganalytics/report.md b/src/loganalytics/report.md new file mode 100644 index 00000000000..152a8962d4a --- /dev/null +++ b/src/loganalytics/report.md @@ -0,0 +1,1037 @@ +# Azure CLI Module Creation Report + +## EXTENSION +|CLI Extension|Command Groups| +|---------|------------| +|az loganalytics|[groups](#CommandGroups) + +## GROUPS +### Command groups in `az loganalytics` extension +|CLI Command Group|Group Swagger name|Commands| +|---------|------------|--------| +|az loganalytics data-export|DataExports|[commands](#CommandsInDataExports)| +|az loganalytics data-source|DataSources|[commands](#CommandsInDataSources)| +|az loganalytics intelligence-pack|IntelligencePacks|[commands](#CommandsInIntelligencePacks)| +|az loganalytics linked-service|LinkedServices|[commands](#CommandsInLinkedServices)| +|az loganalytics linked-storage-account|LinkedStorageAccounts|[commands](#CommandsInLinkedStorageAccounts)| +|az loganalytics management-group|ManagementGroups|[commands](#CommandsInManagementGroups)| +|az loganalytics operation-statuses|OperationStatuses|[commands](#CommandsInOperationStatuses)| +|az loganalytics shared-key|SharedKeys|[commands](#CommandsInSharedKeys)| +|az loganalytics usage|Usages|[commands](#CommandsInUsages)| +|az loganalytics storage-insight-config|StorageInsightConfigs|[commands](#CommandsInStorageInsightConfigs)| +|az loganalytics saved-search|SavedSearches|[commands](#CommandsInSavedSearches)| +|az loganalytics available-service-tier|AvailableServiceTiers|[commands](#CommandsInAvailableServiceTiers)| +|az loganalytics gateway|Gateways|[commands](#CommandsInGateways)| +|az loganalytics schema|Schema|[commands](#CommandsInSchema)| +|az loganalytics workspace-purge|WorkspacePurge|[commands](#CommandsInWorkspacePurge)| +|az loganalytics table|Tables|[commands](#CommandsInTables)| +|az loganalytics cluster|Clusters|[commands](#CommandsInClusters)| +|az loganalytics workspace|Workspaces|[commands](#CommandsInWorkspaces)| +|az loganalytics deleted-workspace|DeletedWorkspaces|[commands](#CommandsInDeletedWorkspaces)| + +## COMMANDS +### Commands in `az loganalytics available-service-tier` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics available-service-tier list](#AvailableServiceTiersListByWorkspace)|ListByWorkspace|[Parameters](#ParametersAvailableServiceTiersListByWorkspace)|[Example](#ExamplesAvailableServiceTiersListByWorkspace)| + +### Commands in `az loganalytics cluster` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics cluster list](#ClustersListByResourceGroup)|ListByResourceGroup|[Parameters](#ParametersClustersListByResourceGroup)|[Example](#ExamplesClustersListByResourceGroup)| +|[az loganalytics cluster list](#ClustersList)|List|[Parameters](#ParametersClustersList)|[Example](#ExamplesClustersList)| +|[az loganalytics cluster show](#ClustersGet)|Get|[Parameters](#ParametersClustersGet)|[Example](#ExamplesClustersGet)| +|[az loganalytics cluster create](#ClustersCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersClustersCreateOrUpdate#Create)|[Example](#ExamplesClustersCreateOrUpdate#Create)| +|[az loganalytics cluster update](#ClustersUpdate)|Update|[Parameters](#ParametersClustersUpdate)|[Example](#ExamplesClustersUpdate)| +|[az loganalytics cluster 
delete](#ClustersDelete)|Delete|[Parameters](#ParametersClustersDelete)|[Example](#ExamplesClustersDelete)| + +### Commands in `az loganalytics data-export` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics data-export list](#DataExportsListByWorkspace)|ListByWorkspace|[Parameters](#ParametersDataExportsListByWorkspace)|[Example](#ExamplesDataExportsListByWorkspace)| +|[az loganalytics data-export show](#DataExportsGet)|Get|[Parameters](#ParametersDataExportsGet)|[Example](#ExamplesDataExportsGet)| +|[az loganalytics data-export create](#DataExportsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersDataExportsCreateOrUpdate#Create)|[Example](#ExamplesDataExportsCreateOrUpdate#Create)| +|[az loganalytics data-export update](#DataExportsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersDataExportsCreateOrUpdate#Update)|Not Found| +|[az loganalytics data-export delete](#DataExportsDelete)|Delete|[Parameters](#ParametersDataExportsDelete)|[Example](#ExamplesDataExportsDelete)| + +### Commands in `az loganalytics data-source` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics data-source list](#DataSourcesListByWorkspace)|ListByWorkspace|[Parameters](#ParametersDataSourcesListByWorkspace)|[Example](#ExamplesDataSourcesListByWorkspace)| +|[az loganalytics data-source show](#DataSourcesGet)|Get|[Parameters](#ParametersDataSourcesGet)|[Example](#ExamplesDataSourcesGet)| +|[az loganalytics data-source create](#DataSourcesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersDataSourcesCreateOrUpdate#Create)|[Example](#ExamplesDataSourcesCreateOrUpdate#Create)| +|[az loganalytics data-source update](#DataSourcesCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersDataSourcesCreateOrUpdate#Update)|Not Found| +|[az loganalytics data-source delete](#DataSourcesDelete)|Delete|[Parameters](#ParametersDataSourcesDelete)|[Example](#ExamplesDataSourcesDelete)| + +### Commands in `az loganalytics deleted-workspace` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics deleted-workspace list](#DeletedWorkspacesListByResourceGroup)|ListByResourceGroup|[Parameters](#ParametersDeletedWorkspacesListByResourceGroup)|[Example](#ExamplesDeletedWorkspacesListByResourceGroup)| +|[az loganalytics deleted-workspace list](#DeletedWorkspacesList)|List|[Parameters](#ParametersDeletedWorkspacesList)|[Example](#ExamplesDeletedWorkspacesList)| + +### Commands in `az loganalytics gateway` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics gateway delete](#GatewaysDelete)|Delete|[Parameters](#ParametersGatewaysDelete)|[Example](#ExamplesGatewaysDelete)| + +### Commands in `az loganalytics intelligence-pack` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics intelligence-pack list](#IntelligencePacksList)|List|[Parameters](#ParametersIntelligencePacksList)|[Example](#ExamplesIntelligencePacksList)| +|[az loganalytics intelligence-pack disable](#IntelligencePacksDisable)|Disable|[Parameters](#ParametersIntelligencePacksDisable)|[Example](#ExamplesIntelligencePacksDisable)| +|[az loganalytics intelligence-pack 
enable](#IntelligencePacksEnable)|Enable|[Parameters](#ParametersIntelligencePacksEnable)|[Example](#ExamplesIntelligencePacksEnable)| + +### Commands in `az loganalytics linked-service` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics linked-service list](#LinkedServicesListByWorkspace)|ListByWorkspace|[Parameters](#ParametersLinkedServicesListByWorkspace)|[Example](#ExamplesLinkedServicesListByWorkspace)| +|[az loganalytics linked-service show](#LinkedServicesGet)|Get|[Parameters](#ParametersLinkedServicesGet)|[Example](#ExamplesLinkedServicesGet)| +|[az loganalytics linked-service create](#LinkedServicesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersLinkedServicesCreateOrUpdate#Create)|[Example](#ExamplesLinkedServicesCreateOrUpdate#Create)| +|[az loganalytics linked-service update](#LinkedServicesCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersLinkedServicesCreateOrUpdate#Update)|Not Found| +|[az loganalytics linked-service delete](#LinkedServicesDelete)|Delete|[Parameters](#ParametersLinkedServicesDelete)|[Example](#ExamplesLinkedServicesDelete)| + +### Commands in `az loganalytics linked-storage-account` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics linked-storage-account list](#LinkedStorageAccountsListByWorkspace)|ListByWorkspace|[Parameters](#ParametersLinkedStorageAccountsListByWorkspace)|[Example](#ExamplesLinkedStorageAccountsListByWorkspace)| +|[az loganalytics linked-storage-account show](#LinkedStorageAccountsGet)|Get|[Parameters](#ParametersLinkedStorageAccountsGet)|[Example](#ExamplesLinkedStorageAccountsGet)| +|[az loganalytics linked-storage-account create](#LinkedStorageAccountsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersLinkedStorageAccountsCreateOrUpdate#Create)|[Example](#ExamplesLinkedStorageAccountsCreateOrUpdate#Create)| +|[az loganalytics linked-storage-account update](#LinkedStorageAccountsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersLinkedStorageAccountsCreateOrUpdate#Update)|Not Found| +|[az loganalytics linked-storage-account delete](#LinkedStorageAccountsDelete)|Delete|[Parameters](#ParametersLinkedStorageAccountsDelete)|[Example](#ExamplesLinkedStorageAccountsDelete)| + +### Commands in `az loganalytics management-group` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics management-group list](#ManagementGroupsList)|List|[Parameters](#ParametersManagementGroupsList)|[Example](#ExamplesManagementGroupsList)| + +### Commands in `az loganalytics operation-statuses` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics operation-statuses show](#OperationStatusesGet)|Get|[Parameters](#ParametersOperationStatusesGet)|[Example](#ExamplesOperationStatusesGet)| + +### Commands in `az loganalytics saved-search` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics saved-search list](#SavedSearchesListByWorkspace)|ListByWorkspace|[Parameters](#ParametersSavedSearchesListByWorkspace)|[Example](#ExamplesSavedSearchesListByWorkspace)| +|[az loganalytics saved-search show](#SavedSearchesGet)|Get|[Parameters](#ParametersSavedSearchesGet)|[Example](#ExamplesSavedSearchesGet)| +|[az loganalytics 
saved-search create](#SavedSearchesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersSavedSearchesCreateOrUpdate#Create)|[Example](#ExamplesSavedSearchesCreateOrUpdate#Create)| +|[az loganalytics saved-search update](#SavedSearchesCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersSavedSearchesCreateOrUpdate#Update)|Not Found| +|[az loganalytics saved-search delete](#SavedSearchesDelete)|Delete|[Parameters](#ParametersSavedSearchesDelete)|[Example](#ExamplesSavedSearchesDelete)| + +### Commands in `az loganalytics schema` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics schema get](#SchemaGet)|Get|[Parameters](#ParametersSchemaGet)|[Example](#ExamplesSchemaGet)| + +### Commands in `az loganalytics shared-key` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics shared-key get-shared-key](#SharedKeysGetSharedKeys)|GetSharedKeys|[Parameters](#ParametersSharedKeysGetSharedKeys)|[Example](#ExamplesSharedKeysGetSharedKeys)| +|[az loganalytics shared-key regenerate](#SharedKeysRegenerate)|Regenerate|[Parameters](#ParametersSharedKeysRegenerate)|[Example](#ExamplesSharedKeysRegenerate)| + +### Commands in `az loganalytics storage-insight-config` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics storage-insight-config list](#StorageInsightConfigsListByWorkspace)|ListByWorkspace|[Parameters](#ParametersStorageInsightConfigsListByWorkspace)|[Example](#ExamplesStorageInsightConfigsListByWorkspace)| +|[az loganalytics storage-insight-config show](#StorageInsightConfigsGet)|Get|[Parameters](#ParametersStorageInsightConfigsGet)|[Example](#ExamplesStorageInsightConfigsGet)| +|[az loganalytics storage-insight-config create](#StorageInsightConfigsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersStorageInsightConfigsCreateOrUpdate#Create)|[Example](#ExamplesStorageInsightConfigsCreateOrUpdate#Create)| +|[az loganalytics storage-insight-config update](#StorageInsightConfigsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersStorageInsightConfigsCreateOrUpdate#Update)|Not Found| +|[az loganalytics storage-insight-config delete](#StorageInsightConfigsDelete)|Delete|[Parameters](#ParametersStorageInsightConfigsDelete)|[Example](#ExamplesStorageInsightConfigsDelete)| + +### Commands in `az loganalytics table` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics table list](#TablesListByWorkspace)|ListByWorkspace|[Parameters](#ParametersTablesListByWorkspace)|[Example](#ExamplesTablesListByWorkspace)| +|[az loganalytics table show](#TablesGet)|Get|[Parameters](#ParametersTablesGet)|[Example](#ExamplesTablesGet)| +|[az loganalytics table update](#TablesUpdate)|Update|[Parameters](#ParametersTablesUpdate)|[Example](#ExamplesTablesUpdate)| + +### Commands in `az loganalytics usage` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics usage list](#UsagesList)|List|[Parameters](#ParametersUsagesList)|[Example](#ExamplesUsagesList)| + +### Commands in `az loganalytics workspace` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics workspace 
list](#WorkspacesListByResourceGroup)|ListByResourceGroup|[Parameters](#ParametersWorkspacesListByResourceGroup)|[Example](#ExamplesWorkspacesListByResourceGroup)| +|[az loganalytics workspace list](#WorkspacesList)|List|[Parameters](#ParametersWorkspacesList)|[Example](#ExamplesWorkspacesList)| +|[az loganalytics workspace show](#WorkspacesGet)|Get|[Parameters](#ParametersWorkspacesGet)|[Example](#ExamplesWorkspacesGet)| +|[az loganalytics workspace create](#WorkspacesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersWorkspacesCreateOrUpdate#Create)|[Example](#ExamplesWorkspacesCreateOrUpdate#Create)| +|[az loganalytics workspace update](#WorkspacesUpdate)|Update|[Parameters](#ParametersWorkspacesUpdate)|[Example](#ExamplesWorkspacesUpdate)| +|[az loganalytics workspace delete](#WorkspacesDelete)|Delete|[Parameters](#ParametersWorkspacesDelete)|[Example](#ExamplesWorkspacesDelete)| + +### Commands in `az loganalytics workspace-purge` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az loganalytics workspace-purge purge](#WorkspacePurgePurge)|Purge|[Parameters](#ParametersWorkspacePurgePurge)|[Example](#ExamplesWorkspacePurgePurge)| +|[az loganalytics workspace-purge show-purge-status](#WorkspacePurgeGetPurgeStatus)|GetPurgeStatus|[Parameters](#ParametersWorkspacePurgeGetPurgeStatus)|[Example](#ExamplesWorkspacePurgeGetPurgeStatus)| + + +## COMMAND DETAILS + +### group `az loganalytics available-service-tier` +#### Command `az loganalytics available-service-tier list` + +##### Example +``` +az loganalytics available-service-tier list --resource-group "rg1" --workspace-name "workspace1" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| + +### group `az loganalytics cluster` +#### Command `az loganalytics cluster list` + +##### Example +``` +az loganalytics cluster list --resource-group "oiautorest6685" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| + +#### Command `az loganalytics cluster list` + +##### Example +``` +az loganalytics cluster list +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +#### Command `az loganalytics cluster show` + +##### Example +``` +az loganalytics cluster show --name "oiautorest6685" --resource-group "oiautorest6685" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. 
The name is case insensitive.|resource_group_name|resourceGroupName| +|**--cluster-name**|string|Name of the Log Analytics Cluster.|cluster_name|clusterName| + +#### Command `az loganalytics cluster create` + +##### Example +``` +az loganalytics cluster create --name "oiautorest6685" --location "australiasoutheast" --sku \ +name="CapacityReservation" capacity=1000 --tags tag1="val1" --resource-group "oiautorest6685" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--cluster-name**|string|The name of the Log Analytics cluster.|cluster_name|clusterName| +|**--location**|string|The geo-location where the resource lives|location|location| +|**--tags**|dictionary|Resource tags.|tags|tags| +|**--sku**|object|The sku properties.|sku|sku| +|**--is-double-encryption-enabled**|boolean|Configures whether cluster will use double encryption. This Property can not be modified after cluster creation. Default value is 'true'|is_double_encryption_enabled|isDoubleEncryptionEnabled| +|**--is-availability-zones-enabled**|boolean|Sets whether the cluster will support availability zones. This can be set as true only in regions where Azure Data Explorer support Availability Zones. This Property can not be modified after cluster creation. Default value is 'true' if region supports Availability Zones.|is_availability_zones_enabled|isAvailabilityZonesEnabled| +|**--billing-type**|choice|The cluster's billing type.|billing_type|billingType| +|**--key-vault-properties**|object|The associated key properties.|key_vault_properties|keyVaultProperties| +|**--type**|sealed-choice|Type of managed service identity.|type|type| +|**--user-assigned-identities**|dictionary|The list of user identities associated with the resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities| + +#### Command `az loganalytics cluster update` + +##### Example +``` +az loganalytics cluster update --name "oiautorest6685" --type "UserAssigned" --user-assigned-identities \ +"{\\"/subscriptions/00000000-0000-0000-0000-00000000000/resourcegroups/oiautorest6685/providers/Microsoft.ManagedIdenti\ +ty/userAssignedIdentities/myidentity\\":{}}" --key-vault-properties key-name="aztest2170cert" key-rsa-size=1024 \ +key-vault-uri="https://aztest2170.vault.azure.net" key-version="654ft6c4e63845cbb50fd6fg51540429" --sku \ +name="CapacityReservation" capacity=1000 --tags tag1="val1" --resource-group "oiautorest6685" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. 
The name is case insensitive.|resource_group_name|resourceGroupName| +|**--cluster-name**|string|Name of the Log Analytics Cluster.|cluster_name|clusterName| +|**--sku**|object|The sku properties.|sku|sku| +|**--tags**|dictionary|Resource tags.|tags|tags| +|**--key-vault-properties**|object|The associated key properties.|key_vault_properties|keyVaultProperties| +|**--billing-type**|choice|The cluster's billing type.|billing_type|billingType| +|**--type**|sealed-choice|Type of managed service identity.|type|type| +|**--user-assigned-identities**|dictionary|The list of user identities associated with the resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities| + +#### Command `az loganalytics cluster delete` + +##### Example +``` +az loganalytics cluster delete --name "oiautorest6685" --resource-group "oiautorest6685" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--cluster-name**|string|Name of the Log Analytics Cluster.|cluster_name|clusterName| + +### group `az loganalytics data-export` +#### Command `az loganalytics data-export list` + +##### Example +``` +az loganalytics data-export list --resource-group "RgTest1" --workspace-name "DeWnTest1234" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| + +#### Command `az loganalytics data-export show` + +##### Example +``` +az loganalytics data-export show --name "export1" --resource-group "RgTest1" --workspace-name "DeWnTest1234" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--data-export-name**|string|The data export rule name.|data_export_name|dataExportName| + +#### Command `az loganalytics data-export create` + +##### Example +``` +az loganalytics data-export create --name "export1" --resource-id "/subscriptions/192b9f85-a39a-4276-b96d-d5cd351703f9/\ +resourceGroups/OIAutoRest1234/providers/Microsoft.EventHub/namespaces/test" --table-names "Heartbeat" --resource-group \ +"RgTest1" --workspace-name "DeWnTest1234" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. 
The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--data-export-name**|string|The data export rule name.|data_export_name|dataExportName| +|**--data-export-id**|string|The data export rule ID.|data_export_id|dataExportId| +|**--table-names**|array|An array of tables to export, for example: [“Heartbeat, SecurityEvent”].|table_names|tableNames| +|**--enable**|boolean|Active when enabled.|enable|enable| +|**--created-date**|string|The latest data export rule modification time.|created_date|createdDate| +|**--last-modified-date**|string|Date and time when the export was last modified.|last_modified_date|lastModifiedDate| +|**--resource-id**|string|The destination resource ID. This can be copied from the Properties entry of the destination resource in Azure.|resource_id|resourceId| +|**--event-hub-name**|string|Optional. Allows to define an Event Hub name. Not applicable when destination is Storage Account.|event_hub_name|eventHubName| + +#### Command `az loganalytics data-export update` + +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--data-export-name**|string|The data export rule name.|data_export_name|dataExportName| +|**--data-export-id**|string|The data export rule ID.|data_export_id|dataExportId| +|**--table-names**|array|An array of tables to export, for example: [“Heartbeat, SecurityEvent”].|table_names|tableNames| +|**--enable**|boolean|Active when enabled.|enable|enable| +|**--created-date**|string|The latest data export rule modification time.|created_date|createdDate| +|**--last-modified-date**|string|Date and time when the export was last modified.|last_modified_date|lastModifiedDate| +|**--resource-id**|string|The destination resource ID. This can be copied from the Properties entry of the destination resource in Azure.|resource_id|resourceId| +|**--event-hub-name**|string|Optional. Allows to define an Event Hub name. Not applicable when destination is Storage Account.|event_hub_name|eventHubName| + +#### Command `az loganalytics data-export delete` + +##### Example +``` +az loganalytics data-export delete --name "export1" --resource-group "RgTest1" --workspace-name "DeWnTest1234" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--data-export-name**|string|The data export rule name.|data_export_name|dataExportName| + +### group `az loganalytics data-source` +#### Command `az loganalytics data-source list` + +##### Example +``` +az loganalytics data-source list --filter "kind=\'WindowsEvent\'" --resource-group "OIAutoRest5123" --workspace-name \ +"AzTest9724" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. 
The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--filter**|string|The filter to apply on the operation.|filter|$filter| +|**--skiptoken**|string|Starting point of the collection of data source instances.|skiptoken|$skiptoken| + +#### Command `az loganalytics data-source show` + +##### Example +``` +az loganalytics data-source show --name "AzTestDS774" --resource-group "OIAutoRest5123" --workspace-name "AzTest9724" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--data-source-name**|string|Name of the datasource|data_source_name|dataSourceName| + +#### Command `az loganalytics data-source create` + +##### Example +``` +az loganalytics data-source create --name "AzTestDS774" --kind "AzureActivityLog" --properties \ +"{\\"LinkedResourceId\\":\\"/subscriptions/00000000-0000-0000-0000-00000000000/providers/microsoft.insights/eventtypes/\ +management\\"}" --resource-group "OIAutoRest5123" --workspace-name "AzTest9724" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--data-source-name**|string|The name of the datasource resource.|data_source_name|dataSourceName| +|**--properties**|any|The data source properties in raw json format, each kind of data source have it's own schema.|properties|properties| +|**--kind**|choice|The kind of the DataSource.|kind|kind| +|**--etag**|string|The ETag of the data source.|etag|etag| +|**--tags**|dictionary|Resource tags.|tags|tags| + +#### Command `az loganalytics data-source update` + +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--data-source-name**|string|The name of the datasource resource.|data_source_name|dataSourceName| +|**--properties**|any|The data source properties in raw json format, each kind of data source have it's own schema.|properties|properties| +|**--kind**|choice|The kind of the DataSource.|kind|kind| +|**--etag**|string|The ETag of the data source.|etag|etag| +|**--tags**|dictionary|Resource tags.|tags|tags| + +#### Command `az loganalytics data-source delete` + +##### Example +``` +az loganalytics data-source delete --name "AzTestDS774" --resource-group "OIAutoRest5123" --workspace-name \ +"AzTest9724" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. 
The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--data-source-name**|string|Name of the datasource.|data_source_name|dataSourceName| + +### group `az loganalytics deleted-workspace` +#### Command `az loganalytics deleted-workspace list` + +##### Example +``` +az loganalytics deleted-workspace list --resource-group "oiautorest6685" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| + +#### Command `az loganalytics deleted-workspace list` + +##### Example +``` +az loganalytics deleted-workspace list +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +### group `az loganalytics gateway` +#### Command `az loganalytics gateway delete` + +##### Example +``` +az loganalytics gateway delete --gateway-id "00000000-0000-0000-0000-00000000000" --resource-group "OIAutoRest5123" \ +--workspace-name "aztest5048" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--gateway-id**|string|The Log Analytics gateway Id.|gateway_id|gatewayId| + +### group `az loganalytics intelligence-pack` +#### Command `az loganalytics intelligence-pack list` + +##### Example +``` +az loganalytics intelligence-pack list --resource-group "rg1" --workspace-name "TestLinkWS" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| + +#### Command `az loganalytics intelligence-pack disable` + +##### Example +``` +az loganalytics intelligence-pack disable --name "ChangeTracking" --resource-group "rg1" --workspace-name "TestLinkWS" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--intelligence-pack-name**|string|The name of the intelligence pack to be disabled.|intelligence_pack_name|intelligencePackName| + +#### Command `az loganalytics intelligence-pack enable` + +##### Example +``` +az loganalytics intelligence-pack enable --name "ChangeTracking" --resource-group "rg1" --workspace-name "TestLinkWS" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. 
The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--intelligence-pack-name**|string|The name of the intelligence pack to be enabled.|intelligence_pack_name|intelligencePackName| + +### group `az loganalytics linked-service` +#### Command `az loganalytics linked-service list` + +##### Example +``` +az loganalytics linked-service list --resource-group "mms-eus" --workspace-name "TestLinkWS" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| + +#### Command `az loganalytics linked-service show` + +##### Example +``` +az loganalytics linked-service show --name "Cluster" --resource-group "mms-eus" --workspace-name "TestLinkWS" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--linked-service-name**|string|Name of the linked service.|linked_service_name|linkedServiceName| + +#### Command `az loganalytics linked-service create` + +##### Example +``` +az loganalytics linked-service create --name "Cluster" --write-access-resource-id "/subscriptions/00000000-0000-0000-00\ +00-00000000000/resourceGroups/mms-eus/providers/Microsoft.OperationalInsights/clusters/testcluster" --resource-group \ +"mms-eus" --workspace-name "TestLinkWS" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--linked-service-name**|string|Name of the linkedServices resource|linked_service_name|linkedServiceName| +|**--tags**|dictionary|Resource tags.|tags|tags| +|**--resource-id**|string|The resource id of the resource that will be linked to the workspace. This should be used for linking resources which require read access|resource_id|resourceId| +|**--write-access-resource-id**|string|The resource id of the resource that will be linked to the workspace. This should be used for linking resources which require write access|write_access_resource_id|writeAccessResourceId| +|**--provisioning-state**|choice|The provisioning state of the linked service.|provisioning_state|provisioningState| + +#### Command `az loganalytics linked-service update` + +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--linked-service-name**|string|Name of the linkedServices resource|linked_service_name|linkedServiceName| +|**--tags**|dictionary|Resource tags.|tags|tags| +|**--resource-id**|string|The resource id of the resource that will be linked to the workspace. 
This should be used for linking resources which require read access|resource_id|resourceId| +|**--write-access-resource-id**|string|The resource id of the resource that will be linked to the workspace. This should be used for linking resources which require write access|write_access_resource_id|writeAccessResourceId| +|**--provisioning-state**|choice|The provisioning state of the linked service.|provisioning_state|provisioningState| + +#### Command `az loganalytics linked-service delete` + +##### Example +``` +az loganalytics linked-service delete --name "Cluster" --resource-group "rg1" --workspace-name "TestLinkWS" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--linked-service-name**|string|Name of the linked service.|linked_service_name|linkedServiceName| + +### group `az loganalytics linked-storage-account` +#### Command `az loganalytics linked-storage-account list` + +##### Example +``` +az loganalytics linked-storage-account list --resource-group "mms-eus" --workspace-name "testLinkStorageAccountsWS" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| + +#### Command `az loganalytics linked-storage-account show` + +##### Example +``` +az loganalytics linked-storage-account show --data-source-type "CustomLogs" --resource-group "mms-eus" \ +--workspace-name "testLinkStorageAccountsWS" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--data-source-type**|sealed-choice|Linked storage accounts type.|data_source_type|dataSourceType| + +#### Command `az loganalytics linked-storage-account create` + +##### Example +``` +az loganalytics linked-storage-account create --data-source-type "CustomLogs" --storage-account-ids \ +"/subscriptions/00000000-0000-0000-0000-00000000000/resourceGroups/mms-eus/providers/Microsoft.Storage/storageAccounts/\ +testStorageA" "/subscriptions/00000000-0000-0000-0000-00000000000/resourceGroups/mms-eus/providers/Microsoft.Storage/st\ +orageAccounts/testStorageB" --resource-group "mms-eus" --workspace-name "testLinkStorageAccountsWS" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. 
The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--data-source-type**|sealed-choice|Linked storage accounts type.|data_source_type|dataSourceType| +|**--storage-account-ids**|array|Linked storage accounts resources ids.|storage_account_ids|storageAccountIds| + +#### Command `az loganalytics linked-storage-account update` + +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--data-source-type**|sealed-choice|Linked storage accounts type.|data_source_type|dataSourceType| +|**--storage-account-ids**|array|Linked storage accounts resources ids.|storage_account_ids|storageAccountIds| + +#### Command `az loganalytics linked-storage-account delete` + +##### Example +``` +az loganalytics linked-storage-account delete --data-source-type "CustomLogs" --resource-group "mms-eus" \ +--workspace-name "testLinkStorageAccountsWS" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--data-source-type**|sealed-choice|Linked storage accounts type.|data_source_type|dataSourceType| + +### group `az loganalytics management-group` +#### Command `az loganalytics management-group list` + +##### Example +``` +az loganalytics management-group list --resource-group "rg1" --workspace-name "TestLinkWS" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| + +### group `az loganalytics operation-statuses` +#### Command `az loganalytics operation-statuses show` + +##### Example +``` +az loganalytics operation-statuses show --async-operation-id "713192d7-503f-477a-9cfe-4efc3ee2bd11" --location "West \ +US" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--location**|string|The region name of operation.|location|location| +|**--async-operation-id**|string|The operation Id.|async_operation_id|asyncOperationId| + +### group `az loganalytics saved-search` +#### Command `az loganalytics saved-search list` + +##### Example +``` +az loganalytics saved-search list --resource-group "TestRG" --workspace-name "TestWS" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. 
The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| + +#### Command `az loganalytics saved-search show` + +##### Example +``` +az loganalytics saved-search show --resource-group "TestRG" --saved-search-id "00000000-0000-0000-0000-00000000000" \ +--workspace-name "TestWS" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--saved-search-id**|string|The id of the saved search.|saved_search_id|savedSearchId| + +#### Command `az loganalytics saved-search create` + +##### Example +``` +az loganalytics saved-search create --category "Saved Search Test Category" --display-name "Create or Update Saved \ +Search Test" --function-alias "heartbeat_func" --function-parameters "a:int=1" --query "Heartbeat | summarize Count() \ +by Computer | take a" --tags name="Group" value="Computer" --version 2 --resource-group "TestRG" --saved-search-id \ +"00000000-0000-0000-0000-00000000000" --workspace-name "TestWS" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--saved-search-id**|string|The id of the saved search.|saved_search_id|savedSearchId| +|**--category**|string|The category of the saved search. This helps the user to find a saved search faster. |category|category| +|**--display-name**|string|Saved search display name.|display_name|displayName| +|**--query**|string|The query expression for the saved search.|query|query| +|**--etag**|string|The ETag of the saved search. To override an existing saved search, use "*" or specify the current Etag|etag|etag| +|**--function-alias**|string|The function alias if query serves as a function.|function_alias|functionAlias| +|**--function-parameters**|string|The optional function parameters if query serves as a function. Value should be in the following format: 'param-name1:type1 = default_value1, param-name2:type2 = default_value2'. For more examples and proper syntax please refer to https://docs.microsoft.com/en-us/azure/kusto/query/functions/user-defined-functions.|function_parameters|functionParameters| +|**--version**|integer|The version number of the query language. The current version is 2 and is the default.|version|version| +|**--tags**|array|The tags attached to the saved search.|tags|tags| + +#### Command `az loganalytics saved-search update` + +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--saved-search-id**|string|The id of the saved search.|saved_search_id|savedSearchId| +|**--category**|string|The category of the saved search. This helps the user to find a saved search faster. 
|category|category| +|**--display-name**|string|Saved search display name.|display_name|displayName| +|**--query**|string|The query expression for the saved search.|query|query| +|**--etag**|string|The ETag of the saved search. To override an existing saved search, use "*" or specify the current Etag|etag|etag| +|**--function-alias**|string|The function alias if query serves as a function.|function_alias|functionAlias| +|**--function-parameters**|string|The optional function parameters if query serves as a function. Value should be in the following format: 'param-name1:type1 = default_value1, param-name2:type2 = default_value2'. For more examples and proper syntax please refer to https://docs.microsoft.com/en-us/azure/kusto/query/functions/user-defined-functions.|function_parameters|functionParameters| +|**--version**|integer|The version number of the query language. The current version is 2 and is the default.|version|version| +|**--tags**|array|The tags attached to the saved search.|tags|tags| + +#### Command `az loganalytics saved-search delete` + +##### Example +``` +az loganalytics saved-search delete --resource-group "TestRG" --saved-search-id "00000000-0000-0000-0000-00000000000" \ +--workspace-name "TestWS" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--saved-search-id**|string|The id of the saved search.|saved_search_id|savedSearchId| + +### group `az loganalytics schema` +#### Command `az loganalytics schema get` + +##### Example +``` +az loganalytics schema get --resource-group "mms-eus" --workspace-name "atlantisdemo" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| + +### group `az loganalytics shared-key` +#### Command `az loganalytics shared-key get-shared-key` + +##### Example +``` +az loganalytics shared-key get-shared-key --resource-group "rg1" --workspace-name "TestLinkWS" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| + +#### Command `az loganalytics shared-key regenerate` + +##### Example +``` +az loganalytics shared-key regenerate --resource-group "rg1" --workspace-name "workspace1" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. 
The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| + +### group `az loganalytics storage-insight-config` +#### Command `az loganalytics storage-insight-config list` + +##### Example +``` +az loganalytics storage-insight-config list --resource-group "OIAutoRest5123" --workspace-name "aztest5048" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| + +#### Command `az loganalytics storage-insight-config show` + +##### Example +``` +az loganalytics storage-insight-config show --resource-group "OIAutoRest5123" --storage-insight-name "AzTestSI1110" \ +--workspace-name "aztest5048" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--storage-insight-name**|string|Name of the storageInsightsConfigs resource|storage_insight_name|storageInsightName| + +#### Command `az loganalytics storage-insight-config create` + +##### Example +``` +az loganalytics storage-insight-config create --containers "wad-iis-logfiles" --storage-account \ +id="/subscriptions/00000000-0000-0000-0000-000000000005/resourcegroups/OIAutoRest6987/providers/microsoft.storage/stora\ +geaccounts/AzTestFakeSA9945" key="1234" --tables "WADWindowsEventLogsTable" "LinuxSyslogVer2v0" --resource-group \ +"OIAutoRest5123" --storage-insight-name "AzTestSI1110" --workspace-name "aztest5048" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--storage-insight-name**|string|Name of the storageInsightsConfigs resource|storage_insight_name|storageInsightName| +|**--e-tag**|string|The ETag of the storage insight.|e_tag|eTag| +|**--tags**|dictionary|Resource tags.|tags|tags| +|**--containers**|array|The names of the blob containers that the workspace should read|containers|containers| +|**--tables**|array|The names of the Azure tables that the workspace should read|tables|tables| +|**--storage-account**|object|The storage account connection details|storage_account|storageAccount| + +#### Command `az loganalytics storage-insight-config update` + +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. 
The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--storage-insight-name**|string|Name of the storageInsightsConfigs resource|storage_insight_name|storageInsightName| +|**--e-tag**|string|The ETag of the storage insight.|e_tag|eTag| +|**--tags**|dictionary|Resource tags.|tags|tags| +|**--containers**|array|The names of the blob containers that the workspace should read|containers|containers| +|**--tables**|array|The names of the Azure tables that the workspace should read|tables|tables| +|**--storage-account**|object|The storage account connection details|storage_account|storageAccount| + +#### Command `az loganalytics storage-insight-config delete` + +##### Example +``` +az loganalytics storage-insight-config delete --resource-group "OIAutoRest5123" --storage-insight-name "AzTestSI1110" \ +--workspace-name "aztest5048" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--storage-insight-name**|string|Name of the storageInsightsConfigs resource|storage_insight_name|storageInsightName| + +### group `az loganalytics table` +#### Command `az loganalytics table list` + +##### Example +``` +az loganalytics table list --resource-group "oiautorest6685" --workspace-name "oiautorest6685" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| + +#### Command `az loganalytics table show` + +##### Example +``` +az loganalytics table show --resource-group "oiautorest6685" --name "table1" --workspace-name "oiautorest6685" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--table-name**|string|The name of the table.|table_name|tableName| + +#### Command `az loganalytics table update` + +##### Example +``` +az loganalytics table update --retention-in-days 30 --resource-group "oiautorest6685" --name "table1" --workspace-name \ +"oiautorest6685" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--table-name**|string|The name of the table.|table_name|tableName| +|**--retention-in-days**|integer|The data table data retention in days, between 30 and 730. 
Setting this property to null will default to the workspace retention.|retention_in_days|retentionInDays| + +### group `az loganalytics usage` +#### Command `az loganalytics usage list` + +##### Example +``` +az loganalytics usage list --resource-group "rg1" --workspace-name "TestLinkWS" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| + +### group `az loganalytics workspace` +#### Command `az loganalytics workspace list` + +##### Example +``` +az loganalytics workspace list --resource-group "oiautorest6685" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| + +#### Command `az loganalytics workspace list` + +##### Example +``` +az loganalytics workspace list +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +#### Command `az loganalytics workspace show` + +##### Example +``` +az loganalytics workspace show --resource-group "oiautorest6685" --name "oiautorest6685" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| + +#### Command `az loganalytics workspace create` + +##### Example +``` +az loganalytics workspace create --location "australiasoutheast" --retention-in-days 30 --sku name="PerGB2018" --tags \ +tag1="val1" --resource-group "oiautorest6685" --name "oiautorest6685" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--location**|string|The geo-location where the resource lives|location|location| +|**--tags**|dictionary|Resource tags.|tags|tags| +|**--e-tag**|string|The ETag of the workspace.|e_tag|eTag| +|**--provisioning-state**|choice|The provisioning state of the workspace.|provisioning_state|provisioningState| +|**--sku**|object|The SKU of the workspace.|sku|sku| +|**--retention-in-days**|integer|The workspace data retention in days. Allowed values are per pricing plan. 
See pricing tiers documentation for details.|retention_in_days|retentionInDays| +|**--public-network-access-for-ingestion**|choice|The network access type for accessing Log Analytics ingestion.|public_network_access_for_ingestion|publicNetworkAccessForIngestion| +|**--public-network-access-for-query**|choice|The network access type for accessing Log Analytics query.|public_network_access_for_query|publicNetworkAccessForQuery| +|**--force-cmk-for-query**|boolean|Indicates whether customer managed storage is mandatory for query management.|force_cmk_for_query|forceCmkForQuery| +|**--features**|object|Workspace features.|features|features| +|**--daily-quota-gb**|number|The workspace daily quota for ingestion.|daily_quota_gb|dailyQuotaGb| + +#### Command `az loganalytics workspace update` + +##### Example +``` +az loganalytics workspace update --retention-in-days 30 --sku name="PerGB2018" --daily-quota-gb -1 --resource-group \ +"oiautorest6685" --name "oiautorest6685" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--tags**|dictionary|Resource tags. Optional.|tags|tags| +|**--provisioning-state**|choice|The provisioning state of the workspace.|provisioning_state|provisioningState| +|**--sku**|object|The SKU of the workspace.|sku|sku| +|**--retention-in-days**|integer|The workspace data retention in days. Allowed values are per pricing plan. See pricing tiers documentation for details.|retention_in_days|retentionInDays| +|**--public-network-access-for-ingestion**|choice|The network access type for accessing Log Analytics ingestion.|public_network_access_for_ingestion|publicNetworkAccessForIngestion| +|**--public-network-access-for-query**|choice|The network access type for accessing Log Analytics query.|public_network_access_for_query|publicNetworkAccessForQuery| +|**--force-cmk-for-query**|boolean|Indicates whether customer managed storage is mandatory for query management.|force_cmk_for_query|forceCmkForQuery| +|**--features**|object|Workspace features.|features|features| +|**--daily-quota-gb**|number|The workspace daily quota for ingestion.|daily_quota_gb|dailyQuotaGb| + +#### Command `az loganalytics workspace delete` + +##### Example +``` +az loganalytics workspace delete --resource-group "oiautorest6685" --name "oiautorest6685" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--force**|boolean|Deletes the workspace without the recovery option. 
A workspace that was deleted with this flag cannot be recovered.|force|force| + +### group `az loganalytics workspace-purge` +#### Command `az loganalytics workspace-purge purge` + +##### Example +``` +az loganalytics workspace-purge purge --filters "[{\\"column\\":\\"TimeGenerated\\",\\"operator\\":\\">\\",\\"value\\":\ +\\"2017-09-01T00:00:00\\"}]" --table "Heartbeat" --resource-group "OIAutoRest5123" --workspace-name "aztest5048" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--table**|string|Table from which to purge data.|table|table| +|**--filters**|array|The set of columns and filters (queries) to run over them to purge the resulting data.|filters|filters| + +#### Command `az loganalytics workspace-purge show-purge-status` + +##### Example +``` +az loganalytics workspace-purge show-purge-status --purge-id "purge-970318e7-b859-4edb-8903-83b1b54d0b74" \ +--resource-group "OIAutoRest5123" --workspace-name "aztest5048" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|The name of the workspace.|workspace_name|workspaceName| +|**--purge-id**|string|In a purge status request, this is the Id of the operation the status of which is returned.|purge_id|purgeId| diff --git a/src/loganalytics/setup.cfg b/src/loganalytics/setup.cfg new file mode 100644 index 00000000000..2fdd96e5d39 --- /dev/null +++ b/src/loganalytics/setup.cfg @@ -0,0 +1 @@ +#setup.cfg \ No newline at end of file diff --git a/src/loganalytics/setup.py b/src/loganalytics/setup.py new file mode 100644 index 00000000000..b1665903e23 --- /dev/null +++ b/src/loganalytics/setup.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python + +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + + +from codecs import open +from setuptools import setup, find_packages + +# HISTORY.rst entry. 
+VERSION = '0.1.0' +try: + from azext_loganalytics.manual.version import VERSION +except ImportError: + pass + +# The full list of classifiers is available at +# https://pypi.python.org/pypi?%3Aaction=list_classifiers +CLASSIFIERS = [ + 'Development Status :: 4 - Beta', + 'Intended Audience :: Developers', + 'Intended Audience :: System Administrators', + 'Programming Language :: Python', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'License :: OSI Approved :: MIT License', +] + +DEPENDENCIES = [] + +try: + from azext_loganalytics.manual.dependency import DEPENDENCIES +except ImportError: + pass + +with open('README.md', 'r', encoding='utf-8') as f: + README = f.read() +with open('HISTORY.rst', 'r', encoding='utf-8') as f: + HISTORY = f.read() + +setup( + name='loganalytics', + version=VERSION, + description='Microsoft Azure Command-Line Tools OperationalInsightsManagementClient Extension', + author='Microsoft Corporation', + author_email='azpycli@microsoft.com', + url='https://github.com/Azure/azure-cli-extensions/tree/master/src/loganalytics', + long_description=README + '\n\n' + HISTORY, + license='MIT', + classifiers=CLASSIFIERS, + packages=find_packages(), + install_requires=DEPENDENCIES, + package_data={'azext_loganalytics': ['azext_metadata.json']}, +)
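
A minimal sketch for exercising the packaging above locally: build a wheel from `setup.py` and load it as a local extension. This assumes `setuptools` and the `wheel` package are available; the exact wheel filename depends on the build environment and is an assumption here.

```
# From src/loganalytics: build the extension wheel (assumes setuptools and wheel are installed)
pip install wheel
python setup.py bdist_wheel

# Install the locally built wheel; the filename is assumed and may differ on your machine
az extension add --source dist/loganalytics-0.1.0-py3-none-any.whl
```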