-
Notifications
You must be signed in to change notification settings - Fork 3.4k
Expand file tree
/
Copy pathbase.py
More file actions
333 lines (272 loc) · 13.3 KB
/
base.py
File metadata and controls
333 lines (272 loc) · 13.3 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import os
import json
import shlex
import logging
import inspect
import unittest
import tempfile
from .scenario_tests import (IntegrationTestBase, ReplayableTest, SubscriptionRecordingProcessor,
LargeRequestBodyProcessor,
LargeResponseBodyProcessor, LargeResponseBodyReplacer, RequestUrlNormalizer,
GeneralNameReplacer,
live_only, DeploymentNameReplacer, patch_time_sleep_api, create_random_name)
from .scenario_tests.const import MOCKED_SUBSCRIPTION_ID, ENV_SKIP_ASSERT
from .patches import (patch_load_cached_subscriptions, patch_main_exception_handler,
patch_retrieve_token_for_user, patch_long_run_operation_delay,
patch_progress_controller, patch_get_current_system_username)
from .exceptions import CliExecutionError
from .utilities import (find_recording_dir, StorageAccountKeyReplacer, GraphClientPasswordReplacer,
MSGraphClientPasswordReplacer, AADAuthRequestFilter)
from .reverse_dependency import get_dummy_cli
# Shared logger for the azure-cli test SDK.
logger = logging.getLogger('azure.cli.testsdk')
# When this environment variable is set, every command executed through
# ExecutionResult is appended to COVERAGE_FILE for command-coverage reporting.
ENV_COMMAND_COVERAGE = 'AZURE_CLI_TEST_COMMAND_COVERAGE'
COVERAGE_FILE = 'az_command_coverage.txt'
class CheckerMixin(object):
    """Mixin providing JMESPath-based checker factories for CLI test classes.

    Every factory first resolves ``{placeholder}`` tokens in its string
    arguments against ``self.kwargs`` (via :meth:`_apply_kwargs`), so tests
    can register values once and reference them symbolically in queries and
    expected results.
    """

    def _apply_kwargs(self, val):
        """Format *val* with ``self.kwargs``; non-strings pass through unchanged.

        :raises KeyError: when *val* references a placeholder that has not
            been registered in ``self.kwargs`` (typically a misspelling).
        """
        try:
            return val.format(**self.kwargs)
        except AttributeError:
            # val has no .format (it is not a string), or self.kwargs is
            # absent on this instance: return the value untouched.
            return val
        except KeyError as ex:
            # Fix: chain the original KeyError so the root cause (which key
            # was missing, raised where) is preserved in the traceback.
            raise KeyError("Key '{}' not found in kwargs. Check spelling and ensure it has been registered."
                           .format(ex.args[0])) from ex

    def check(self, query, expected_results, case_sensitive=True):
        """Build a checker asserting *query* evaluates to *expected_results*."""
        from azure.cli.testsdk.checkers import JMESPathCheck
        query = self._apply_kwargs(query)
        expected_results = self._apply_kwargs(expected_results)
        return JMESPathCheck(query, expected_results, case_sensitive)

    def exists(self, query):
        """Build a checker asserting *query* yields a value."""
        from azure.cli.testsdk.checkers import JMESPathCheckExists
        query = self._apply_kwargs(query)
        return JMESPathCheckExists(query)

    def not_exists(self, query):
        """Build a checker asserting *query* yields no value."""
        from azure.cli.testsdk.checkers import JMESPathCheckNotExists
        query = self._apply_kwargs(query)
        return JMESPathCheckNotExists(query)

    def greater_than(self, query, expected_results):
        """Build a checker asserting *query* result is greater than *expected_results*."""
        from azure.cli.testsdk.checkers import JMESPathCheckGreaterThan
        query = self._apply_kwargs(query)
        expected_results = self._apply_kwargs(expected_results)
        return JMESPathCheckGreaterThan(query, expected_results)

    def check_pattern(self, query, expected_results):
        """Build a checker asserting *query* result matches the *expected_results* pattern."""
        from azure.cli.testsdk.checkers import JMESPathPatternCheck
        query = self._apply_kwargs(query)
        expected_results = self._apply_kwargs(expected_results)
        return JMESPathPatternCheck(query, expected_results)

    def is_empty(self):  # pylint: disable=no-self-use
        """Build a checker asserting the command produced no output."""
        from azure.cli.testsdk.checkers import NoneCheck
        return NoneCheck()
class ScenarioTest(ReplayableTest, CheckerMixin, unittest.TestCase):
    """Base class for record/playback Azure CLI scenario tests.

    Assembles the default cassette processors (scrubbing subscription ids,
    auth requests, large payloads, generated names, ...) and the default
    record/replay patches, then delegates recording mechanics to
    ReplayableTest.
    """

    def __init__(self, method_name, config_file=None, recording_name=None,
                 recording_processors=None, replay_processors=None, recording_patches=None, replay_patches=None,
                 random_config_dir=False):
        # In-process CLI context used to execute commands.
        self.cli_ctx = get_dummy_cli(random_config_dir=random_config_dir)
        self.random_config_dir = random_config_dir
        # Registers real-generated-name -> stable-moniker pairs so cassettes
        # stay deterministic (see create_random_name / create_guid).
        self.name_replacer = GeneralNameReplacer()
        # Values usable as {key} placeholders in commands and checks (CheckerMixin).
        self.kwargs = {}
        self.test_guid_count = 0
        # These processors accumulate per-test state; reset in tearDown.
        self._processors_to_reset = [StorageAccountKeyReplacer(), GraphClientPasswordReplacer(),
                                     MSGraphClientPasswordReplacer()]
        default_recording_processors = [
            SubscriptionRecordingProcessor(MOCKED_SUBSCRIPTION_ID),
            AADAuthRequestFilter(),
            LargeRequestBodyProcessor(),
            LargeResponseBodyProcessor(),
            DeploymentNameReplacer(),
            RequestUrlNormalizer(),
            self.name_replacer
        ] + self._processors_to_reset
        default_replay_processors = [
            LargeResponseBodyReplacer(),
            DeploymentNameReplacer(),
            RequestUrlNormalizer(),
        ]
        default_recording_patches = [patch_main_exception_handler]
        default_replay_patches = [
            patch_main_exception_handler,
            patch_time_sleep_api,
            patch_long_run_operation_delay,
            patch_load_cached_subscriptions,
            patch_retrieve_token_for_user,
            patch_progress_controller,
        ]

        def _merge_lists(base, patches):
            # Union of defaults with caller-supplied entries. A single
            # non-list entry is wrapped; the set union collapses duplicates
            # but leaves the merged order unspecified.
            merged = list(base)
            if patches and not isinstance(patches, list):
                patches = [patches]
            if patches:
                merged = list(set(merged).union(set(patches)))
            return merged

        super(ScenarioTest, self).__init__(
            method_name,
            config_file=config_file,
            recording_processors=_merge_lists(default_recording_processors, recording_processors),
            replay_processors=_merge_lists(default_replay_processors, replay_processors),
            recording_patches=_merge_lists(default_recording_patches, recording_patches),
            replay_patches=_merge_lists(default_replay_patches, replay_patches),
            # Cassettes live next to the test file that defines the subclass.
            recording_dir=find_recording_dir(inspect.getfile(self.__class__)),
            recording_name=recording_name
        )

    def tearDown(self):
        # Clear stateful scrubbing processors so the next test starts clean.
        for processor in self._processors_to_reset:
            processor.reset()
        if self.random_config_dir:
            from azure.cli.core.util import rmtree_with_retry
            rmtree_with_retry(self.cli_ctx.config.config_dir)
        super(ScenarioTest, self).tearDown()

    def create_random_name(self, prefix, length):
        """Return a random name while recording, a deterministic moniker on playback.

        NOTE(review): relies on self.test_resources_count being initialized by
        a base class (it is not set in this __init__) — confirm against
        IntegrationTestBase.
        """
        self.test_resources_count += 1
        moniker = '{}{:06}'.format(prefix, self.test_resources_count)
        if self.in_recording:
            name = create_random_name(prefix, length)
            # Pair the real name with the moniker so the cassette records the moniker.
            self.name_replacer.register_name_pair(name, moniker)
            return name
        return moniker

    # Use this helper to make playback work when guids are created and used in request urls, e.g. role assignment or AAD
    # service principals. For usages, in test code, patch the "guid-gen" routine to this one, e.g.
    # with mock.patch('azure.cli.command_modules.role.custom._gen_guid', side_effect=self.create_guid)
    def create_guid(self):
        """Return a random UUID while recording, a counter-derived UUID on playback."""
        import uuid
        self.test_guid_count += 1
        # Deterministic GUID whose trailing hex digits encode the per-test counter.
        moniker = '88888888-0000-0000-0000-00000000' + ("%0.4x" % self.test_guid_count)
        if self.in_recording:
            name = uuid.uuid4()
            self.name_replacer.register_name_pair(str(name), moniker)
            return name
        return uuid.UUID(moniker)

    def cmd(self, command, checks=None, expect_failure=False):
        """Run an az command ({key} placeholders resolved) and apply *checks*."""
        command = self._apply_kwargs(command)
        return execute(self.cli_ctx, command, expect_failure=expect_failure).assert_with_checks(checks)

    def get_subscription_id(self):
        """Return the default subscription id (the mocked id during playback)."""
        if self.in_recording or self.is_live:
            subscription_id = self.cmd('account list --query "[?isDefault].id" -o tsv').output.strip()
        else:
            subscription_id = MOCKED_SUBSCRIPTION_ID
        return subscription_id
class LocalContextScenarioTest(ScenarioTest):
    """Scenario test that exercises parameter persistence (local context).

    Each test runs inside its own working directory with
    ``config param-persist`` switched on; both are cleaned up afterwards.
    """

    def __init__(self, method_name, config_file=None, recording_name=None, recording_processors=None,
                 replay_processors=None, recording_patches=None, replay_patches=None, working_dir=None):
        super(LocalContextScenarioTest, self).__init__(method_name, config_file, recording_name, recording_processors,
                                                       replay_processors, recording_patches, replay_patches,
                                                       random_config_dir=True)
        # The username patch must apply to whichever phase is active so the
        # recorded and replayed values agree.
        patch_target = self.recording_patches if self.in_recording else self.replay_patches
        patch_target.append(patch_get_current_system_username)
        self.original_working_dir = os.getcwd()
        self.working_dir = working_dir if working_dir else tempfile.mkdtemp()

    def setUp(self):
        super(LocalContextScenarioTest, self).setUp()
        self.cli_ctx.local_context.initialize()
        os.chdir(self.working_dir)
        self.cmd('config param-persist on')

    def tearDown(self):
        super(LocalContextScenarioTest, self).tearDown()
        # Switch persistence off and purge anything the test stored.
        self.cmd('config param-persist off')
        self.cmd('config param-persist delete --all --purge -y')
        os.chdir(self.original_working_dir)
        if os.path.exists(self.working_dir):
            import shutil
            shutil.rmtree(self.working_dir)
@live_only()
class LiveScenarioTest(IntegrationTestBase, CheckerMixin, unittest.TestCase):
    """Scenario test that always runs against live Azure (never recorded)."""

    def __init__(self, method_name):
        super(LiveScenarioTest, self).__init__(method_name)
        self.cli_ctx = get_dummy_cli()
        # Placeholder values for {key} substitution (CheckerMixin).
        self.kwargs = {}
        self.test_resources_count = 0

    def setUp(self):
        # Install the main-exception-handler patch for this test instance.
        patch_main_exception_handler(self)

    def cmd(self, command, checks=None, expect_failure=False):
        """Run an az command ({key} placeholders resolved) and apply *checks*."""
        resolved = self._apply_kwargs(command)
        result = execute(self.cli_ctx, resolved, expect_failure=expect_failure)
        return result.assert_with_checks(checks)

    def get_subscription_id(self):
        """Return the id of the currently-default subscription."""
        query = 'account list --query "[?isDefault].id" -o tsv'
        return self.cmd(query).output.strip()
class ExecutionResult(object):
    """Runs one CLI command in-process and captures its outcome.

    Attributes populated by construction:
        output      -- captured stdout text.
        applog      -- captured log text (best effort; stderr is not captured,
                       see note in _in_process_execute).
        exit_code   -- 0 on success, 1 on failure.
        json_value  -- lazy cache for get_output_in_json().
        skip_assert -- when the ENV_SKIP_ASSERT env var is 'True',
                       assert_with_checks becomes a no-op.
    """

    def __init__(self, cli_ctx, command, expect_failure=False):
        """Execute *command* through *cli_ctx* and validate its exit code.

        :param expect_failure: when True, a zero exit code raises
            AssertionError; when False, a non-zero exit code raises
            AssertionError.
        """
        self.output = ''
        self.applog = ''
        self.command_coverage = {}
        cli_ctx.data['_cache'] = None

        if os.environ.get(ENV_COMMAND_COVERAGE, None):
            # Record the bare command (without the 'az ' prefix) for
            # command-coverage reporting.
            with open(COVERAGE_FILE, 'a') as coverage_file:
                if command.startswith('az '):
                    command = command[3:]
                coverage_file.write(command + '\n')

        self._in_process_execute(cli_ctx, command, expect_failure=expect_failure)

        log_val = ('Logging ' + self.applog) if self.applog else ''
        if expect_failure and self.exit_code == 0:
            logger.error('Command "%s" => %d. (It did not fail as expected). %s\n', command,
                         self.exit_code, log_val)
            raise AssertionError('The command did not fail as it was expected.')
        if not expect_failure and self.exit_code != 0:
            logger.error('Command "%s" => %d. %s\n', command, self.exit_code, log_val)
            raise AssertionError('The command failed. Exit code: {}'.format(self.exit_code))
        logger.info('Command "%s" => %d. %s\n', command, self.exit_code, log_val)

        self.json_value = None
        self.skip_assert = os.environ.get(ENV_SKIP_ASSERT, None) == 'True'

    def assert_with_checks(self, *args):
        """Apply each callable check (or list of checks) to this result.

        Returns self so calls can be chained. Skipped entirely when
        skip_assert is set.
        """
        checks = []
        for each in args:
            if isinstance(each, list):
                checks.extend(each)
            elif callable(each):
                checks.append(each)
        if not self.skip_assert:
            for c in checks:
                c(self)
        return self

    def get_output_in_json(self):
        """Parse stdout as JSON, caching the result.

        Fix: cache on `is None` instead of truthiness, so falsy-but-valid
        JSON results ([], 0, "", false) are parsed only once instead of on
        every call.

        :raises AssertionError: when the output parses to JSON null.
        """
        if self.json_value is None:
            self.json_value = json.loads(self.output)
        if self.json_value is None:
            raise AssertionError('The command output cannot be parsed in json.')
        return self.json_value

    def _in_process_execute(self, cli_ctx, command, expect_failure=False):
        """Invoke the command in-process, recording exit_code/output/applog."""
        from io import StringIO
        from vcr.errors import CannotOverwriteExistingCassetteException

        if command.startswith('az '):
            command = command[3:]

        stdout_buf = StringIO()
        logging_buf = StringIO()
        try:
            # issue: stderr cannot be redirect in this form, as a result some failure information
            # is lost when command fails.
            self.exit_code = cli_ctx.invoke(shlex.split(command), out_file=stdout_buf) or 0
            self.output = stdout_buf.getvalue()
            self.applog = logging_buf.getvalue()
        except CannotOverwriteExistingCassetteException as ex:
            # A request had no matching cassette entry: surface as a test failure.
            raise AssertionError(ex)
        except CliExecutionError as ex:
            if expect_failure:
                self.exit_code = 1
                self.output = stdout_buf.getvalue()
                self.applog = logging_buf.getvalue()
            elif ex.exception:
                # Re-raise the underlying exception for an unexpected failure.
                raise ex.exception
            else:
                raise ex
        except Exception as ex:  # pylint: disable=broad-except
            self.exit_code = 1
            self.output = stdout_buf.getvalue()
            self.process_error = ex
        except SystemExit as ex:
            # SystemExit not caught by broad exception, check for sys.exit(3)
            if ex.code == 3 and expect_failure:
                self.exit_code = 1
                self.output = stdout_buf.getvalue()
                self.applog = logging_buf.getvalue()
            else:
                raise
        finally:
            stdout_buf.close()
            logging_buf.close()
# Alias: test code invokes the runner as execute(cli_ctx, command, ...),
# which constructs an ExecutionResult (running the command as a side effect).
execute = ExecutionResult