# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import copy
import os
import shutil
import tempfile
import time
import traceback

import paramiko
import yaml
from devops.helpers import helpers
from devops.helpers import ssh_client
from elasticsearch import Elasticsearch

from tcp_tests import logger
from tcp_tests import settings
from tcp_tests.helpers import ext

LOG = logger.logger


def get_test_method_name():
    # Placeholder: resolving the name of the currently running test method is
    # not implemented yet; TimeStat.__exit__ below relies on this function.
    raise NotImplementedError


def update_yaml(yaml_tree=None, yaml_value='', is_uniq=True,
                yaml_file=settings.TIMESTAT_PATH_YAML, remote=None):
    """Store/update a variable in a YAML file.

    yaml_tree - path to the variable in the YAML file, created if absent,
    yaml_value - value of the variable, overwritten if it already exists,
    is_uniq - if False, append a unique two-digit suffix to the variable name.
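
    Example (an illustrative sketch; assumes the target YAML file already
    exists and contains a mapping):

        update_yaml(['test_deploy', 'provision'], '12.34', is_uniq=False,
                    yaml_file='timestat.yaml')
        # timestat.yaml now contains:
        #   test_deploy:
        #     provision_00: '12.34'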
    """
    def get_file(path, remote=None, mode="r"):
        if remote:
            return remote.open(path, mode)
        else:
            return open(path, mode)

    if yaml_tree is None:
        yaml_tree = []
    with get_file(yaml_file, remote) as file_obj:
        yaml_data = yaml.safe_load(file_obj)

    # Walk through the 'yaml_data' dict, find or create a tree using
    # sub-keys in order provided in 'yaml_tree' list
    item = yaml_data
    for n in yaml_tree[:-1]:
        if n not in item:
            item[n] = {}
        item = item[n]

    if is_uniq:
        last = yaml_tree[-1]
    else:
        # Create a unique suffix in range '_00' to '_99'
        for n in range(100):
            last = str(yaml_tree[-1]) + '_' + str(n).zfill(2)
            if last not in item:
                break

    item[last] = yaml_value
    with get_file(yaml_file, remote, mode='w') as file_obj:
        yaml.dump(yaml_data, file_obj, default_flow_style=False)


class TimeStat(object):
    """Context manager for measuring the execution time of the code.

    Usage:
    with TimeStat([name], [is_uniq=True]):
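
    Example (a sketch; 'deploy_env' and deploy_environment() are hypothetical,
    and note that get_test_method_name() above is still a stub, so the
    measured time cannot be stored until it is implemented):

        with TimeStat('deploy_env', is_uniq=True):
            deploy_environment()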
    """

    def __init__(self, name=None, is_uniq=False):
        if name:
            self.name = name
        else:
            self.name = 'timestat'
        self.is_uniq = is_uniq
        self.begin_time = 0
        self.end_time = 0
        self.total_time = 0

    def __enter__(self):
        self.begin_time = time.time()
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.end_time = time.time()
        self.total_time = self.end_time - self.begin_time

        # Create a path where the 'self.total_time' will be stored.
        yaml_path = []

        # There will be a list of one or two yaml subkeys:
        # - first key name is the method name of the test
        method_name = get_test_method_name()
        if method_name:
            yaml_path.append(method_name)

        # - second (subkey) name is provided from the decorator (the name of
        #   the just executed function), or manually.
        yaml_path.append(self.name)

        try:
            update_yaml(yaml_path, '{:.2f}'.format(self.total_time),
                        self.is_uniq)
        except Exception:
            LOG.error("Error storing time statistic for {0}"
                      " {1}".format(yaml_path, traceback.format_exc()))
            raise

    @property
    def spent_time(self):
        return time.time() - self.begin_time


def reduce_occurrences(items, text):
    """Return 'text' with the first occurrence of each item removed.

    Args:
        items: iterable of strings
        text: string
    Returns:
        string
    Raises:
        AssertionError if any substring is not present in the source text
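
    Example (illustrative):
        reduce_occurrences(['ab'], 'abcab')  # -> 'cab'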
    """
    for item in items:
        LOG.debug(
            "Verifying string {} is shown in "
            "\"\"\"\n{}\n\"\"\"".format(item, text))
        assert text.count(item) != 0
        text = text.replace(item, "", 1)
    return text


def generate_keys():
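    """Generate a throw-away 1024-bit RSA key pair in a temporary directory.

    Returns the path of the directory holding 'id_rsa' and 'id_rsa.pub'.
    A minimal usage sketch (presumably paired with clean_dir() below):

        keys_dir = generate_keys()
        # ... use os.path.join(keys_dir, 'id_rsa') as an SSH private key ...
        clean_dir(keys_dir)
    """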
    key = paramiko.RSAKey.generate(1024)
    public = key.get_base64()
    dirpath = tempfile.mkdtemp()
    key.write_private_key_file(os.path.join(dirpath, 'id_rsa'))
    with open(os.path.join(dirpath, 'id_rsa.pub'), 'w') as pub_file:
        pub_file.write(public)
    return dirpath


def clean_dir(dirpath):
    shutil.rmtree(dirpath)


def retry(tries_number=3, exception=Exception):
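    """Decorator that retries the wrapped function on 'exception'.

    Usage example (a sketch; flaky_call() is a hypothetical function that
    sometimes raises IOError):

        @retry(tries_number=5, exception=IOError)
        def flaky_call():
            ...
    """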
    def _retry(func):
        assert tries_number >= 1, 'ERROR! @retry is called with no tries!'

        def wrapper(*args, **kwargs):
            iter_number = 1
            while True:
                try:
                    LOG.debug('Calling function "{0}" with args "{1}" and '
                              'kwargs "{2}". Try # {3}.'.format(func.__name__,
                                                                args,
                                                                kwargs,
                                                                iter_number))
                    return func(*args, **kwargs)
                except exception as e:
                    if iter_number > tries_number:
                        LOG.debug('Failed to execute function "{0}" with {1} '
                                  'tries!'.format(func.__name__,
                                                  tries_number))
                        raise e
                    iter_number += 1
        return wrapper
    return _retry


class ElasticClient(object):
    def __init__(self, host='localhost', port=9200):
        self.es = Elasticsearch([{'host': '{}'.format(host),
                                  'port': port}])
        self.host = host
        self.port = port

    def find(self, key, value):
        LOG.info('Search for {} for {}'.format(key, value))
        search_request_body = '{' +\
            '  "query": {' +\
            '    "simple_query_string": {' +\
            '      "query": "{}",'.format(value) +\
            '      "analyze_wildcard" : "true",' +\
            '      "fields" : ["{}"],'.format(key) +\
            '      "default_operator": "AND"' +\
            '    }' +\
            '  },' +\
            '  "size": 1' +\
            '}'
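        # For illustration only: with key='message' and value='error', the
        # string built above is equivalent to this JSON body:
        #   {"query": {"simple_query_string": {"query": "error",
        #                                      "analyze_wildcard": "true",
        #                                      "fields": ["message"],
        #                                      "default_operator": "AND"}},
        #    "size": 1}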
        LOG.info('Search by {}'.format(search_request_body))

        def is_found():
            def temporary_status():
                res = self.es.search(index='_all', body=search_request_body)
                return res['hits']['total'] != 0
            return temporary_status

        predicate = is_found()
        helpers.wait(predicate, timeout=300,
                     timeout_msg='Timeout waiting for result from elastic')

        es_raw = self.es.search(index='_all', body=search_request_body)
        if es_raw['timed_out']:
            raise RuntimeError('Elastic search timeout exception')

        return ElasticSearchResult(key, value, es_raw['hits']['total'], es_raw)


class ElasticSearchResult(object):
    def __init__(self, key, value, count, raw):
        self.key = key
        self.value = value
        self.count = count
        self.raw = raw
        if self.count != 0:
            self.items = raw['hits']['hits']

    def get(self, index):
        if self.count != 0:
            return self.items[index]['_source']
        else:
            return None

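# Example usage of the Elasticsearch helpers above (a sketch; the host and
# the searched field/value pair are hypothetical):
#
#     result = ElasticClient(host='127.0.0.1').find('message', 'backup done')
#     if result.count:
#         first_hit = result.get(0)
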

def create_file(node, pod, path, size,
                namespace=ext.Namespace.BASE_NAMESPACE):
    node.check_call(
        'kubectl exec {} --namespace={} {}'.format(
            pod.name,
            namespace,
            'dd -- if=/dev/zero -- of={} bs=1MB count={}'.format(path, size)),
        expected=[ext.ExitCodes.EX_OK])


def run_daily_cron(node, pod, task,
                   namespace=ext.Namespace.BASE_NAMESPACE):
    node.check_call(
        'kubectl exec {} --namespace={} {}'.format(
            pod.name,
            namespace,
            '/etc/cron.daily/{}'.format(task)),
        expected=[ext.ExitCodes.EX_OK])


def list_files(node, pod, path, mask,
               namespace=ext.Namespace.BASE_NAMESPACE):
    return "".join(node.check_call(
        'kubectl exec {} --namespace={} {}'.format(
            pod.name,
            namespace,
            'find {} -- -iname {}'.format(path, mask)),
        expected=[ext.ExitCodes.EX_OK])['stdout']) \
        .replace('\n', ' ').strip().split(" ")


def rm_files(node, pod, path,
             namespace=ext.Namespace.BASE_NAMESPACE):
    node.execute(
        'kubectl exec {} --namespace={} {}'.format(
            pod.name,
            namespace,
            'rm -- {}'.format(path)))

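# Example usage of the kubectl helpers above (a sketch; 'underlay' and 'pod'
# stand for the node client and pod objects normally supplied by the test
# fixtures, and the paths/names are hypothetical):
#
#     create_file(underlay, pod, '/tmp/test-file', size=10)
#     run_daily_cron(underlay, pod, 'logrotate')
#     found = list_files(underlay, pod, '/tmp', 'test-*')
#     rm_files(underlay, pod, '/tmp/test-file')
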

class YamlEditor(object):
    """Manipulations with local or remote .yaml files.

    Usage:

    with YamlEditor("tasks.yaml") as editor:
        editor.content[key] = "value"

    with YamlEditor("astute.yaml", host=self.admin_ip) as editor:
        editor.content[key] = "value"
    """

    def __init__(self, file_path, host=None, port=None,
                 username=None, password=None, private_keys=None,
                 document_id=0,
                 default_flow_style=False, default_style=None):
        self.__file_path = file_path
        self.host = host
        self.port = port or 22
        self.username = username
        self.__password = password
        self.__private_keys = private_keys or []
        self.__content = None
        self.__documents = [{}, ]
        self.__document_id = document_id
        self.__original_content = None
        self.default_flow_style = default_flow_style
        self.default_style = default_style

    @property
    def file_path(self):
        """Open file path

        :rtype: str
        """
        return self.__file_path

    @property
    def content(self):
        if self.__content is None:
            self.__content = self.get_content()
        return self.__content

    @content.setter
    def content(self, new_content):
        self.__content = new_content

    def __get_file(self, mode="r"):
        if self.host:
            remote = ssh_client.SSHClient(
                host=self.host,
                port=self.port,
                username=self.username,
                password=self.__password,
                private_keys=self.__private_keys)

            return remote.open(self.__file_path, mode=mode)
        else:
            return open(self.__file_path, mode=mode)

    def get_content(self):
        """Return a single document from YAML"""
        def multi_constructor(loader, tag_suffix, node):
            """Stores all unknown tags content into a dict

            Original yaml:
            !unknown_tag
            - some content

            Python object:
            {"!unknown_tag": ["some content", ]}
            """
            if type(node.value) is list:
                if type(node.value[0]) is tuple:
                    return {node.tag: loader.construct_mapping(node)}
                else:
                    return {node.tag: loader.construct_sequence(node)}
            else:
                return {node.tag: loader.construct_scalar(node)}

        yaml.add_multi_constructor("!", multi_constructor)
        with self.__get_file() as file_obj:
            self.__documents = [x for x in yaml.load_all(file_obj)]
            return self.__documents[self.__document_id]

    def write_content(self, content=None):
        if content:
            self.content = content
        self.__documents[self.__document_id] = self.content

        def representer(dumper, data):
            """Represents a dict key started with '!' as a YAML tag

            Assumes that there is only one !tag in the dict at the
            current indent.

            Python object:
            {"!unknown_tag": ["some content", ]}

            Resulting yaml:
            !unknown_tag
            - some content
            """
            key = data.keys()[0]
            if key.startswith("!"):
                value = data[key]
                if type(value) is dict:
                    node = dumper.represent_mapping(key, value)
                elif type(value) is list:
                    node = dumper.represent_sequence(key, value)
                else:
                    node = dumper.represent_scalar(key, value)
            else:
                node = dumper.represent_mapping(u'tag:yaml.org,2002:map', data)
            return node

        yaml.add_representer(dict, representer)
        with self.__get_file("w") as file_obj:
            yaml.dump_all(self.__documents, file_obj,
                          default_flow_style=self.default_flow_style,
                          default_style=self.default_style)

    def __enter__(self):
        self.__content = self.get_content()
        self.__original_content = copy.deepcopy(self.content)
        return self

    def __exit__(self, x, y, z):
        if self.content == self.__original_content:
            return
        self.write_content()