Discover test nodes from cluster config instead of CLI options
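
Read test, sensor, logging and cluster settings from cfg_dict and run
tests against discovered nodes; drop the local runner, VM spawning and
--opts parsing code paths.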
diff --git a/run_test.py b/run_test.py
old mode 100644
new mode 100755
index f1800fd..cafbc59
--- a/run_test.py
+++ b/run_test.py
@@ -2,26 +2,20 @@
import sys
import json
import time
-import shutil
import pprint
-import weakref
import logging
import os.path
import argparse
-import traceback
-import subprocess
-import contextlib
-
+from nodes import discover
import ssh_runner
import io_scenario
+from config import cfg_dict
from utils import log_error
from rest_api import add_test
-from itest import IOPerfTest, run_test_iter, PgBenchTest
-from starts_vms import nova_connect, create_vms_mt, clear_all
+from itest import IOPerfTest, PgBenchTest
from formatters import get_formatter
-
logger = logging.getLogger("io-perf-tool")
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
@@ -60,73 +54,6 @@
    return test_runner(obj)
-class FileWrapper(object):
-    def __init__(self, fd, conn):
-        self.fd = fd
-        self.channel_wr = weakref.ref(conn)
-
-    def read(self):
-        return self.fd.read()
-
-    @property
-    def channel(self):
-        return self.channel_wr()
-
-
-class LocalConnection(object):
-    def __init__(self):
-        self.proc = None
-
-    def exec_command(self, cmd):
-        PIPE = subprocess.PIPE
-        self.proc = subprocess.Popen(cmd,
-                                     shell=True,
-                                     stdout=PIPE,
-                                     stderr=PIPE,
-                                     stdin=PIPE)
-        res = (self.proc.stdin,
-               FileWrapper(self.proc.stdout, self),
-               self.proc.stderr)
-        return res
-
-    def recv_exit_status(self):
-        return self.proc.wait()
-
-    def open_sftp(self):
-        return self
-
-    def close(self):
-        pass
-
-    def put(self, localfile, remfile):
-        return shutil.copy(localfile, remfile)
-
-    def mkdir(self, remotepath, mode):
-        os.mkdir(remotepath)
-        os.chmod(remotepath, mode)
-
-    def chmod(self, remotepath, mode):
-        os.chmod(remotepath, mode)
-
-    def copytree(self, src, dst):
-        shutil.copytree(src, dst)
-
-
-def get_local_runner(clear_tmp_files=True):
-    def closure(obj):
-        res = []
-        obj.set_result_cb(res.append)
-        test_iter = run_test_iter(obj,
-                                  LocalConnection())
-        next(test_iter)
-
-        with log_error("!Run test"):
-            next(test_iter)
-        return res
-
-    return closure
-
-
def parse_args(argv):
    parser = argparse.ArgumentParser(
        description="Run disk io performance test")
@@ -176,41 +103,6 @@
    return parser.parse_args(argv)
-def get_opts(opts_file, test_opts):
-    if opts_file is not None and test_opts is not None:
-        print "Options --opts-file and --opts can't be " + \
-              "provided same time"
-        exit(1)
-
-    if opts_file is None and test_opts is None:
-        print "Either --opts-file or --opts should " + \
-              "be provided"
-        exit(1)
-
-    if opts_file is not None:
-        opts = []
-
-        opt_lines = opts_file.readlines()
-        opt_lines = [i for i in opt_lines if i != "" and not i.startswith("#")]
-
-        for opt_line in opt_lines:
-            if opt_line.strip() != "":
-                opts.append([opt.strip()
-                             for opt in opt_line.strip().split(" ")
-                             if opt.strip() != ""])
-    else:
-        opts = [[opt.strip()
-                 for opt in test_opts.split(" ")
-                 if opt.strip() != ""]]
-
-    if len(opts) == 0:
-        print "Can't found parameters for tests. Check" + \
-              "--opts-file or --opts options"
-        exit(1)
-
-    return opts
-
-
def format_result(res, formatter):
    data = "\n{0}\n".format("=" * 80)
    data += pprint.pformat(res) + "\n"
@@ -219,117 +111,40 @@
    return templ.format(data, formatter(res), "=" * 80)
-@contextlib.contextmanager
-def start_test_vms(opts):
-    create_vms_opts = {}
-    for opt in opts.split(","):
-        name, val = opt.split("=", 1)
-        create_vms_opts[name] = val
-
-    user = create_vms_opts.pop("user")
-    key_file = create_vms_opts.pop("key_file")
-    aff_group = create_vms_opts.pop("aff_group", None)
-    raw_count = create_vms_opts.pop("count", "x1")
-
-    logger.debug("Connection to nova")
-    nova = nova_connect()
-
-    if raw_count.startswith("x"):
-        logger.debug("Getting amount of compute services")
-        count = len(nova.services.list(binary="nova-compute"))
-        count *= int(raw_count[1:])
-    else:
-        count = int(raw_count)
-
-    if aff_group is not None:
-        scheduler_hints = {'group': aff_group}
-    else:
-        scheduler_hints = None
-
-    create_vms_opts['scheduler_hints'] = scheduler_hints
-
-    logger.debug("Will start {0} vms".format(count))
-
-    try:
-        ips = [i[0] for i in create_vms_mt(nova, count, **create_vms_opts)]
-
-        uris = ["{0}@{1}::{2}".format(user, ip, key_file) for ip in ips]
-
-        yield uris
-    except:
-        traceback.print_exc()
-    finally:
-        logger.debug("Clearing")
-        clear_all(nova)
+def deploy_and_start_sensors(sensors_conf, nodes):
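+    # Placeholder: sensor deployment and start-up are not implemented yet.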
+    pass
def main(argv):
-    opts = parse_args(argv)
+    logging_conf = cfg_dict.get('logging')
+    if logging_conf and logging_conf.get('extra_logs'):
+        logger.setLevel(logging.DEBUG)
+        ch.setLevel(logging.DEBUG)
-    if opts.extra_logs:
-        logger.setLevel(logging.DEBUG)
-        ch.setLevel(logging.DEBUG)
+    # Discover nodes
+    nodes_to_run = discover.discover(cfg_dict.get('cluster'))
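+    # Each discovered node exposes a connection_url consumed by the ssh runner.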
-    test_opts = get_opts(opts.opts_file, opts.opts)
+    tests = cfg_dict.get("tests", {})
-    if opts.runner == "local":
-        logger.debug("Run on local computer")
-        try:
-            for script_args in test_opts:
-                cmd_line = " ".join(script_args)
-                logger.debug("Run test with {0!r} params".format(cmd_line))
-                runner = get_local_runner(opts.keep_temp_files)
-                res = run_io_test(opts.tool_type,
-                                  script_args,
-                                  runner,
-                                  opts.keep_temp_files)
-                logger.debug(format_result(res, get_formatter(opts.tool_type)))
-        except:
-            traceback.print_exc()
-            return 1
+    # Deploy and start sensors
+    deploy_and_start_sensors(cfg_dict.get('sensors'), nodes_to_run)
-    elif opts.runner == "ssh":
-        logger.debug("Use ssh runner")
+    for test_name, opts in tests.items():
+        cmd_line = " ".join(opts['opts'])
+        logger.debug("Run test with {0!r} params".format(cmd_line))
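+        # Hard-coded five-minute start deadline (replaces opts.max_preparation_time).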
+        latest_start_time = 300 + time.time()
+        uris = [node.connection_url for node in nodes_to_run]
+        runner = ssh_runner.get_ssh_runner(uris,
+                                           latest_start_time,
+                                           opts.get('keep_temp_files'))
+        res = run_io_test(test_name,
+                          opts['opts'],
+                          runner,
+                          opts.get('keep_temp_files'))
+        logger.debug(format_result(res, get_formatter(test_name)))
-        uris = []
-
-        if opts.create_vms_opts is not None:
-            vm_context = start_test_vms(opts.create_vms_opts)
-            uris += vm_context.__enter__()
-        else:
-            vm_context = None
-
-        if opts.runner_opts is not None:
-            uris += opts.runner_opts.split(";")
-
-        if len(uris) == 0:
-            logger.critical("You need to provide at least" +
-                            " vm spawn params or ssh params")
-            return 1
-
-        try:
-            for script_args in test_opts:
-                cmd_line = " ".join(script_args)
-                logger.debug("Run test with {0!r} params".format(cmd_line))
-                latest_start_time = opts.max_preparation_time + time.time()
-                runner = ssh_runner.get_ssh_runner(uris,
-                                                   latest_start_time,
-                                                   opts.keep_temp_files)
-                res = run_io_test(opts.tool_type,
-                                  script_args,
-                                  runner,
-                                  opts.keep_temp_files)
-                logger.debug(format_result(res, get_formatter(opts.tool_type)))
-
-        except:
-            traceback.print_exc()
-            return 1
-        finally:
-            if vm_context is not None:
-                vm_context.__exit__(None, None, None)
-                logger.debug("Clearing")
-
-    if opts.data_server_url:
-        result = json.loads(get_formatter(opts.tool_type)(res))
-        result['name'] = opts.build_name
-        add_test(opts.build_name, result, opts.data_server_url)
+    if cfg_dict.get('data_server_url'):
+        # NOTE: assumes 'build_name' now lives in the config, since the
+        # argparse namespace is gone; 'res' holds the last test's result.
+        result = json.loads(get_formatter(test_name)(res))
+        result['name'] = cfg_dict.get('build_name')
+        add_test(cfg_dict.get('build_name'), result,
+                 cfg_dict.get('data_server_url'))