Remove Fuel support; many bugfixes; add sudo support for some commands; add a default SSH user
diff --git a/scripts/build_fio_ubuntu.sh b/scripts/build_fio_ubuntu.sh
index 1ce35d5..c534cfa 100644
--- a/scripts/build_fio_ubuntu.sh
+++ b/scripts/build_fio_ubuntu.sh
@@ -2,14 +2,13 @@
set -xe
apt update
-apt -y install g++ git zlib1g-dev libaio-dev librbd-dev make bzip2
+apt -y install g++ git zlib1g-dev libaio-dev make bzip2
cd /tmp
git clone https://github.com/axboe/fio.git
cd fio
./configure
make -j 4
. /etc/lsb-release
-# VERSION=$(cat /etc/lsb-release | grep DISTRIB_CODENAME | awk -F= '{print $2}')
chmod a-x fio
bzip2 -z -9 fio
mv fio.bz2 "fio_${DISTRIB_CODENAME}_x86_64.bz2"
diff --git a/scripts/connector.py b/scripts/connector.py
deleted file mode 100644
index 6f0f744..0000000
--- a/scripts/connector.py
+++ /dev/null
@@ -1,143 +0,0 @@
-import os
-import sys
-import logging
-import argparse
-import tempfile
-import paramiko
-
-import fuel_rest_api
-from nodes.node import Node
-from utils import parse_creds
-from urlparse import urlparse
-
-
-tmp_file = tempfile.NamedTemporaryFile().name
-openrc_path = tempfile.NamedTemporaryFile().name
-logger = logging.getLogger("io-perf-tool")
-
-
-def discover_fuel_nodes(fuel_url, creds, cluster_name):
- username, tenant_name, password = parse_creds(creds)
- creds = {"username": username,
- "tenant_name": tenant_name,
- "password": password}
-
- conn = fuel_rest_api.KeystoneAuth(fuel_url, creds, headers=None)
- cluster_id = fuel_rest_api.get_cluster_id(conn, cluster_name)
- cluster = fuel_rest_api.reflect_cluster(conn, cluster_id)
-
- nodes = list(cluster.get_nodes())
- ips = [node.get_ip('admin') for node in nodes]
- roles = [node["roles"] for node in nodes]
-
- host = urlparse(fuel_url).hostname
-
- nodes, to_clean = run_agent(ips, roles, host, tmp_file)
- nodes = [Node(node[0], node[1]) for node in nodes]
-
- openrc_dict = cluster.get_openrc()
-
- logger.debug("Found %s fuel nodes for env %r" % (len(nodes), cluster_name))
- return nodes, to_clean, openrc_dict
-
-
-def discover_fuel_nodes_clean(fuel_url, ssh_creds, nodes, base_port=12345):
- admin_ip = urlparse(fuel_url).hostname
- ssh = paramiko.SSHClient()
- ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
- ssh.connect(hostname=admin_ip, port=ssh_creds["port"],
- password=ssh_creds["password"], username=ssh_creds["username"])
-
- command = "python /tmp/agent.py --clean=True --ext_ip=" + \
- admin_ip + " --base_port=" \
- + str(base_port) + " --ports"
-
- for node in nodes:
- ip = urlparse(node[0]).hostname
- command += " " + ip
-
- (stdin, stdout, stderr) = ssh.exec_command(command)
- for line in stdout.readlines():
- print line
-
-
-def run_agent(ip_addresses, roles, host, tmp_name, password="test37", port=22,
- base_port=12345):
- ssh = paramiko.SSHClient()
- ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
- ssh.connect(hostname=host, port=port, password=password, username="root")
- sftp = ssh.open_sftp()
- sftp.put(os.path.join(os.path.dirname(__file__), 'agent.py'),
- "/tmp/agent.py")
- fuel_id_rsa_path = tmp_name
- sftp.get('/root/.ssh/id_rsa', fuel_id_rsa_path)
- os.chmod(fuel_id_rsa_path, 0o700)
- command = "python /tmp/agent.py --base_port=" + \
- str(base_port) + " --ext_ip=" \
- + host + " --ports"
-
- for address in ip_addresses:
- command += " " + address
-
- (stdin, stdout, stderr) = ssh.exec_command(command)
- node_port_mapping = {}
-
- for line in stdout.readlines():
- results = line.split(' ')
-
- if len(results) != 2:
- continue
-
- node, port = results
- node_port_mapping[node] = port
-
- nodes = []
- nodes_to_clean = []
-
- for i in range(len(ip_addresses)):
- ip = ip_addresses[i]
- role = roles[i]
- port = node_port_mapping[ip]
-
- nodes_to_clean.append(("ssh://root@" + ip + ":" +
- port.rstrip('\n')
- + ":" + fuel_id_rsa_path, role))
-
- nodes.append(("ssh://root@" + host + ":" + port.rstrip('\n')
- + ":" + fuel_id_rsa_path, role))
-
- ssh.close()
- logger.info('Files has been transferred successfully to Fuel node, ' +
- 'agent has been launched')
-
- return nodes, nodes_to_clean
-
-
-def parse_command_line(argv):
- parser = argparse.ArgumentParser(
- description="Connect to fuel master and setup ssh agent")
- parser.add_argument(
- "--fuel_url", required=True)
- parser.add_argument(
- "--cluster_name", required=True)
- parser.add_argument(
- "--iface", default="eth1")
- parser.add_argument(
- "--creds", default="admin:admin@admin")
-
- return parser.parse_args(argv)
-
-
-def main(argv):
- args = parse_command_line(argv)
-
- nodes, to_clean, _ = discover_fuel_nodes(args.fuel_url,
- args.creds,
- args.cluster_name)
- discover_fuel_nodes_clean(args.fuel_url, {"username": "root",
- "password": "test37",
- "port": 22}, to_clean)
-
-
-if __name__ == "__main__":
- main(sys.argv[1:])
diff --git a/scripts/storage/__init__.py b/scripts/storage/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/scripts/storage/__init__.py
+++ /dev/null
diff --git a/scripts/storage/data_processing.py b/scripts/storage/data_processing.py
deleted file mode 100644
index b15c579..0000000
--- a/scripts/storage/data_processing.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# class displays measurement. Moved from storage_api_v_1
-# to avoid circular imports.
-import math
-
-# fix and update all this. Take statistic code from scripts/data2.py
-
-
-class Measurement(object):
- def __init__(self):
- self.build = ""
- self.build_type = 0 # GA/Master/Other
- self.md5 = ""
- self.name = ""
- self.date = None
- self.results = {
- "": (float, float)
- }
-
- def __str__(self):
- return self.build + " " + self.build_type + " " + \
- self.md5 + " " + str(self.results)
-
-
-def mean(l):
- return sum(l) / len(l)
-
-
-def stdev(l):
- m = mean(l)
- return math.sqrt(sum(map(lambda x: (x - m) ** 2, l)))
-
-
-def process_build_data(build):
- """ Function computes mean of all the data from particular build"""
- for item in build.items():
- if type(item[1]) is list:
- m = mean(item[1])
- s = stdev(item[1])
- build[item[0]] = [m, s]
-
-
-def create_measurement(data):
- """ Function creates measurement from data was extracted from database."""
-
- build_data = data[0]
-
- m = Measurement()
- m.build = build_data.build_id
- m.build_type = build_data.type
- m.name = build_data.name
- m.results = {}
-
- for i in range(1, len(data), 2):
- result = data[i]
- param_combination = data[i + 1]
-
- if not str(param_combination) in m.results:
- m.results[str(param_combination)] = [result.bandwith]
- else:
- m.results[str(param_combination)] += [result.bandwith]
-
- for k in m.results.keys():
- m.results[k] = [mean(m.results[k]), stdev(m.results[k])]
-
- return m
diff --git a/scripts/storage/db_manage.py b/scripts/storage/db_manage.py
deleted file mode 100644
index b9435d6..0000000
--- a/scripts/storage/db_manage.py
+++ /dev/null
@@ -1,100 +0,0 @@
-import argparse
-import imp
-import os.path
-import shutil
-import sqlite3
-import sys
-
-from os import remove
-from web_app.app import db
-from config import DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, basedir
-from migrate.versioning import api
-
-
-ACTIONS = {}
-
-
-def action(act):
- def wrap(f):
- ACTIONS[act] = f
-
- def inner(*args, **kwargs):
- return f(*args, **kwargs)
- return inner
- return wrap
-
-
-def parse_args(argv):
- parser = argparse.ArgumentParser(
- description="Manage DB")
- parser.add_argument("action",
- choices=["dropdb", "createdb", "migrate", "downgrade"])
- return parser.parse_args(argv)
-
-
-@action("createdb")
-def createdb():
- sqlite3.connect(os.path.join(basedir, 'app.db'))
-
- db.create_all()
- if not os.path.exists(SQLALCHEMY_MIGRATE_REPO):
- api.create(SQLALCHEMY_MIGRATE_REPO, 'database repository')
- api.version_control(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
- else:
- api.version_control(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO,
- api.version(SQLALCHEMY_MIGRATE_REPO))
-
-
-@action("dropdb")
-def dropdb():
- db.create_all()
- if os.path.exists(SQLALCHEMY_MIGRATE_REPO):
- shutil.rmtree(SQLALCHEMY_MIGRATE_REPO)
-
- db.drop_all()
- if os.path.exists(os.path.join(basedir, 'app.db')):
- remove(os.path.join(basedir, 'app.db'))
-
-
-@action("migrate")
-def migrate():
- v = api.db_version(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
- migration = SQLALCHEMY_MIGRATE_REPO + ('/versions/%03d_migration.py' %
- (v+1))
- tmp_module = imp.new_module('old_model')
- old_model = api.create_model(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
-
- exec old_model in tmp_module.__dict__
- script = api.make_update_script_for_model(DATABASE_URI,
- SQLALCHEMY_MIGRATE_REPO,
- tmp_module.meta, db.metadata)
- open(migration, "wt").write(script)
- api.upgrade(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
- v = api.db_version(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
- print('New migration saved as ' + migration)
- print('Current database version: ' + str(v))
-
-
-@action("upgrade")
-def upgrade():
- api.upgrade(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
- v = api.db_version(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
- print('Current database version: ' + str(v))
-
-
-@action("downgrade")
-def downgrade():
- v = api.db_version(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
- api.downgrade(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, v - 1)
- v = api.db_version(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
- print('Current database version: ' + str(v))
-
-
-def main(argv):
- opts = parse_args(argv)
- func = ACTIONS.get(opts.action)
- func()
-
-
-if __name__ == '__main__':
- exit(main(sys.argv[1:]))
diff --git a/scripts/storage/models.py b/scripts/storage/models.py
deleted file mode 100644
index eb632e4..0000000
--- a/scripts/storage/models.py
+++ /dev/null
@@ -1,72 +0,0 @@
-from sqlalchemy import ForeignKey
-from web_app.app import db
-
-
-class Build(db.Model):
- id = db.Column(db.Integer, primary_key=True)
- build_id = db.Column(db.String(64))
- name = db.Column(db.String(64))
- md5 = db.Column(db.String(64))
- type = db.Column(db.Integer)
-
- def __repr__(self):
- return self.build_id + " " + self.name + " " + self.type
-
-
-class Param(db.Model):
- id = db.Column(db.Integer, primary_key=True)
- name = db.Column(db.String(64))
- type = db.Column(db.String(64))
- descr = db.Column(db.String(4096))
-
-
-class ParamCombination(db.Model):
- id = db.Column(db.Integer, primary_key=True)
- param_1 = db.Column(db.Text())
- param_2 = db.Column(db.Text())
- param_3 = db.Column(db.Text())
- param_4 = db.Column(db.Text())
- param_5 = db.Column(db.Text())
- param_6 = db.Column(db.Text())
- param_7 = db.Column(db.Text())
- param_8 = db.Column(db.Text())
- param_9 = db.Column(db.Text())
- param_10 = db.Column(db.Text())
- param_11 = db.Column(db.Text())
- param_12 = db.Column(db.Text())
- param_13 = db.Column(db.Text())
- param_14 = db.Column(db.Text())
- param_15 = db.Column(db.Text())
- param_16 = db.Column(db.Text())
- param_17 = db.Column(db.Text())
- param_18 = db.Column(db.Text())
- param_19 = db.Column(db.Text())
- param_20 = db.Column(db.Text())
-
- def __repr__(self):
- return self.param_1 + " " + self.param_2 + " " + self.param_3
-
-
-class Lab(db.Model):
- id = db.Column(db.Integer, primary_key=True)
- name = db.Column(db.String(64), unique=True)
- url = db.Column(db.String(256))
- type = db.Column(db.String(4096))
- fuel_version = db.Column(db.String(64))
- ceph_version = db.Column(db.String(64))
- lab_general_info = db.Column(db.Text)
- lab_meta = db.Column(db.Text)
-
-
-class Result(db.Model):
- id = db.Column(db.Integer, primary_key=True)
- build_id = db.Column(db.Integer, ForeignKey('build.id'))
- lab_id = db.Column(db.Integer, ForeignKey('lab.id'))
- date = db.Column(db.DateTime)
- param_combination_id = db.Column(db.Integer,
- ForeignKey('param_combination.id'))
- bandwith = db.Column(db.Float)
- meta = db.Column(db.String(4096))
-
- def __repr__(self):
- return str(self.bandwith) + " " + str(self.date)
diff --git a/scripts/storage/storage_api.py b/scripts/storage/storage_api.py
deleted file mode 100644
index c7b4e3e..0000000
--- a/scripts/storage/storage_api.py
+++ /dev/null
@@ -1,454 +0,0 @@
-import datetime
-from data_processing import Measurement, create_measurement, process_build_data
-from flask import json
-from meta_info import collect_lab_data, total_lab_info
-from sqlalchemy import sql
-from persistance.models import *
-
-
-def get_build_info(build_name):
- session = db.session()
- result = session.query(Result, Build).join(Build).\
- filter(Build.name == build_name).first()
- lab = session.query(Lab).filter(Lab.id == result[0].lab_id).first()
- return eval(lab.lab_general_info)
-
-
-def get_build_detailed_info(build_name):
- data = get_build_info(build_name)
- return total_lab_info(data)
-
-
-def add_io_params(session):
- """Filling Param table with initial parameters. """
-
- param1 = Param(name="operation", type='{"write", "randwrite", '
- '"read", "randread"}',
- descr="type of write operation")
- param2 = Param(name="sync", type='{"a", "s"}',
- descr="Write mode synchronous/asynchronous")
- param3 = Param(name="block size",
- type='{"1k", "2k", "4k", "8k", "16k", '
- '"32k", "64k", "128k", "256k"}')
-
- session.add(param1)
- session.add(param2)
- session.add(param3)
-
- session.commit()
-
-
-def add_build(session, build_id, build_name, build_type, md5):
- """Function which adds particular build to database."""
-
- build = Build(type=build_type, build_id=build_id,
- name=build_name, md5=md5)
- session.add(build)
- session.commit()
-
- return build.id
-
-
-def insert_results(session, build_id, lab_id, params_combination_id,
- time=None, bandwith=0.0, meta=""):
- """Function insert particular result. """
-
- result = Result(build_id=build_id, lab_id=lab_id,
- params_combination_id=params_combination_id, time=time,
- bandwith=bandwith, meta=meta)
- session.add(result)
- session.commit()
-
-
-def add_param_comb(session, *params):
- """function responsible for adding particular params
- combination to database"""
-
- params_names = sorted([s for s in dir(ParamCombination)
- if s.startswith('param_')])
- d = zip(params_names, params)
- where = ""
-
- for item in d:
- where = sql.and_(where, getattr(ParamCombination, item[0]) == item[1])
-
- query = session.query(ParamCombination).filter(where)
- rs = session.execute(query).fetchall()
-
- if len(rs) == 0:
- param_comb = ParamCombination()
-
- for p in params_names:
- i = int(p.split('_')[1])
-
- if i - 1 < len(params):
- param_comb.__setattr__('param_' + str(i), params[i - 1])
- param = session.query(Param).filter(Param.id == i).one()
- values = eval(param.type)
-
- if params[i - 1] not in values:
- values.add(params[i - 1])
- param.type = str(values)
-
- session.add(param_comb)
- session.commit()
- return param_comb.id
- else:
- return rs[0][0]
-
-
-def add_lab(session, lab_url, lab_name, ceph_version,
- fuel_version, data, info):
- """ Function add data about particular lab"""
- result = session.query(Lab).filter(Lab.name == lab_name).all()
-
- if len(result) != 0:
- return result[0].id
- else:
- lab = Lab(name=lab_name, url=lab_url, ceph_version=ceph_version,
- fuel_version=fuel_version, lab_general_info=str(data),
- lab_meta=str(info))
- session.add(lab)
- session.commit()
- return lab.id
-
-
-def add_data(data):
- """Function store list of builds in database"""
-
- data = json.loads(data)
- session = db.session()
- add_io_params(session)
-
- for build_data in data:
- build_id = add_build(session,
- build_data.pop("build_id"),
- build_data.pop("name"),
- build_data.pop("type"),
- build_data.pop("iso_md5"),
- )
-
- creds = {"username": build_data.pop("username"),
- "password": build_data.pop("password"),
- "tenant_name": build_data.pop("tenant_name")}
-
- lab_url = build_data.pop("lab_url")
- lab_name = build_data.pop("lab_name")
- ceph_version = build_data.pop("ceph_version")
- data = collect_lab_data(lab_url, creds)
- data['name'] = lab_name
- info = total_lab_info(data)
- lab_id = add_lab(session, lab_url=lab_name, lab_name=lab_url,
- ceph_version=ceph_version,
- fuel_version=data['fuel_version'],
- data=data, info=info)
-
- date = build_data.pop("date")
- date = datetime.datetime.strptime(date, "%a %b %d %H:%M:%S %Y")
-
- for params, [bw, dev] in build_data.items():
- param_comb_id = add_param_comb(session, *params.split(" "))
- result = Result(param_combination_id=param_comb_id,
- build_id=build_id, bandwith=bw,
- date=date, lab_id=lab_id)
- session.add(result)
- session.commit()
-
-
-def load_data(lab_id=None, build_id=None, *params):
- """ Function loads data by parameters described in *params tuple."""
-
- session = db.session()
- params_names = sorted([s for s in dir(ParamCombination)
- if s.startswith('param_')])
- d = zip(params_names, params)
- where = ""
-
- for item in d:
- where = sql.and_(where, getattr(ParamCombination, item[0]) == item[1])
-
- query = session.query(ParamCombination).filter(where)
- rs = session.execute(query).fetchall()
-
- ids = [r[0] for r in rs]
-
- rs = session.query(Result).\
- filter(Result.param_combination_id.in_(ids)).all()
-
- if lab_id is not None:
- rs = [r for r in rs if r.lab_id == lab_id]
-
- if build_id is not None:
- rs = [r for r in rs if r.build_id == build_id]
-
- return rs
-
-
-def load_all():
- """Load all builds from database"""
-
- session = db.session()
- results = session.query(Result, Build, ParamCombination).\
- join(Build).join(ParamCombination).all()
-
- return results
-
-
-def collect_builds_from_db(*names):
- """ Function collecting all builds from database and filter it by names"""
-
- results = load_all()
- d = {}
-
- for item in results:
- result_data = item[0]
- build_data = item[1]
- param_combination_data = item[2]
-
- if build_data.name not in d:
- d[build_data.name] = \
- [build_data, result_data, param_combination_data]
- else:
- d[build_data.name].append(result_data)
- d[build_data.name].append(param_combination_data)
-
- if len(names) == 0:
- return {k: v for k, v in d.items()}
-
- return {k: v for k, v in d.items() if k in names}
-
-
-def prepare_build_data(build_name):
- """
- #function preparing data for display plots.
- #Format {build_name : Measurement}
- """
-
- session = db.session()
- build = session.query(Build).filter(Build.name == build_name).first()
- names = []
-
- if build.type == 'GA':
- names = [build_name]
- else:
- res = session.query(Build).\
- filter(Build.type.in_(['GA', 'master', build.type])).all()
- for r in res:
- names.append(r.name)
-
- d = collect_builds_from_db()
- d = {k: v for k, v in d.items() if k in names}
- results = {}
-
- for data in d.keys():
- m = create_measurement(d[data])
- results[m.build_type] = m
-
- return results
-
-
-def builds_list():
- """
- Function getting list of all builds available to index page
- returns list of dicts which contains data to display on index page.
- """
-
- res = []
- builds = set()
- data = load_all()
-
- for item in data:
- build = item[1]
- result = item[0]
-
- if not build.name in builds:
- builds.add(build.name)
- d = {}
- d["type"] = build.type
- d["url"] = build.name
- d["date"] = result.date
- d["name"] = build.name
- res.append(d)
-
- return res
-
-
-def get_builds_data(names=None):
- """
- Processing data from database.
- List of dicts, where each dict contains build meta
- info and kev-value measurements.
- key - param combination.
- value - [mean, deviation]
- """
-
- d = collect_builds_from_db()
-
- if not names is None:
- d = {k: v for k, v in d.items() if k in names}
- else:
- d = {k: v for k, v in d.items()}
- output = []
-
- for key, value in d.items():
- result = {}
- build = value[0]
- result["build_id"] = build.build_id
- result["iso_md5"] = build.md5
- result["type"] = build.type
- result["date"] = "Date must be here"
-
- for i in range(1, len(value), 2):
- r = value[i]
- param_combination = value[i + 1]
-
- if not str(param_combination) in result:
- result[str(param_combination)] = [r.bandwith]
- else:
- result[str(param_combination)].append(r.bandwith)
-
- output.append(result)
-
- for build in output:
- process_build_data(build)
-
- return output
-
-
-def get_data_for_table(build_name=""):
- """ Function for getting result to display table """
-
- session = db.session()
- build = session.query(Build).filter(Build.name == build_name).one()
- names = []
-
- # Get names of build that we need.
- if build.type == 'GA':
- names = [build_name]
- else:
- res = session.query(Build).filter(
- Build.type.in_(['GA', 'master', build.type])).all()
- for r in res:
- names.append(r.name)
- # get data for particular builds.
- return get_builds_data(names)
-
-
-if __name__ == '__main__':
- # add_build("Some build", "GA", "bla bla")
- cred = {"username": "admin", "password": "admin", "tenant_name": "admin"}
- json_data = '[{\
- "username": "admin",\
- "password": "admin", \
- "tenant_name": "admin",\
- "lab_url": "http://172.16.52.112:8000",\
- "lab_name": "Perf-1-Env",\
- "ceph_version": "v0.80 Firefly",\
- "randwrite a 256k": [16885, 1869],\
- "randwrite s 4k": [79, 2],\
- "read a 64k": [74398, 11618],\
- "write s 1024k": [7490, 193],\
- "randwrite a 64k": [14167, 4665],\
- "build_id": "1",\
- "randread a 1024k": [68683, 8604],\
- "randwrite s 256k": [3277, 146],\
- "write a 1024k": [24069, 660],\
- "type": "GA",\
- "write a 64k": [24555, 1006],\
- "write s 64k": [1285, 57],\
- "write a 256k": [24928, 503],\
- "write s 256k": [4029, 192],\
- "randwrite a 1024k": [23980, 1897],\
- "randread a 64k": [27257, 17268],\
- "randwrite s 1024k": [8504, 238],\
- "randread a 256k": [60868, 2637],\
- "randread a 4k": [3612, 1355],\
- "read a 1024k": [71122, 9217],\
- "date": "Thu Feb 12 19:11:56 2015",\
- "write s 4k": [87, 3],\
- "read a 4k": [88367, 6471],\
- "read a 256k": [80904, 8930],\
- "name": "GA - 6.0 GA",\
- "randwrite s 1k": [20, 0],\
- "randwrite s 64k": [1029, 34],\
- "write s 1k": [21, 0],\
- "iso_md5": "bla bla"\
- },\
- {\
- "username": "admin",\
- "password": "admin", \
- "tenant_name": "admin",\
- "lab_url": "http://172.16.52.112:8000",\
- "ceph_version": "v0.80 Firefly",\
- "lab_name": "Perf-1-Env",\
- "randwrite a 256k": [20212, 5690],\
- "randwrite s 4k": [83, 6],\
- "read a 64k": [89394, 3912],\
- "write s 1024k": [8054, 280],\
- "randwrite a 64k": [14595, 3245],\
- "build_id": "2",\
- "randread a 1024k": [83277, 9310],\
- "randwrite s 256k": [3628, 433],\
- "write a 1024k": [29226, 8624],\
- "type": "master",\
- "write a 64k": [25089, 790],\
- "write s 64k": [1236, 30],\
- "write a 256k": [30327, 9799],\
- "write s 256k": [4049, 172],\
- "randwrite a 1024k": [29000, 9302],\
- "randread a 64k": [26775, 16319],\
- "randwrite s 1024k": [8665, 1457],\
- "randread a 256k": [63608, 16126],\
- "randread a 4k": [3212, 1620],\
- "read a 1024k": [89676, 4401],\
- "date": "Thu Feb 12 19:11:56 2015",\
- "write s 4k": [88, 3],\
- "read a 4k": [92263, 5186],\
- "read a 256k": [94505, 6868],\
- "name": "6.1 Dev",\
- "randwrite s 1k": [22, 3],\
- "randwrite s 64k": [1105, 46],\
- "write s 1k": [22, 0],\
- "iso_md5": "bla bla"\
- },\
- {\
- "username": "admin",\
- "password": "admin", \
- "tenant_name": "admin",\
- "lab_url": "http://172.16.52.112:8000",\
- "ceph_version": "v0.80 Firefly",\
- "lab_name": "Perf-1-Env",\
- "randwrite a 256k": [16885, 1869],\
- "randwrite s 4k": [79, 2],\
- "read a 64k": [74398, 11618],\
- "write s 1024k": [7490, 193],\
- "randwrite a 64k": [14167, 4665],\
- "build_id": "1",\
- "randread a 1024k": [68683, 8604],\
- "randwrite s 256k": [3277, 146],\
- "write a 1024k": [24069, 660],\
- "type": "sometype",\
- "write a 64k": [24555, 1006],\
- "write s 64k": [1285, 57],\
- "write a 256k": [24928, 503],\
- "write s 256k": [4029, 192],\
- "randwrite a 1024k": [23980, 1897],\
- "randread a 64k": [27257, 17268],\
- "randwrite s 1024k": [8504, 238],\
- "randread a 256k": [60868, 2637],\
- "randread a 4k": [3612, 1355],\
- "read a 1024k": [71122, 9217],\
- "date": "Thu Feb 12 19:11:56 2015",\
- "write s 4k": [87, 3],\
- "read a 4k": [88367, 6471],\
- "read a 256k": [80904, 8930],\
- "name": "somedev",\
- "randwrite s 1k": [20, 0],\
- "randwrite s 64k": [1029, 34],\
- "write s 1k": [21, 0],\
- "iso_md5": "bla bla"\
- }]'
-
- # json_to_db(json_data)
- print load_data(1, 2)
- # add_data(json_data)