Large refactoring: add scripts/storage package (models, storage API, DB management) and remove scripts/agent.py
diff --git a/scripts/agent.py b/scripts/agent.py
deleted file mode 100644
index f1fc3da..0000000
--- a/scripts/agent.py
+++ /dev/null
@@ -1,114 +0,0 @@
-import argparse
-import subprocess
-import sys
-import socket
-import fcntl
-import struct
-import array
-
-
-def all_interfaces():
- max_possible = 128 # arbitrary. raise if needed.
- bytes = max_possible * 32
- s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
- names = array.array('B', '\0' * bytes)
- outbytes = struct.unpack('iL', fcntl.ioctl(
- s.fileno(),
- 0x8912, # SIOCGIFCONF
- struct.pack('iL', bytes, names.buffer_info()[0])
- ))[0]
- namestr = names.tostring()
- lst = []
- for i in range(0, outbytes, 40):
- name = namestr[i:i+16].split('\0', 1)[0]
- ip = namestr[i+20:i+24]
- lst.append((name, ip))
- return lst
-
-
-def format_ip(addr):
- return str(ord(addr[0])) + '.' + \
- str(ord(addr[1])) + '.' + \
- str(ord(addr[2])) + '.' + \
- str(ord(addr[3]))
-
-
-def find_interface_by_ip(ext_ip):
- ifs = all_interfaces()
- for i in ifs:
- ip = format_ip(i[1])
-
- if ip == ext_ip:
- return str(i[0])
-
- print "External ip doesnt corresponds to any of available interfaces"
- return None
-
-
-def make_tunnels(ips, ext_ip, base_port=12345, delete=False):
- node_port = {}
-
- if delete is True:
- mode = "-D"
- else:
- mode = "-A"
-
- iface = find_interface_by_ip(ext_ip)
-
- for ip in ips:
- p = subprocess.Popen(["iptables -t nat " + mode + " PREROUTING " +
- "-p tcp -i " + iface + " --dport "
- + str(base_port) +
- " -j DNAT --to " + str(ip) + ":22"],
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- shell=True)
-
- out, err = p.communicate()
-
- if out is not None:
- print out
-
- if err is not None:
- print err
-
- node_port[ip] = base_port
- base_port += 1
-
- return node_port
-
-
-def parse_command_line(argv):
- parser = argparse.ArgumentParser(description=
- "Connect to fuel master "
- "and setup ssh agent")
- parser.add_argument(
- "--base_port", type=int, required=True)
-
- parser.add_argument(
- "--ext_ip", type=str, required=True)
-
- parser.add_argument(
- "--clean", type=bool, default=False)
-
- parser.add_argument(
- "--ports", type=str, nargs='+')
-
- return parser.parse_args(argv)
-
-
-def main(argv):
- arg_object = parse_command_line(argv)
- mapping = make_tunnels(arg_object.ports,
- ext_ip=arg_object.ext_ip,
- base_port=arg_object.base_port,
- delete=arg_object.clean)
-
- if arg_object.clean is False:
- for k in mapping:
- print k + " " + str(mapping[k])
-
-
-if __name__ == "__main__":
- main(sys.argv[1:])
diff --git a/scripts/prepare.sh b/scripts/prepare.sh
index 587f577..c114874 100644
--- a/scripts/prepare.sh
+++ b/scripts/prepare.sh
@@ -6,7 +6,7 @@
# settings
FL_RAM=1024
-FL_HDD=50
+FL_HDD=20
FL_CPU=1
diff --git a/scripts/storage/__init__.py b/scripts/storage/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/scripts/storage/__init__.py
diff --git a/scripts/storage/data_processing.py b/scripts/storage/data_processing.py
new file mode 100644
index 0000000..b15c579
--- /dev/null
+++ b/scripts/storage/data_processing.py
@@ -0,0 +1,65 @@
+# This class displays a measurement. It was moved from storage_api_v_1
+# to avoid circular imports.
+import math
+
+# TODO: fix and update all of this. Take the statistics code from scripts/data2.py
+
+
+class Measurement(object):
+ def __init__(self):
+ self.build = ""
+ self.build_type = 0 # GA/Master/Other
+ self.md5 = ""
+ self.name = ""
+ self.date = None
+ self.results = {
+ "": (float, float)
+ }
+
+ def __str__(self):
+        return self.build + " " + str(self.build_type) + " " + \
+ self.md5 + " " + str(self.results)
+
+
+def mean(l):
+ return sum(l) / len(l)
+
+
+def stdev(l):
+ m = mean(l)
+    return math.sqrt(sum(map(lambda x: (x - m) ** 2, l)) / len(l))
+
+
+def process_build_data(build):
+    """ Function replaces each list of values in the build dict with [mean, stdev]"""
+ for item in build.items():
+ if type(item[1]) is list:
+ m = mean(item[1])
+ s = stdev(item[1])
+ build[item[0]] = [m, s]
+
+
+def create_measurement(data):
+    """ Function creates a measurement from data extracted from the database."""
+
+ build_data = data[0]
+
+ m = Measurement()
+ m.build = build_data.build_id
+ m.build_type = build_data.type
+ m.name = build_data.name
+ m.results = {}
+
+ for i in range(1, len(data), 2):
+ result = data[i]
+ param_combination = data[i + 1]
+
+ if not str(param_combination) in m.results:
+ m.results[str(param_combination)] = [result.bandwith]
+ else:
+ m.results[str(param_combination)] += [result.bandwith]
+
+ for k in m.results.keys():
+ m.results[k] = [mean(m.results[k]), stdev(m.results[k])]
+
+ return m
diff --git a/scripts/storage/db_manage.py b/scripts/storage/db_manage.py
new file mode 100644
index 0000000..b9435d6
--- /dev/null
+++ b/scripts/storage/db_manage.py
@@ -0,0 +1,100 @@
+import argparse
+import imp
+import os.path
+import shutil
+import sqlite3
+import sys
+
+from os import remove
+from web_app.app import db
+from config import DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, basedir
+from migrate.versioning import api
+
+
+ACTIONS = {}
+
+
+def action(act):
+ def wrap(f):
+ ACTIONS[act] = f
+
+ def inner(*args, **kwargs):
+ return f(*args, **kwargs)
+ return inner
+ return wrap
+
+
+def parse_args(argv):
+ parser = argparse.ArgumentParser(
+ description="Manage DB")
+ parser.add_argument("action",
+                        choices=["dropdb", "createdb", "migrate", "downgrade", "upgrade"])
+ return parser.parse_args(argv)
+
+
+@action("createdb")
+def createdb():
+ sqlite3.connect(os.path.join(basedir, 'app.db'))
+
+ db.create_all()
+ if not os.path.exists(SQLALCHEMY_MIGRATE_REPO):
+ api.create(SQLALCHEMY_MIGRATE_REPO, 'database repository')
+ api.version_control(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
+ else:
+ api.version_control(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO,
+ api.version(SQLALCHEMY_MIGRATE_REPO))
+
+
+@action("dropdb")
+def dropdb():
+ db.create_all()
+ if os.path.exists(SQLALCHEMY_MIGRATE_REPO):
+ shutil.rmtree(SQLALCHEMY_MIGRATE_REPO)
+
+ db.drop_all()
+ if os.path.exists(os.path.join(basedir, 'app.db')):
+ remove(os.path.join(basedir, 'app.db'))
+
+
+@action("migrate")
+def migrate():
+ v = api.db_version(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
+ migration = SQLALCHEMY_MIGRATE_REPO + ('/versions/%03d_migration.py' %
+ (v+1))
+ tmp_module = imp.new_module('old_model')
+ old_model = api.create_model(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
+
+ exec old_model in tmp_module.__dict__
+ script = api.make_update_script_for_model(DATABASE_URI,
+ SQLALCHEMY_MIGRATE_REPO,
+ tmp_module.meta, db.metadata)
+ open(migration, "wt").write(script)
+ api.upgrade(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
+ v = api.db_version(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
+ print('New migration saved as ' + migration)
+ print('Current database version: ' + str(v))
+
+
+@action("upgrade")
+def upgrade():
+ api.upgrade(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
+ v = api.db_version(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
+ print('Current database version: ' + str(v))
+
+
+@action("downgrade")
+def downgrade():
+ v = api.db_version(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
+ api.downgrade(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, v - 1)
+ v = api.db_version(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
+ print('Current database version: ' + str(v))
+
+
+def main(argv):
+ opts = parse_args(argv)
+ func = ACTIONS.get(opts.action)
+ func()
+
+
+if __name__ == '__main__':
+ exit(main(sys.argv[1:]))
diff --git a/scripts/storage/models.py b/scripts/storage/models.py
new file mode 100644
index 0000000..eb632e4
--- /dev/null
+++ b/scripts/storage/models.py
@@ -0,0 +1,72 @@
+from sqlalchemy import ForeignKey
+from web_app.app import db
+
+
+class Build(db.Model):
+ id = db.Column(db.Integer, primary_key=True)
+ build_id = db.Column(db.String(64))
+ name = db.Column(db.String(64))
+ md5 = db.Column(db.String(64))
+ type = db.Column(db.Integer)
+
+ def __repr__(self):
+        return self.build_id + " " + self.name + " " + str(self.type)
+
+
+class Param(db.Model):
+ id = db.Column(db.Integer, primary_key=True)
+ name = db.Column(db.String(64))
+ type = db.Column(db.String(64))
+ descr = db.Column(db.String(4096))
+
+
+class ParamCombination(db.Model):
+ id = db.Column(db.Integer, primary_key=True)
+ param_1 = db.Column(db.Text())
+ param_2 = db.Column(db.Text())
+ param_3 = db.Column(db.Text())
+ param_4 = db.Column(db.Text())
+ param_5 = db.Column(db.Text())
+ param_6 = db.Column(db.Text())
+ param_7 = db.Column(db.Text())
+ param_8 = db.Column(db.Text())
+ param_9 = db.Column(db.Text())
+ param_10 = db.Column(db.Text())
+ param_11 = db.Column(db.Text())
+ param_12 = db.Column(db.Text())
+ param_13 = db.Column(db.Text())
+ param_14 = db.Column(db.Text())
+ param_15 = db.Column(db.Text())
+ param_16 = db.Column(db.Text())
+ param_17 = db.Column(db.Text())
+ param_18 = db.Column(db.Text())
+ param_19 = db.Column(db.Text())
+ param_20 = db.Column(db.Text())
+
+ def __repr__(self):
+ return self.param_1 + " " + self.param_2 + " " + self.param_3
+
+
+class Lab(db.Model):
+ id = db.Column(db.Integer, primary_key=True)
+ name = db.Column(db.String(64), unique=True)
+ url = db.Column(db.String(256))
+ type = db.Column(db.String(4096))
+ fuel_version = db.Column(db.String(64))
+ ceph_version = db.Column(db.String(64))
+ lab_general_info = db.Column(db.Text)
+ lab_meta = db.Column(db.Text)
+
+
+class Result(db.Model):
+ id = db.Column(db.Integer, primary_key=True)
+ build_id = db.Column(db.Integer, ForeignKey('build.id'))
+ lab_id = db.Column(db.Integer, ForeignKey('lab.id'))
+ date = db.Column(db.DateTime)
+ param_combination_id = db.Column(db.Integer,
+ ForeignKey('param_combination.id'))
+ bandwith = db.Column(db.Float)
+ meta = db.Column(db.String(4096))
+
+ def __repr__(self):
+ return str(self.bandwith) + " " + str(self.date)
diff --git a/scripts/storage/storage_api.py b/scripts/storage/storage_api.py
new file mode 100644
index 0000000..c7b4e3e
--- /dev/null
+++ b/scripts/storage/storage_api.py
@@ -0,0 +1,454 @@
+import datetime
+from data_processing import Measurement, create_measurement, process_build_data
+from flask import json
+from meta_info import collect_lab_data, total_lab_info
+from sqlalchemy import sql
+from models import *
+
+
+def get_build_info(build_name):
+ session = db.session()
+ result = session.query(Result, Build).join(Build).\
+ filter(Build.name == build_name).first()
+ lab = session.query(Lab).filter(Lab.id == result[0].lab_id).first()
+ return eval(lab.lab_general_info)
+
+
+def get_build_detailed_info(build_name):
+ data = get_build_info(build_name)
+ return total_lab_info(data)
+
+
+def add_io_params(session):
+ """Filling Param table with initial parameters. """
+
+ param1 = Param(name="operation", type='{"write", "randwrite", '
+ '"read", "randread"}',
+ descr="type of write operation")
+ param2 = Param(name="sync", type='{"a", "s"}',
+ descr="Write mode synchronous/asynchronous")
+ param3 = Param(name="block size",
+ type='{"1k", "2k", "4k", "8k", "16k", '
+ '"32k", "64k", "128k", "256k"}')
+
+ session.add(param1)
+ session.add(param2)
+ session.add(param3)
+
+ session.commit()
+
+
+def add_build(session, build_id, build_name, build_type, md5):
+ """Function which adds particular build to database."""
+
+ build = Build(type=build_type, build_id=build_id,
+ name=build_name, md5=md5)
+ session.add(build)
+ session.commit()
+
+ return build.id
+
+
+def insert_results(session, build_id, lab_id, params_combination_id,
+ time=None, bandwith=0.0, meta=""):
+ """Function insert particular result. """
+
+ result = Result(build_id=build_id, lab_id=lab_id,
+                    param_combination_id=params_combination_id, date=time,
+ bandwith=bandwith, meta=meta)
+ session.add(result)
+ session.commit()
+
+
+def add_param_comb(session, *params):
+ """function responsible for adding particular params
+ combination to database"""
+
+ params_names = sorted([s for s in dir(ParamCombination)
+ if s.startswith('param_')])
+ d = zip(params_names, params)
+ where = ""
+
+ for item in d:
+ where = sql.and_(where, getattr(ParamCombination, item[0]) == item[1])
+
+ query = session.query(ParamCombination).filter(where)
+ rs = session.execute(query).fetchall()
+
+ if len(rs) == 0:
+ param_comb = ParamCombination()
+
+ for p in params_names:
+ i = int(p.split('_')[1])
+
+ if i - 1 < len(params):
+ param_comb.__setattr__('param_' + str(i), params[i - 1])
+ param = session.query(Param).filter(Param.id == i).one()
+ values = eval(param.type)
+
+ if params[i - 1] not in values:
+ values.add(params[i - 1])
+ param.type = str(values)
+
+ session.add(param_comb)
+ session.commit()
+ return param_comb.id
+ else:
+ return rs[0][0]
+
+
+def add_lab(session, lab_url, lab_name, ceph_version,
+ fuel_version, data, info):
+    """ Function adds data about a particular lab"""
+ result = session.query(Lab).filter(Lab.name == lab_name).all()
+
+ if len(result) != 0:
+ return result[0].id
+ else:
+ lab = Lab(name=lab_name, url=lab_url, ceph_version=ceph_version,
+ fuel_version=fuel_version, lab_general_info=str(data),
+ lab_meta=str(info))
+ session.add(lab)
+ session.commit()
+ return lab.id
+
+
+def add_data(data):
+    """Function stores a list of builds in the database"""
+
+ data = json.loads(data)
+ session = db.session()
+ add_io_params(session)
+
+ for build_data in data:
+ build_id = add_build(session,
+ build_data.pop("build_id"),
+ build_data.pop("name"),
+ build_data.pop("type"),
+ build_data.pop("iso_md5"),
+ )
+
+ creds = {"username": build_data.pop("username"),
+ "password": build_data.pop("password"),
+ "tenant_name": build_data.pop("tenant_name")}
+
+ lab_url = build_data.pop("lab_url")
+ lab_name = build_data.pop("lab_name")
+ ceph_version = build_data.pop("ceph_version")
+ data = collect_lab_data(lab_url, creds)
+ data['name'] = lab_name
+ info = total_lab_info(data)
+        lab_id = add_lab(session, lab_url=lab_url, lab_name=lab_name,
+ ceph_version=ceph_version,
+ fuel_version=data['fuel_version'],
+ data=data, info=info)
+
+ date = build_data.pop("date")
+ date = datetime.datetime.strptime(date, "%a %b %d %H:%M:%S %Y")
+
+ for params, [bw, dev] in build_data.items():
+ param_comb_id = add_param_comb(session, *params.split(" "))
+ result = Result(param_combination_id=param_comb_id,
+ build_id=build_id, bandwith=bw,
+ date=date, lab_id=lab_id)
+ session.add(result)
+ session.commit()
+
+
+def load_data(lab_id=None, build_id=None, *params):
+ """ Function loads data by parameters described in *params tuple."""
+
+ session = db.session()
+ params_names = sorted([s for s in dir(ParamCombination)
+ if s.startswith('param_')])
+ d = zip(params_names, params)
+ where = ""
+
+ for item in d:
+ where = sql.and_(where, getattr(ParamCombination, item[0]) == item[1])
+
+ query = session.query(ParamCombination).filter(where)
+ rs = session.execute(query).fetchall()
+
+ ids = [r[0] for r in rs]
+
+ rs = session.query(Result).\
+ filter(Result.param_combination_id.in_(ids)).all()
+
+ if lab_id is not None:
+ rs = [r for r in rs if r.lab_id == lab_id]
+
+ if build_id is not None:
+ rs = [r for r in rs if r.build_id == build_id]
+
+ return rs
+
+
+def load_all():
+ """Load all builds from database"""
+
+ session = db.session()
+ results = session.query(Result, Build, ParamCombination).\
+ join(Build).join(ParamCombination).all()
+
+ return results
+
+
+def collect_builds_from_db(*names):
+    """ Function collects all builds from the database and filters them by names"""
+
+ results = load_all()
+ d = {}
+
+ for item in results:
+ result_data = item[0]
+ build_data = item[1]
+ param_combination_data = item[2]
+
+ if build_data.name not in d:
+ d[build_data.name] = \
+ [build_data, result_data, param_combination_data]
+ else:
+ d[build_data.name].append(result_data)
+ d[build_data.name].append(param_combination_data)
+
+ if len(names) == 0:
+ return {k: v for k, v in d.items()}
+
+ return {k: v for k, v in d.items() if k in names}
+
+
+def prepare_build_data(build_name):
+ """
+    Function prepares data for displaying plots.
+    Format: {build_name: Measurement}
+ """
+
+ session = db.session()
+ build = session.query(Build).filter(Build.name == build_name).first()
+ names = []
+
+ if build.type == 'GA':
+ names = [build_name]
+ else:
+ res = session.query(Build).\
+ filter(Build.type.in_(['GA', 'master', build.type])).all()
+ for r in res:
+ names.append(r.name)
+
+ d = collect_builds_from_db()
+ d = {k: v for k, v in d.items() if k in names}
+ results = {}
+
+ for data in d.keys():
+ m = create_measurement(d[data])
+ results[m.build_type] = m
+
+ return results
+
+
+def builds_list():
+ """
+    Function gets the list of all builds available to the index page;
+    returns a list of dicts which contain the data to display on the index page.
+ """
+
+ res = []
+ builds = set()
+ data = load_all()
+
+ for item in data:
+ build = item[1]
+ result = item[0]
+
+ if not build.name in builds:
+ builds.add(build.name)
+ d = {}
+ d["type"] = build.type
+ d["url"] = build.name
+ d["date"] = result.date
+ d["name"] = build.name
+ res.append(d)
+
+ return res
+
+
+def get_builds_data(names=None):
+ """
+ Processing data from database.
+ List of dicts, where each dict contains build meta
+    info and key-value measurements.
+ key - param combination.
+ value - [mean, deviation]
+ """
+
+ d = collect_builds_from_db()
+
+ if not names is None:
+ d = {k: v for k, v in d.items() if k in names}
+ else:
+ d = {k: v for k, v in d.items()}
+ output = []
+
+ for key, value in d.items():
+ result = {}
+ build = value[0]
+ result["build_id"] = build.build_id
+ result["iso_md5"] = build.md5
+ result["type"] = build.type
+ result["date"] = "Date must be here"
+
+ for i in range(1, len(value), 2):
+ r = value[i]
+ param_combination = value[i + 1]
+
+ if not str(param_combination) in result:
+ result[str(param_combination)] = [r.bandwith]
+ else:
+ result[str(param_combination)].append(r.bandwith)
+
+ output.append(result)
+
+ for build in output:
+ process_build_data(build)
+
+ return output
+
+
+def get_data_for_table(build_name=""):
+ """ Function for getting result to display table """
+
+ session = db.session()
+ build = session.query(Build).filter(Build.name == build_name).one()
+ names = []
+
+ # Get names of build that we need.
+ if build.type == 'GA':
+ names = [build_name]
+ else:
+ res = session.query(Build).filter(
+ Build.type.in_(['GA', 'master', build.type])).all()
+ for r in res:
+ names.append(r.name)
+ # get data for particular builds.
+ return get_builds_data(names)
+
+
+if __name__ == '__main__':
+ # add_build("Some build", "GA", "bla bla")
+ cred = {"username": "admin", "password": "admin", "tenant_name": "admin"}
+ json_data = '[{\
+ "username": "admin",\
+ "password": "admin", \
+ "tenant_name": "admin",\
+ "lab_url": "http://172.16.52.112:8000",\
+ "lab_name": "Perf-1-Env",\
+ "ceph_version": "v0.80 Firefly",\
+ "randwrite a 256k": [16885, 1869],\
+ "randwrite s 4k": [79, 2],\
+ "read a 64k": [74398, 11618],\
+ "write s 1024k": [7490, 193],\
+ "randwrite a 64k": [14167, 4665],\
+ "build_id": "1",\
+ "randread a 1024k": [68683, 8604],\
+ "randwrite s 256k": [3277, 146],\
+ "write a 1024k": [24069, 660],\
+ "type": "GA",\
+ "write a 64k": [24555, 1006],\
+ "write s 64k": [1285, 57],\
+ "write a 256k": [24928, 503],\
+ "write s 256k": [4029, 192],\
+ "randwrite a 1024k": [23980, 1897],\
+ "randread a 64k": [27257, 17268],\
+ "randwrite s 1024k": [8504, 238],\
+ "randread a 256k": [60868, 2637],\
+ "randread a 4k": [3612, 1355],\
+ "read a 1024k": [71122, 9217],\
+ "date": "Thu Feb 12 19:11:56 2015",\
+ "write s 4k": [87, 3],\
+ "read a 4k": [88367, 6471],\
+ "read a 256k": [80904, 8930],\
+ "name": "GA - 6.0 GA",\
+ "randwrite s 1k": [20, 0],\
+ "randwrite s 64k": [1029, 34],\
+ "write s 1k": [21, 0],\
+ "iso_md5": "bla bla"\
+ },\
+ {\
+ "username": "admin",\
+ "password": "admin", \
+ "tenant_name": "admin",\
+ "lab_url": "http://172.16.52.112:8000",\
+ "ceph_version": "v0.80 Firefly",\
+ "lab_name": "Perf-1-Env",\
+ "randwrite a 256k": [20212, 5690],\
+ "randwrite s 4k": [83, 6],\
+ "read a 64k": [89394, 3912],\
+ "write s 1024k": [8054, 280],\
+ "randwrite a 64k": [14595, 3245],\
+ "build_id": "2",\
+ "randread a 1024k": [83277, 9310],\
+ "randwrite s 256k": [3628, 433],\
+ "write a 1024k": [29226, 8624],\
+ "type": "master",\
+ "write a 64k": [25089, 790],\
+ "write s 64k": [1236, 30],\
+ "write a 256k": [30327, 9799],\
+ "write s 256k": [4049, 172],\
+ "randwrite a 1024k": [29000, 9302],\
+ "randread a 64k": [26775, 16319],\
+ "randwrite s 1024k": [8665, 1457],\
+ "randread a 256k": [63608, 16126],\
+ "randread a 4k": [3212, 1620],\
+ "read a 1024k": [89676, 4401],\
+ "date": "Thu Feb 12 19:11:56 2015",\
+ "write s 4k": [88, 3],\
+ "read a 4k": [92263, 5186],\
+ "read a 256k": [94505, 6868],\
+ "name": "6.1 Dev",\
+ "randwrite s 1k": [22, 3],\
+ "randwrite s 64k": [1105, 46],\
+ "write s 1k": [22, 0],\
+ "iso_md5": "bla bla"\
+ },\
+ {\
+ "username": "admin",\
+ "password": "admin", \
+ "tenant_name": "admin",\
+ "lab_url": "http://172.16.52.112:8000",\
+ "ceph_version": "v0.80 Firefly",\
+ "lab_name": "Perf-1-Env",\
+ "randwrite a 256k": [16885, 1869],\
+ "randwrite s 4k": [79, 2],\
+ "read a 64k": [74398, 11618],\
+ "write s 1024k": [7490, 193],\
+ "randwrite a 64k": [14167, 4665],\
+ "build_id": "1",\
+ "randread a 1024k": [68683, 8604],\
+ "randwrite s 256k": [3277, 146],\
+ "write a 1024k": [24069, 660],\
+ "type": "sometype",\
+ "write a 64k": [24555, 1006],\
+ "write s 64k": [1285, 57],\
+ "write a 256k": [24928, 503],\
+ "write s 256k": [4029, 192],\
+ "randwrite a 1024k": [23980, 1897],\
+ "randread a 64k": [27257, 17268],\
+ "randwrite s 1024k": [8504, 238],\
+ "randread a 256k": [60868, 2637],\
+ "randread a 4k": [3612, 1355],\
+ "read a 1024k": [71122, 9217],\
+ "date": "Thu Feb 12 19:11:56 2015",\
+ "write s 4k": [87, 3],\
+ "read a 4k": [88367, 6471],\
+ "read a 256k": [80904, 8930],\
+ "name": "somedev",\
+ "randwrite s 1k": [20, 0],\
+ "randwrite s 64k": [1029, 34],\
+ "write s 1k": [21, 0],\
+ "iso_md5": "bla bla"\
+ }]'
+
+ # json_to_db(json_data)
+ print load_data(1, 2)
+ # add_data(json_data)