MOL 186, migration from file storage to SQL Database
diff --git a/config.py b/config.py
index b92385e..b877bb3 100644
--- a/config.py
+++ b/config.py
@@ -1,5 +1,6 @@
import os
+basedir = os.environ.get('DATABASE_FOLDER', os.path.abspath(os.path.dirname(__file__)))
DOCUMENT_ID = "1Xvd0aHA7mr-_b5C3b-sQ66BQsJiOGIT2UesP7kG26oU"
SHEET_NAME = "aaa"
WORK_SHEET = "Worksheet"
@@ -9,3 +10,10 @@
OUTPUT_FILE = "output.json"
TEST_PATH = os.environ.get("TEST_PATH", os.path.dirname(__file__) + "/test_results")
CHARTS_IMG_PATH = "static/images"
+SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')
+
+if os.environ.get('DATABASE_URL') is None:
+ DATABASE_URI = ('sqlite:///' + os.path.join(basedir, 'app.db') +
+ '?check_same_thread=False')
+else:
+ DATABASE_URI = os.environ['DATABASE_URL']
\ No newline at end of file
diff --git a/persistance/__init__.py b/persistance/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/persistance/__init__.py
diff --git a/persistance/db_create.py b/persistance/db_create.py
new file mode 100644
index 0000000..174dee3
--- /dev/null
+++ b/persistance/db_create.py
@@ -0,0 +1,18 @@
+import sqlite3
+from migrate.versioning import api
+from config import DATABASE_URI, basedir
+from config import SQLALCHEMY_MIGRATE_REPO
+from web_app.app import db
+
+import os.path
+
+
+sqlite3.connect(os.path.join(basedir, 'app.db'))
+
+db.create_all()
+if not os.path.exists(SQLALCHEMY_MIGRATE_REPO):
+ api.create(SQLALCHEMY_MIGRATE_REPO, 'database repository')
+ api.version_control(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
+else:
+ api.version_control(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO,
+ api.version(SQLALCHEMY_MIGRATE_REPO))
\ No newline at end of file
diff --git a/persistance/db_downgrade.py b/persistance/db_downgrade.py
new file mode 100644
index 0000000..d5614ad
--- /dev/null
+++ b/persistance/db_downgrade.py
@@ -0,0 +1,7 @@
+from migrate.versioning import api
+from config import DATABASE_URI
+from config import SQLALCHEMY_MIGRATE_REPO
+v = api.db_version(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
+api.downgrade(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, v - 1)
+v = api.db_version(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
+print('Current database version: ' + str(v))
\ No newline at end of file
diff --git a/persistance/db_drop_database.py b/persistance/db_drop_database.py
new file mode 100644
index 0000000..393495c
--- /dev/null
+++ b/persistance/db_drop_database.py
@@ -0,0 +1,14 @@
+from config import SQLALCHEMY_MIGRATE_REPO, basedir
+from web_app.app import db
+import shutil
+
+import os.path
+from os import remove
+
+db.create_all()
+if os.path.exists(SQLALCHEMY_MIGRATE_REPO):
+ shutil.rmtree(SQLALCHEMY_MIGRATE_REPO)
+
+db.drop_all()
+if os.path.exists(os.path.join(basedir, 'app.db')):
+ remove(os.path.join(basedir, 'app.db'))
\ No newline at end of file
diff --git a/persistance/db_migrate.py b/persistance/db_migrate.py
new file mode 100644
index 0000000..7f2b130
--- /dev/null
+++ b/persistance/db_migrate.py
@@ -0,0 +1,20 @@
+import imp
+from migrate.versioning import api
+from config import SQLALCHEMY_MIGRATE_REPO, DATABASE_URI
+from web_app.app import db
+
+
+v = api.db_version(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
+migration = SQLALCHEMY_MIGRATE_REPO + ('/versions/%03d_migration.py' % (v+1))
+tmp_module = imp.new_module('old_model')
+old_model = api.create_model(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
+
+exec old_model in tmp_module.__dict__
+script = api.make_update_script_for_model(DATABASE_URI,
+ SQLALCHEMY_MIGRATE_REPO,
+ tmp_module.meta, db.metadata)
+open(migration, "wt").write(script)
+api.upgrade(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
+v = api.db_version(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
+print('New migration saved as ' + migration)
+print('Current database version: ' + str(v))
\ No newline at end of file
diff --git a/persistance/db_upgrade.py b/persistance/db_upgrade.py
new file mode 100644
index 0000000..4e31532
--- /dev/null
+++ b/persistance/db_upgrade.py
@@ -0,0 +1,6 @@
+from migrate.versioning import api
+from config import DATABASE_URI
+from config import SQLALCHEMY_MIGRATE_REPO
+api.upgrade(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
+v = api.db_version(DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
+print('Current database version: ' + str(v))
\ No newline at end of file
diff --git a/persistance/models.py b/persistance/models.py
new file mode 100644
index 0000000..d09ed95
--- /dev/null
+++ b/persistance/models.py
@@ -0,0 +1,49 @@
+from sqlalchemy import ForeignKey
+from web_app.app import db
+
+
+class Build(db.Model):
+ id = db.Column(db.Integer, primary_key=True)
+ build_id = db.Column(db.String(64))
+ name = db.Column(db.String(64))
+ md5 = db.Column(db.String(64))
+ type = db.Column(db.Integer)
+
+ def __repr__(self):
+ return self.build_id + " " + self.name + " " + self.type
+
+
+class Param(db.Model):
+ id = db.Column(db.Integer, primary_key=True)
+ name = db.Column(db.String(64))
+ type = db.Column(db.String(64))
+ descr = db.Column(db.String(4096))
+
+
+class ParamCombination(db.Model):
+ id = db.Column(db.Integer, primary_key=True)
+ param_1 = db.Column(db.Text())
+ param_2 = db.Column(db.Text())
+ param_3 = db.Column(db.Text())
+
+ def __repr__(self):
+ return self.param_1 + " " + self.param_2 + " " + self.param_3
+
+
+class Lab(db.Model):
+ id = db.Column(db.Integer, primary_key=True)
+ name = db.Column(db.String(64))
+ type = db.Column(db.String(4096))
+
+
+class Result(db.Model):
+ id = db.Column(db.Integer, primary_key=True)
+ build_id = db.Column(db.Integer, ForeignKey('build.id'))
+ lab_id = db.Column(db.Integer)
+ date = db.Column(db.DateTime)
+ param_combination_id = db.Column(db.Integer, ForeignKey('param_combination.id'))
+ bandwith = db.Column(db.Float)
+ meta = db.Column(db.String(4096))
+
+ def __repr__(self):
+ return str(self.bandwith) + " " + str(self.date)
diff --git a/persistance/storage_api.py b/persistance/storage_api.py
new file mode 100644
index 0000000..f899084
--- /dev/null
+++ b/persistance/storage_api.py
@@ -0,0 +1,421 @@
+import datetime
+import math
+
+from flask import json
+from sqlalchemy import sql
+from persistance.models import *
+
+
+#class displays measurement. Moved from storage_api_v_1 to avoid circular imports.
+class Measurement(object):
+ def __init__(self):
+ self.build = ""
+ self.build_type = 0 # GA/Master/Other
+ self.md5 = ""
+ self.name = ""
+ self.date = None
+ self.results = {
+ "": (float, float)
+ }
+
+ def __str__(self):
+ return self.build + " " + self.build_type + " " + \
+ self.md5 + " " + str(self.results)
+
+
+def mean(l):
+ n = len(l)
+
+ return sum(l) / n
+
+
+def stdev(l):
+ m = mean(l)
+ return math.sqrt(sum(map(lambda x: (x - m) ** 2, l)))
+
+
+def process_build_data(build):
+ for item in build.items():
+ if type(item[1]) is list:
+ m = mean(item[1])
+ s = stdev(item[1])
+ build[item[0]] = [m, s]
+
+
+#filling Param table with initial parameters.
+def add_io_params(session):
+ param1 = Param(name="operation", type='{"write", "randwrite", "read", "randread"}', descr="type of write operation")
+ param2 = Param(name="sync", type='{"a", "s"}', descr="Write mode synchronous/asynchronous")
+ param3 = Param(name="block size", type='{"1k", "2k", "4k", "8k", "16k", "32k", "64k", "128k", "256k"}')
+
+ session.add(param1)
+ session.add(param2)
+ session.add(param3)
+
+ session.commit()
+
+
+#function which adds particular build to database.
+def add_build(session, build_id, build_name, build_type, md5):
+ build = Build(type=build_type, build_id=build_id, name=build_name, md5=md5)
+ session.add(build)
+ session.commit()
+
+ return build.id
+
+
+#function inserts a particular result.
+def insert_results(session, build_id, lab_id, params_combination_id,
+ time=None, bandwith=0.0, meta=""):
+ result = Result(build_id=build_id, lab_id=lab_id, params_combination_id=params_combination_id, time=time,
+ bandwith=bandwith, meta=meta)
+ session.add(result)
+ session.commit()
+
+
+#function responsible for adding particular params combination to database
+def add_param_comb(session, *params):
+ params_names = sorted([s for s in dir(ParamCombination) if s.startswith('param_')])
+ d = zip(params_names, params)
+ where = ""
+
+ for item in d:
+ where = sql.and_(where, getattr(ParamCombination, item[0]) == item[1])
+
+ query = session.query(ParamCombination).filter(where)
+ rs = session.execute(query).fetchall()
+
+
+ if len(rs) == 0:
+ param_comb = ParamCombination()
+
+ for p in params_names:
+ i = int(p.split('_')[1])
+ param_comb.__setattr__('param_' + str(i), params[i - 1])
+
+ param = session.query(Param).filter(Param.id == i).one()
+ values = eval(param.type)
+
+ if params[i - 1] not in values:
+ values.add(params[i - 1])
+ param.type = str(values)
+
+ session.add(param_comb)
+ session.commit()
+ return param_comb.id
+ else:
+ return rs[0][0]
+
+
+def add_lab(lab_name):
+ pass
+
+
+#function stores a list of builds in the database
+def add_data(data):
+ data = json.loads(data)
+ session = db.session()
+ add_io_params(session)
+
+ for build_data in data:
+ build_id = add_build(session,
+ build_data.pop("build_id"),
+ build_data.pop("name"),
+ build_data.pop("type"),
+ build_data.pop("iso_md5"),
+ )
+ date = build_data.pop("date")
+ date = datetime.datetime.strptime(date, "%a %b %d %H:%M:%S %Y")
+
+ for params, [bw, dev] in build_data.items():
+ param_comb_id = add_param_comb(session, *params.split(" "))
+ result = Result(param_combination_id=param_comb_id, build_id=build_id, bandwith=bw, date=date)
+ session.add(result)
+ session.commit()
+
+
+#function loads data by parameters described in the *params tuple.
+def load_data(*params):
+ session = db.session()
+ params_names = sorted([s for s in dir(ParamCombination) if s.startswith('param_')])
+ d = zip(params_names, params)
+ where = ""
+
+ for item in d:
+ where = sql.and_(where, getattr(ParamCombination, item[0]) == item[1])
+
+ query = session.query(ParamCombination).filter(where)
+ rs = session.execute(query).fetchall()
+
+ ids = [r[0] for r in rs]
+
+ results = session.query(Result).filter(Result.param_combination_id.in_(ids))
+ rs = session.execute(results).fetchall()
+
+ return [r[5] for r in rs]
+
+
+#load all builds from database
+def load_all():
+ session = db.session()
+ r = session.query(Param).filter(Param.id == 1).all()
+ results = session.query(Result, Build, ParamCombination).join(Build).join(ParamCombination).all()
+
+ return results
+
+
+#function collecting all builds from database and filter it by names
+def collect_builds_from_db(*names):
+ results = load_all()
+ d = {}
+
+ for item in results:
+ result_data = item[0]
+ build_data = item[1]
+ param_combination_data = item[2]
+
+ if build_data.name not in d:
+ d[build_data.name] = [build_data, result_data, param_combination_data]
+ else:
+ d[build_data.name].append(result_data)
+ d[build_data.name].append(param_combination_data)
+
+ if len(names) == 0:
+ return {k: v for k, v in d.items()}
+
+ return {k: v for k, v in d.items() if k in names}
+
+
+#function creates measurement from data was extracted from database.
+def create_measurement(data):
+ build_data = data[0]
+
+ m = Measurement()
+ m.build = build_data.build_id
+ m.build_type = build_data.type
+ m.name = build_data.name
+ m.results = {}
+
+ for i in range(1, len(data), 2):
+ result = data[i]
+ param_combination = data[i + 1]
+
+ if not str(param_combination) in m.results:
+ m.results[str(param_combination)] = [result.bandwith]
+ else:
+ m.results[str(param_combination)] += [result.bandwith]
+
+ for k in m.results.keys():
+ m.results[k] = [mean(m.results[k]), stdev(m.results[k])]
+
+ return m
+
+
+#function preparing data for displaying plots.
+#Format {build_name : Measurement}
+def prepare_build_data(build_name):
+ session = db.session()
+ build = session.query(Build).filter(Build.name == build_name).first()
+ names = []
+
+ if build.type == 'GA':
+ names = [build_name]
+ else:
+ res = session.query(Build).filter(Build.type.in_(['GA', 'master', build.type])).all()
+ for r in res:
+ names.append(r.name)
+
+
+ d = collect_builds_from_db()
+ d = {k: v for k, v in d.items() if k in names}
+ results = {}
+
+ for data in d.keys():
+ m = create_measurement(d[data])
+ results[m.build_type] = m
+
+ return results
+
+
+#function getting list of all builds available to index page
+#returns list of dicts which contains data to display on index page.
+def builds_list():
+ res = []
+ builds = set()
+ data = load_all()
+
+ for item in data:
+ build = item[1]
+ result = item[0]
+
+ if not build.name in builds:
+ builds.add(build.name)
+ d = {}
+ d["type"] = build.type
+ d["url"] = build.name
+ d["date"] = result.date
+ d["name"] = build.name
+ res.append(d)
+
+ return res
+
+
+#Processing data from database.
+#List of dicts, where each dict contains build meta info and key-value measurements.
+#key - param combination.
+#value - [mean, deviation]
+def get_builds_data(names=None):
+ d = collect_builds_from_db()
+
+ if not names is None:
+ d = {k: v for k, v in d.items() if k in names}
+ else:
+ d = {k: v for k, v in d.items()}
+ output = []
+
+ for key, value in d.items():
+ result = {}
+ build = value[0]
+ result["build_id"] = build.build_id
+ result["iso_md5"] = build.md5
+ result["type"] = build.type
+ result["date"] = "Date must be here"
+
+ for i in range(1, len(value), 2):
+ r = value[i]
+ param_combination = value[i + 1]
+
+ if not str(param_combination) in result:
+ result[str(param_combination)] = [r.bandwith]
+ else:
+ result[str(param_combination)].append(r.bandwith)
+
+ output.append(result)
+
+ for build in output:
+ process_build_data(build)
+
+ return output
+
+
+#Function for getting result to display table
+def get_data_for_table(build_name=""):
+ session = db.session()
+ build = session.query(Build).filter(Build.name == build_name).one()
+ names = []
+
+ #Get names of build that we need.
+ if build.type == 'GA':
+ names = [build_name]
+ else:
+ res = session.query(Build).filter(Build.type.in_(['GA', 'master', build.type])).all()
+ for r in res:
+ names.append(r.name)
+ #get data for particular builds.
+ return get_builds_data(names)
+
+
+if __name__ == '__main__':
+ # add_build("Some build", "GA", "bla bla")
+ json_data = '[{\
+ "randwrite a 256k": [16885, 1869],\
+ "randwrite s 4k": [79, 2],\
+ "read a 64k": [74398, 11618],\
+ "write s 1024k": [7490, 193],\
+ "randwrite a 64k": [14167, 4665],\
+ "build_id": "1",\
+ "randread a 1024k": [68683, 8604],\
+ "randwrite s 256k": [3277, 146],\
+ "write a 1024k": [24069, 660],\
+ "type": "GA",\
+ "write a 64k": [24555, 1006],\
+ "write s 64k": [1285, 57],\
+ "write a 256k": [24928, 503],\
+ "write s 256k": [4029, 192],\
+ "randwrite a 1024k": [23980, 1897],\
+ "randread a 64k": [27257, 17268],\
+ "randwrite s 1024k": [8504, 238],\
+ "randread a 256k": [60868, 2637],\
+ "randread a 4k": [3612, 1355],\
+ "read a 1024k": [71122, 9217],\
+ "date": "Thu Feb 12 19:11:56 2015",\
+ "write s 4k": [87, 3],\
+ "read a 4k": [88367, 6471],\
+ "read a 256k": [80904, 8930],\
+ "name": "GA - 6.0 GA",\
+ "randwrite s 1k": [20, 0],\
+ "randwrite s 64k": [1029, 34],\
+ "write s 1k": [21, 0],\
+ "iso_md5": "bla bla"\
+ },\
+ {\
+ "randwrite a 256k": [20212, 5690],\
+ "randwrite s 4k": [83, 6],\
+ "read a 64k": [89394, 3912],\
+ "write s 1024k": [8054, 280],\
+ "randwrite a 64k": [14595, 3245],\
+ "build_id": "2",\
+ "randread a 1024k": [83277, 9310],\
+ "randwrite s 256k": [3628, 433],\
+ "write a 1024k": [29226, 8624],\
+ "type": "master",\
+ "write a 64k": [25089, 790],\
+ "write s 64k": [1236, 30],\
+ "write a 256k": [30327, 9799],\
+ "write s 256k": [4049, 172],\
+ "randwrite a 1024k": [29000, 9302],\
+ "randread a 64k": [26775, 16319],\
+ "randwrite s 1024k": [8665, 1457],\
+ "randread a 256k": [63608, 16126],\
+ "randread a 4k": [3212, 1620],\
+ "read a 1024k": [89676, 4401],\
+ "date": "Thu Feb 12 19:11:56 2015",\
+ "write s 4k": [88, 3],\
+ "read a 4k": [92263, 5186],\
+ "read a 256k": [94505, 6868],\
+ "name": "6.1 Dev",\
+ "randwrite s 1k": [22, 3],\
+ "randwrite s 64k": [1105, 46],\
+ "write s 1k": [22, 0],\
+ "iso_md5": "bla bla"\
+ },\
+ {\
+ "randwrite a 256k": [16885, 1869],\
+ "randwrite s 4k": [79, 2],\
+ "read a 64k": [74398, 11618],\
+ "write s 1024k": [7490, 193],\
+ "randwrite a 64k": [14167, 4665],\
+ "build_id": "1",\
+ "randread a 1024k": [68683, 8604],\
+ "randwrite s 256k": [3277, 146],\
+ "write a 1024k": [24069, 660],\
+ "type": "sometype",\
+ "write a 64k": [24555, 1006],\
+ "write s 64k": [1285, 57],\
+ "write a 256k": [24928, 503],\
+ "write s 256k": [4029, 192],\
+ "randwrite a 1024k": [23980, 1897],\
+ "randread a 64k": [27257, 17268],\
+ "randwrite s 1024k": [8504, 238],\
+ "randread a 256k": [60868, 2637],\
+ "randread a 4k": [3612, 1355],\
+ "read a 1024k": [71122, 9217],\
+ "date": "Thu Feb 12 19:11:56 2015",\
+ "write s 4k": [87, 3],\
+ "read a 4k": [88367, 6471],\
+ "read a 256k": [80904, 8930],\
+ "name": "somedev",\
+ "randwrite s 1k": [20, 0],\
+ "randwrite s 64k": [1029, 34],\
+ "write s 1k": [21, 0],\
+ "iso_md5": "bla bla"\
+ }]'
+
+ # json_to_db(json_data)
+ # print load_data()
+ add_data(json_data)
+
+ print collect_builds_from_db()
+ print prepare_build_data('6.1 Dev')
+ print builds_list()
+ print get_data_for_table('somedev')
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 5877500..6a56193 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,17 +1,37 @@
+Babel==1.3
Flask==0.10.1
Flask-Bootstrap==3.3.0.1
+Flask-SQLAlchemy==2.0
+GChartWrapper==0.8
Jinja2==2.7.3
MarkupSafe==0.23
+SQLAlchemy==0.9.8
+Tempita==0.5.2
Werkzeug==0.10.1
argparse==1.2.1
+decorator==3.4.0
+futures==2.2.0
+iso8601==0.1.10
itsdangerous==0.24
+netaddr==0.7.13
+oslo.config==1.6.1
+oslo.i18n==1.4.0
+oslo.serialization==1.3.0
+oslo.utils==1.3.0
+paramiko==1.15.2
+pbr==0.10.7
petname==1.7
-wsgiref==0.1.2
-requests==2.2.1
+prettytable==0.7.2
+pyOpenSSL==0.14
+python-cinderclient==1.1.1
+python-glanceclient==0.15.0
python-keystoneclient==1.1.0
-python-novaclient
-python-glanceclient
-python-cinderclient
-paramiko
-futures
-
+python-novaclient==2.20.0
+requests==2.2.1
+simplejson==3.6.5
+six==1.9.0
+sqlalchemy-migrate==0.9.4
+sqlparse==0.1.14
+stevedore==1.2.0
+warlock==1.1.0
+wsgiref==0.1.2
diff --git a/run_tests.py b/run_tests.py
index e69de29..7ac225c 100644
--- a/run_tests.py
+++ b/run_tests.py
@@ -0,0 +1,29 @@
+from flask import Flask, render_template, request, url_for, request, redirect
+from flask.ext.sqlalchemy import SQLAlchemy
+import sqlite3
+import os
+
+app = Flask(__name__)
+sqlite3.connect(os.path.abspath("test.db"))
+app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///test.db"
+db = SQLAlchemy(app)
+
+
+class User(db.Model):
+ id = db.Column(db.Integer, primary_key=True)
+ username = db.Column(db.String(80), unique=True)
+ email = db.Column(db.String(120), unique=True)
+
+ def __init__(self, username, email):
+ self.username = username
+ self.email = email
+ def __repr__(self):
+ return "<User %r>" % self.username
+
+
+db.create_all()
+x = User("tes2t", "test2@gmail.com")
+db.session.add(x)
+db.session.commit()
+
+
diff --git a/scripts/data_extractor.py b/scripts/data_extractor.py
index 77b6478..6615bf7 100644
--- a/scripts/data_extractor.py
+++ b/scripts/data_extractor.py
@@ -2,7 +2,8 @@
import json
import sqlite3
import contextlib
-from utils import ssize_to_kb
+import os
+# from utils import ssize_to_kb
def connect(url):
@@ -128,8 +129,39 @@
def to_db():
- conn = sqlite3.connect(sys.argv[1])
- json_data = open(sys.argv[2]).read()
+ conn = sqlite3.connect(os.path.abspath("test.db"))
+ # json_data = open(sys.argv[2]).read()
+ json_data = '[{\
+ "randwrite a 256k": [16885, 1869],\
+ "randwrite s 4k": [79, 2],\
+ "read a 64k": [74398, 11618],\
+ "write s 1024k": [7490, 193],\
+ "randwrite a 64k": [14167, 4665],\
+ "build_id": "1",\
+ "randread a 1024k": [68683, 8604],\
+ "randwrite s 256k": [3277, 146],\
+ "write a 1024k": [24069, 660],\
+ "type": "GA",\
+ "write a 64k": [24555, 1006],\
+ "write s 64k": [1285, 57],\
+ "write a 256k": [24928, 503],\
+ "write s 256k": [4029, 192],\
+ "randwrite a 1024k": [23980, 1897],\
+ "randread a 64k": [27257, 17268],\
+ "randwrite s 1024k": [8504, 238],\
+ "randread a 256k": [60868, 2637],\
+ "randread a 4k": [3612, 1355],\
+ "read a 1024k": [71122, 9217],\
+ "date": "Thu Feb 12 19:11:56 2015",\
+ "write s 4k": [87, 3],\
+ "read a 4k": [88367, 6471],\
+ "read a 256k": [80904, 8930],\
+ "name": "GA - 6.0 GA",\
+ "randwrite s 1k": [20, 0],\
+ "randwrite s 64k": [1029, 34],\
+ "write s 1k": [21, 0],\
+ "iso_md5": "bla bla"\
+ }]'
if len(get_all_tables(conn)) == 0:
init_database(conn)
@@ -155,7 +187,7 @@
def from_db():
- conn = sqlite3.connect(sys.argv[1])
+ conn = sqlite3.connect(os.path.abspath("test.db"))
# sql = sys.argv[2]
cursor = conn.cursor()
@@ -181,6 +213,6 @@
plt.xlabel('block size')
plt.xticks(xvals, [dt[1] for dt in data])
plt.show()
-
-
-from_db()
+#
+init_database()
+# from_db()
diff --git a/scripts/test.db b/scripts/test.db
new file mode 100644
index 0000000..7a08f97
--- /dev/null
+++ b/scripts/test.db
Binary files differ
diff --git a/storage_api.py b/storage_api.py
index c772e28..28ec060 100644
--- a/storage_api.py
+++ b/storage_api.py
@@ -6,7 +6,6 @@
from flask import url_for
import os
-
class Measurement(object):
def __init__(self):
self.build = ""
@@ -112,3 +111,7 @@
m.results = {k: v for k, v in build.items()}
return m
+
+
+
+collect_builds()
\ No newline at end of file
diff --git a/web_app/__init__.py b/web_app/__init__.py
index eecb577..446dcc8 100644
--- a/web_app/__init__.py
+++ b/web_app/__init__.py
@@ -1,27 +1,15 @@
# <koder>: order imports in usual way
from urlparse import urlparse
-from flask import Flask, render_template, url_for, request, g, make_response
-from flask_bootstrap import Bootstrap
-from config import TEST_PATH
+from flask import render_template, url_for, make_response, request
from report import build_vertical_bar, build_lines_chart
-from storage_api import builds_list, collect_builds, create_measurement
from logging import getLogger, INFO
-
-import json
-import os.path
-import math
+from web_app import app
from web_app.keystone import KeystoneAuth
-
-app = Flask(__name__)
-Bootstrap(app)
-
-
-def get_resource_as_string(name, charset='utf-8'):
- with app.open_resource(name) as f:
- return f.read().decode(charset)
-
-app.jinja_env.globals['get_resource_as_string'] = get_resource_as_string
+from persistance.storage_api import builds_list, prepare_build_data, get_data_for_table, add_data, get_builds_data
+from web_app.app import app
+import os.path
+from werkzeug.routing import Rule
def total_lab_info(data):
@@ -92,18 +80,31 @@
for pair in b2.items():
if pair[0] in b1 and type(pair[1]) is list:
- b1[pair[0]].extend(pair[1])
+ b1[pair[0]].extend(pair[1])
else:
b1[pair[0]] = pair[1]
-@app.route("/", methods=['GET', 'POST'])
+app.url_map.add(Rule('/', endpoint='index'))
+app.url_map.add(Rule('/images/<image_name>', endpoint='get_image'))
+app.url_map.add(Rule('/tests/<test_name>', endpoint='render_test'))
+app.url_map.add(Rule('/tests/table/<test_name>/', endpoint='render_table'))
+app.url_map.add(Rule('/api/tests/<test_name>', endpoint='add_test', methods=['POST']))
+app.url_map.add(Rule('/api/tests', endpoint='get_all_tests'))
+app.url_map.add(Rule('/api/tests/<test_name>', endpoint='get_test'))
+
+
+@app.endpoint('index')
def index():
data = builds_list()
+
+ for elem in data:
+ elem['url'] = url_for('render_test', test_name=elem['url'])
+
return render_template("index.html", tests=data)
-@app.route("/images/<image_name>")
+@app.endpoint('get_image')
def get_image(image_name):
with open("static/images/" + image_name, 'rb') as f:
image_binary = f.read()
@@ -115,72 +116,29 @@
return response
-@app.route("/tests/<test_name>", methods=['GET'])
+@app.endpoint('render_test')
def render_test(test_name):
- tests = []
- header_keys = ['build_id', 'iso_md5', 'type', 'date']
- table = [[]]
- builds = collect_builds()
-
- # <koder>: rename
- l = filter(lambda x: x['name'] == test_name, builds)
-
- if l[0]['type'] == 'GA':
- builds = filter(lambda x: x['type'] == 'GA', builds)
- else:
- l.extend(filter(lambda x: x['type'] in ['GA', 'master'] and x not in l, builds))
- builds = l
-
- results = {}
- # <koder>: magik ip? fixme
meta = {"__meta__": "http://172.16.52.112:8000/api/nodes"}
data = collect_lab_data(meta)
lab_meta = total_lab_info(data)
+ results = prepare_build_data(test_name)
- for build in builds:
- # <koder>: don't use name 'type'
- type = build['type']
- results[type] = create_measurement(build)
-
- urls = build_vertical_bar(results) + build_lines_chart(results)
+ bars = build_vertical_bar(results)
+ lines = build_lines_chart(results)
+ urls = bars + lines
urls = [url_for("get_image", image_name=os.path.basename(url)) if not url.startswith('http') else url for url in urls]
- if len(tests) > 0:
- sorted_keys = sorted(tests[0].keys())
-
- for key in sorted_keys:
- if key not in header_keys:
- header_keys.append(key)
-
- for test in tests:
- row = []
-
- for header in header_keys:
- if isinstance(test[header], list):
- # <koder>: make a named constant from unichr(0x00B1)
- # <koder>: use format in this line
- row.append(str(test[header][0]) + unichr(0x00B1) + str(test[header][1]))
- else:
- row.append(test[header])
-
- table.append(row)
-
- return render_template("test.html", urls=urls, table_url=url_for('render_table', test_name=test_name),
+ return render_template("test.html", urls=urls,
+ table_url=url_for('render_table', test_name=test_name),
index_url=url_for('index'), lab_meta=lab_meta)
-@app.route("/tests/table/<test_name>/")
+@app.endpoint('render_table')
def render_table(test_name):
- builds = collect_builds()
- l = filter(lambda x: x['name'] == test_name, builds)
- if l[0]['type'] == 'GA':
- builds = filter(lambda x: x['type'] == 'GA', builds)
- else:
- l.extend(filter(lambda x: x['type'] in ['GA', 'master'] and x not in l, builds))
- builds = l
+ builds = get_data_for_table(test_name)
- header_keys = ['build_id', 'iso_md5', 'type' ,'date']
+ header_keys = ['build_id', 'iso_md5', 'type', 'date']
table = [[]]
meta = {"__meta__": "http://172.16.52.112:8000/api/nodes"}
data = collect_lab_data(meta)
@@ -207,52 +165,26 @@
back_url=url_for('render_test', test_name=test_name), lab=data)
-@app.route("/api/tests/<test_name>", methods=['POST'])
+@app.endpoint('add_test')
def add_test(test_name):
- test = json.loads(request.data)
-
- file_name = TEST_PATH + '/' + 'storage' + ".json"
-
- if not os.path.exists(file_name):
- with open(file_name, "w+") as f:
- f.write(json.dumps([]))
-
- builds = collect_builds()
- res = None
-
- for b in builds:
- if b['name'] == test['name']:
- res = b
- break
-
- if res is None:
- builds.append(test)
- else:
- merge_builds(res, test)
-
- with open(TEST_PATH + '/' + 'storage' + ".json", 'w+') as f:
- f.write(json.dumps(builds))
-
+ add_data(request.data)
return "Created", 201
-@app.route("/api/tests", methods=['GET'])
+@app.endpoint('get_all_tests')
def get_all_tests():
- return json.dumps(collect_builds())
+ return json.dumps(get_builds_data())
-@app.route("/api/tests/<test_name>", methods=['GET'])
+@app.endpoint('get_test')
def get_test(test_name):
- builds = collect_builds()
+ builds = get_builds_data(test_name)
- for build in builds:
- if build["type"] == test_name:
- return json.dumps(build)
- return "Not Found", 404
+ return json.dumps(builds)
if __name__ == "__main__":
logger = getLogger("logger")
app.logger.setLevel(INFO)
app.logger.addHandler(logger)
- app.run(host='0.0.0.0', debug=True)
+ app.run(host='0.0.0.0', debug=True)
\ No newline at end of file
diff --git a/web_app/app.py b/web_app/app.py
new file mode 100644
index 0000000..b67c804
--- /dev/null
+++ b/web_app/app.py
@@ -0,0 +1,10 @@
+from config import DATABASE_URI
+from flask import Flask
+from flask.ext.bootstrap import Bootstrap
+from flask.ext.sqlalchemy import SQLAlchemy
+
+app = Flask(__name__)
+db = SQLAlchemy(app)
+app.config["SQLALCHEMY_DATABASE_URI"] = DATABASE_URI
+Bootstrap(app)
+
diff --git a/web_app/templates/base.html b/web_app/templates/base.html
index 8612238..5c6eab9 100644
--- a/web_app/templates/base.html
+++ b/web_app/templates/base.html
@@ -8,7 +8,6 @@
<script src="//ajax.googleapis.com/ajax/libs/jquery/1.8.0/jquery.min.js"></script>
<script type="text/javascript" src="{{ url_for('static', filename='script.js')}}"></script>
<link href="//netdna.bootstrapcdn.com/twitter-bootstrap/2.1.1/css/bootstrap-combined.min.css" rel="stylesheet">
- <style type=text/css>{{ get_resource_as_string('static/style.css') }}</style>
{% endblock %}
</head>
<body>