Merge pull request #6 from Mirantis/bt2

New CPU load sensors (procs_blocked, procs_queue), bottleneck report
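
Adds a cpu.procs_queue sensor derived from the runnable-task field of
/proc/loadavg, stops dividing cpu.procs_blocked by the core count, and adds
a per-host bottleneck report to scripts/postprocessing/bottleneck.py:
samples where io_queue, usage_percent, procs_blocked or procs_queue exceed
their critical values are counted per (host, device, sensor) and printed
after each test report. Sensor CSV values are now parsed as floats and
ram.usage_percent uses float division.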
diff --git a/scripts/postprocessing/bottleneck.py b/scripts/postprocessing/bottleneck.py
index 8507041..01489f6 100644
--- a/scripts/postprocessing/bottleneck.py
+++ b/scripts/postprocessing/bottleneck.py
@@ -18,6 +18,7 @@
 except ImportError:
     pgv = None
 
+sys.path.append("/mnt/other/work/disk_perf_test_tool")
 from wally.run_test import load_data_from
 from wally.utils import b2ssize, b2ssize_10
 
@@ -37,6 +38,7 @@
     SensorInfo('sectors_read', 'hdd_read', 'Sect', 512),
     SensorInfo('reads_completed', 'read_op', 'OP', None),
     SensorInfo('writes_completed', 'write_op', 'OP', None),
+    SensorInfo('procs_blocked', 'blocked_procs', 'P', None),
 ]
 
 SINFO_MAP = dict((sinfo.name, sinfo) for sinfo in _SINFO)
@@ -83,7 +85,7 @@
 
         it = csv.reader(block.split("\n"))
         headers = next(it)
-        sens_data = [map(int, vals) for vals in it]
+        sens_data = [map(float, vals) for vals in it]
         source_id, hostname = headers[:2]
         headers = [(None, 'time')] + \
                   [header.split('.') for header in headers[2:]]
@@ -102,8 +104,7 @@
         pass
 
     load_data_from(fname)(None, data)
-
-    for test_type, test_results in data.results:
+    for test_type, test_results in data.results.items():
         if test_type == 'io':
             for tests_res in test_results:
                 raw_map[tests_res.config.name].append(tests_res.run_interval)
@@ -142,7 +143,9 @@
 
 critical_values = dict(
     io_queue=1,
-    mem_usage_percent=0.8)
+    usage_percent=0.8,
+    procs_blocked=1,
+    procs_queue=1)
 
 
 class AggregatedData(object):
@@ -173,6 +176,7 @@
 
     for name, sensor_data in sensors_data.items():
         for pos, (dev, sensor) in enumerate(sensor_data.headers):
+
             if 'time' == sensor:
                 continue
 
@@ -212,26 +216,32 @@
     return result
 
 
-def avg_load(data):
-    load = {}
+def avg_load(sensors_data):
+    load = collections.defaultdict(int)
 
     min_time = 0xFFFFFFFFFFF
     max_time = 0
 
-    for tm, item in data:
+    for sensor_data in sensors_data.values():
 
-        min_time = min(min_time, item.ctime)
-        max_time = max(max_time, item.ctime)
+        min_time = min(min_time, min(sensor_data.times))
+        max_time = max(max_time, max(sensor_data.times))
 
         for name, max_val in critical_values.items():
-            for (dev, sensor), val in item.values:
-                if sensor == name and val > max_val:
-                    load[(item.hostname, dev, sensor)] += 1
+            for pos, (dev, sensor) in enumerate(sensor_data.headers):
+                if sensor == name:
+                    for vals in sensor_data.values:
+                        if vals[pos] > max_val:
+                            load[(sensor_data.hostname, dev, sensor)] += 1
     return load, max_time - min_time
 
 
-def print_bottlenecks(data_iter, max_bottlenecks=15):
-    load, duration = avg_load(data_iter)
+def print_bottlenecks(sensors_data, max_bottlenecks=15):
+    load, duration = avg_load(sensors_data)
+
+    if not load:
+        return "\n*** No bottlenecks found *** \n"
+
     rev_items = ((v, k) for (k, v) in load.items())
 
     res = sorted(rev_items, reverse=True)[:max_bottlenecks]
@@ -408,9 +418,9 @@
                         type=int, default=None,
                         help="Begin and end time for tests")
     parser.add_argument('-m', '--max-bottlenek', type=int,
-                        default=15, help="Max bottlenek to show")
+                        default=15, help="Max bottleneck to show")
     parser.add_argument('-x', '--max-diff', type=int,
-                        default=10, help="Max bottlenek to show in" +
+                        default=10, help="Max bottleneck to show in " +
                         "0.1% from test nodes summ load")
     parser.add_argument('-d', '--debug-ver', action='store_true',
                         help="Full report with original data")
@@ -477,13 +487,17 @@
 
         consumption = total_consumption(data_chunks, roles_map)
 
+        bottlenecks = print_bottlenecks(data_chunks)
+
         testdata_sz = get_testdata_size(consumption) * max_diff
         testop_count = get_testop_cout(consumption) * max_diff
 
         per_consumer_table = {}
         per_consumer_table_str = {}
 
-        all_consumers = set(consumption.values()[0].all_together)
+        all_consumers = set()
+        for value in consumption.values():
+            all_consumers = all_consumers | set(value.all_together)
         fields = [field for field in fields if field in consumption]
         all_consumers_sum = []
 
@@ -534,6 +548,7 @@
         max_len = max(map(len, res.split("\n")))
         print test_name.center(max_len)
         print res
+        print bottlenecks
 
 
 if __name__ == "__main__":
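
Note: the reworked avg_load() counts, per (hostname, device, sensor), how
many samples exceed the critical_values thresholds. A standalone sketch of
the same counting idea, with hypothetical sample data rather than wally's
sensor objects:

    import collections

    critical_values = dict(io_queue=1, usage_percent=0.8,
                           procs_blocked=1, procs_queue=1)

    def count_overloads(hostname, headers, rows):
        # headers: [(dev, sensor), ...]; rows: one list of values per sample
        load = collections.defaultdict(int)
        for pos, (dev, sensor) in enumerate(headers):
            limit = critical_values.get(sensor)
            if limit is None:
                continue
            for row in rows:
                if row[pos] > limit:
                    load[(hostname, dev, sensor)] += 1
        return load

    headers = [('sda', 'io_queue'), ('cpu', 'procs_queue')]
    rows = [[0.5, 0.2], [3.0, 1.5], [2.0, 0.1]]
    # 2 over-threshold samples for sda/io_queue, 1 for cpu/procs_queue
    print(dict(count_overloads('node-1', headers, rows)))
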
diff --git a/wally/sensors/sensors/syscpu_sensors.py b/wally/sensors/sensors/syscpu_sensors.py
index 3f6cf44..e2fab8c 100644
--- a/wally/sensors/sensors/syscpu_sensors.py
+++ b/wally/sensors/sensors/syscpu_sensors.py
@@ -35,9 +35,17 @@
                 results[sensor_name] = SensorInfo(int(vals[pos]),
                                                   accum_val)
         elif dev_name == 'procs_blocked':
-            val = int(vals[1]) // core_count
+            val = int(vals[1])
             results["cpu.procs_blocked"] = SensorInfo(val, False)
         elif dev_name.startswith('cpu'):
             core_count += 1
 
+    # procs in queue
+    TASKSPOS = 3
+    vals = open('/proc/loadavg').read().split()
+    ready_procs = vals[TASKSPOS].partition('/')[0]
+    # don't count the process doing the read itself
+    procs_queue = (float(ready_procs) - 1) / core_count
+    results["cpu.procs_queue"] = SensorInfo(procs_queue, False)
+
     return results
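
Note: the new cpu.procs_queue sensor reads the fourth field of
/proc/loadavg, which has the form "runnable/total" (e.g. "2/573"),
subtracts the process doing the read and divides by the core count gathered
from the cpu* lines of /proc/stat. A minimal standalone sketch of the same
computation; using os.sysconf('SC_NPROCESSORS_ONLN') for the core count is a
simplification, not what the sensor itself does:

    import os

    def procs_queue():
        # /proc/loadavg looks like "0.10 0.20 0.15 2/573 12345";
        # field 3 is runnable/total scheduling entities
        fields = open('/proc/loadavg').read().split()
        runnable = float(fields[3].partition('/')[0])
        cores = os.sysconf('SC_NPROCESSORS_ONLN')
        # drop the reading process itself, normalize per core
        return (runnable - 1) / cores

    print(procs_queue())
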
diff --git a/wally/sensors/sensors/sysram_sensors.py b/wally/sensors/sensors/sysram_sensors.py
index 2eacf44..9de5d82 100644
--- a/wally/sensors/sensors/sysram_sensors.py
+++ b/wally/sensors/sensors/sysram_sensors.py
@@ -36,6 +36,6 @@
 
     if 'ram.MemFree' in results and 'ram.MemTotal' in results:
         used = results['ram.MemTotal'].value - results['ram.MemFree'].value
-        usage = used / results['ram.MemTotal'].value
+        usage = float(used) / results['ram.MemTotal'].value
         results["ram.usage_percent"] = SensorInfo(usage, False)
     return results
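
Note: the usage_percent fix works around Python 2 integer division, where
the ratio of two ints is floor-divided to 0. Illustration with made-up
/proc/meminfo-style values:

    # hypothetical values (kB); any used < total pair behaves the same
    used, total = 3958796, 16367644
    print(used / total)         # 0 under Python 2 (floor division)
    print(float(used) / total)  # ~0.24, the intended usage fraction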