import math
import logging
import itertools
from typing import List, Callable, Iterable, cast, Tuple, Optional

import numpy
from scipy import stats, optimize
from numpy import linalg
from numpy.polynomial.chebyshev import chebfit, chebval


from .result_classes import NormStatProps, HistoStatProps, TimeSeries
from .utils import Number


logger = logging.getLogger("wally")
DOUBLE_DELTA = 1e-8
MIN_VALUES_FOR_CONFIDENCE = 7


average = numpy.mean
dev = lambda x: math.sqrt(numpy.var(x, ddof=1))


def calc_norm_stat_props(ts: TimeSeries, bins_count: Optional[int] = None, confidence: float = 0.95) -> NormStatProps:
    """Calculate statistical properties of an array of numbers"""

    # array.array has very basic support
    data = cast(List[int], ts.data)
    res = NormStatProps(data)  # type: ignore

    if len(data) == 0:
        raise ValueError("Input array is empty")

    data = sorted(data)
    res.average = average(data)
    res.deviation = dev(data)

    res.max = data[-1]
    res.min = data[0]

    pcs = numpy.percentile(data, q=[1.0, 5.0, 10., 50., 90., 95., 99.])
    res.perc_1, res.perc_5, res.perc_10, res.perc_50, res.perc_90, res.perc_95, res.perc_99 = pcs

    if len(data) >= MIN_VALUES_FOR_CONFIDENCE:
        res.confidence = stats.sem(data) * \
                         stats.t.ppf((1 + confidence) / 2, len(data) - 1)
        res.confidence_level = confidence
    else:
        res.confidence = None
        res.confidence_level = None

    if bins_count is not None:
        res.bins_populations, res.bins_edges = numpy.histogram(data, bins=bins_count)
        res.bins_edges = res.bins_edges[:-1]

    try:
        res.normtest = stats.mstats.normaltest(data)
    except Exception as exc:
        logger.warning("stats.mstats.normaltest failed with error: %s", exc)

    res.skew = stats.skew(data)
    res.kurt = stats.kurtosis(data)

    return res


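# Illustrative sketch (not part of the original module) of the confidence-interval
# math used above, on a plain list of samples instead of a TimeSeries object.
# The half-width is SEM * Student-t quantile for the requested confidence level;
# the sample values are made up.
def _example_confidence_interval(confidence: float = 0.95) -> Tuple[float, float]:
    samples = [1.0, 2.0, 2.0, 3.0, 3.0, 3.0, 4.0]
    assert len(samples) >= MIN_VALUES_FOR_CONFIDENCE
    half_width = stats.sem(samples) * stats.t.ppf((1 + confidence) / 2, len(samples) - 1)
    return average(samples) - half_width, average(samples) + half_width

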
# TODO: update this code
def rebin_histogram(bins_populations: numpy.array,
                    bins_edges: numpy.array,
                    new_bins_count: int,
                    left_tail_idx: Optional[int] = None,
                    right_tail_idx: Optional[int] = None,
                    log_bins: bool = False) -> Tuple[numpy.array, numpy.array]:
    # Rebin a large histogram into a smaller one with new_bins_count bins,
    # distributing populations linearly across the left_tail_idx:right_tail_idx range

    assert len(bins_populations.shape) == 1
    assert len(bins_edges.shape) == 1
    assert bins_edges.shape[0] == bins_populations.shape[0]

    if left_tail_idx is None:
        min_val = bins_edges[0]
    else:
        min_val = bins_edges[left_tail_idx]

    if right_tail_idx is None:
        max_val = bins_edges[-1]
    else:
        max_val = bins_edges[right_tail_idx]

    if log_bins:
        assert min_val > 1E-3
        step = (max_val / min_val) ** (1 / new_bins_count)
        new_bins_edges = min_val * (step ** numpy.arange(new_bins_count))  # type: numpy.array
    else:
        new_bins_edges = numpy.linspace(min_val, max_val, new_bins_count + 1, dtype='float')[:-1]  # type: numpy.array

    old_bins_pos = numpy.searchsorted(new_bins_edges, bins_edges, side='right')
    new_bins = numpy.zeros(new_bins_count, dtype=int)  # type: numpy.array

    # last source bin can't be split
    # TODO: need to add an assert for this
    new_bins[-1] += bins_populations[-1]
    bin_sizes = bins_edges[1:] - bins_edges[:-1]

    # correct position to get bin idx from edge idx
    old_bins_pos -= 1
    old_bins_pos[old_bins_pos < 0] = 0
    new_bins_sizes = new_bins_edges[1:] - new_bins_edges[:-1]

    for population, begin, end, bsize in zip(bins_populations[:-1], old_bins_pos[:-1], old_bins_pos[1:], bin_sizes):
        if begin == end:
            new_bins[begin] += population
        else:
            # spread the source bin population across target bins [begin, end),
            # proportionally to each target bin width
            density = population / bsize
            for curr_box in range(begin, end):
                cnt = min(int(new_bins_sizes[curr_box] * density + 0.5), population)
                new_bins[curr_box] += cnt
                population -= cnt

    return new_bins, new_bins_edges


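# Illustrative sketch (not part of the original module) showing how rebin_histogram()
# collapses a fine histogram into a coarser one.  The edges/populations below are
# made up; only the shape and the total population are asserted.
def _example_rebin_histogram() -> None:
    edges = numpy.array([0., 1., 2., 3., 4., 5.])      # left edges, one per bin
    populations = numpy.array([1, 2, 4, 4, 2, 1])
    new_pops, new_edges = rebin_histogram(populations, edges, new_bins_count=3)
    assert len(new_pops) == len(new_edges) == 3
    assert new_pops.sum() == populations.sum()

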
def calc_histo_stat_props(ts: TimeSeries,
                          bins_edges: numpy.array,
                          rebins_count: Optional[int] = None,
                          tail: float = 0.005) -> HistoStatProps:
    log_bins = False
    res = HistoStatProps(ts.data)

    # sum across all series
    aggregated = ts.data.sum(axis=0, dtype='int')
    total = aggregated.sum()

    # percentile levels
    expected = list(numpy.array([0.01, 0.05, 0.1, 0.5, 0.9, 0.95, 0.99]) * total)
    cumsum = numpy.cumsum(aggregated)

    percentiles_bins = numpy.searchsorted(cumsum, expected)
    percentiles = bins_edges[percentiles_bins]
    res.perc_1, res.perc_5, res.perc_10, res.perc_50, res.perc_90, res.perc_95, res.perc_99 = percentiles

    # don't show tail ranges on histogram
    left_tail_idx, right_tail_idx = numpy.searchsorted(cumsum, [tail * total, (1 - tail) * total])

    # minimal and maximal non-zero elements
    non_zero = numpy.nonzero(aggregated)[0]
    res.min = bins_edges[non_zero[0]]
    res.max = bins_edges[non_zero[-1] + (1 if non_zero[-1] != len(bins_edges) - 1 else 0)]

    res.log_bins = False
    if rebins_count is not None:
        res.bins_populations, res.bins_edges = rebin_histogram(aggregated, bins_edges, rebins_count,
                                                               left_tail_idx, right_tail_idx)
    else:
        res.bins_populations = aggregated
        res.bins_edges = bins_edges.copy()

    return res


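# Illustrative sketch (not part of the original module) of the percentile-from-histogram
# logic used above: find the first bin whose cumulative population reaches the requested
# fraction of the total.  The histogram values are made up.
def _example_percentile_from_histogram() -> float:
    bins_edges = numpy.array([0., 1., 2., 3., 4.])     # left edges
    populations = numpy.array([1, 3, 10, 3, 1])
    cumsum = numpy.cumsum(populations)                 # [1, 4, 14, 17, 18]
    median_bin = numpy.searchsorted(cumsum, 0.5 * populations.sum())
    return float(bins_edges[median_bin])               # -> 2.0, the bin holding the 50th percentile

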
def groupby_globally(data: Iterable, key_func: Callable):
    grouped = {}  # type: ignore
    grouped_iter = itertools.groupby(data, key_func)

    for (bs, cache_tp, act, conc), curr_data_it in grouped_iter:
        key = (bs, cache_tp, act, conc)
        grouped.setdefault(key, []).extend(curr_data_it)

    return grouped


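# Illustrative sketch (not part of the original module).  Unlike bare itertools.groupby,
# which only merges *consecutive* items, groupby_globally() merges all items sharing a
# key regardless of their order.  The (bs, cache_tp, act, conc) key tuples are made up.
def _example_groupby_globally() -> None:
    items = [((4, 'wt', 'read', 1), 10),
             ((8, 'wb', 'write', 2), 20),
             ((4, 'wt', 'read', 1), 30)]
    grouped = groupby_globally(items, key_func=lambda item: item[0])
    assert grouped[(4, 'wt', 'read', 1)] == [((4, 'wt', 'read', 1), 10), ((4, 'wt', 'read', 1), 30)]

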
def approximate_curve(x: List[Number], y: List[float], xnew: List[Number], curved_coef: int) -> List[float]:
    """Returns ynew - values at xnew of a Chebyshev approximation of degree curved_coef fitted to (x, y)"""
    return cast(List[float], chebval(xnew, chebfit(x, y, curved_coef)))


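# Illustrative sketch (not part of the original module).  A degree-2 Chebyshev fit
# reproduces noiseless quadratic data almost exactly; the inputs are made up.
def _example_approximate_curve() -> None:
    x = [0.0, 1.0, 2.0, 3.0, 4.0]
    y = [v * v for v in x]                               # y = x**2
    ynew = approximate_curve(x, y, xnew=[1.5, 2.5], curved_coef=2)
    assert abs(ynew[0] - 2.25) < 1e-6 and abs(ynew[1] - 6.25) < 1e-6

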
def approximate_line(x: List[Number], y: List[float], xnew: List[Number], relative_dist: bool = False) -> List[float]:
    """
    x, y - source data; xnew - points where the approximation should be evaluated
    if relative_dist is False the residual is y - ynew, otherwise it is relative (1 - y / ynew)
    returns ynew - y values of the linear approximation at xnew
    """
    ox = numpy.array(x)
    oy = numpy.array(y)

    # set approximation function
    def func_line(tpl, x):
        return tpl[0] * x + tpl[1]

    def error_func_rel(tpl, x, y):
        return 1.0 - y / func_line(tpl, x)

    def error_func_abs(tpl, x, y):
        return y - func_line(tpl, x)

    # choose residual mode
    error_func = error_func_rel if relative_dist else error_func_abs

    # initial guess from the first two points
    tpl_initial = tuple(linalg.solve([[ox[0], 1.0], [ox[1], 1.0]],
                                     oy[:2]))

    # fit the line
    tpl_final, success = optimize.leastsq(error_func, tpl_initial[:], args=(ox, oy))

    # leastsq reports convergence with codes 1..4
    if success not in range(1, 5):
        raise ValueError("Can't fit a line to the given points")

    # evaluate the fitted line at xnew
    return func_line(tpl_final, numpy.array(xnew))


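# Illustrative sketch (not part of the original module).  Fitting a noiseless line
# recovers it exactly, so extrapolation is exact too; the sample points are made up.
def _example_approximate_line() -> None:
    x = [0.0, 1.0, 2.0, 3.0]
    y = [2.0 * v + 1.0 for v in x]                       # y = 2x + 1
    ynew = approximate_line(x, y, xnew=[10.0], relative_dist=False)
    assert abs(ynew[0] - 21.0) < 1e-6

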
def moving_average(data: numpy.array, window: int) -> numpy.array:
    cumsum = numpy.cumsum(data)
    cumsum[window:] = cumsum[window:] - cumsum[:-window]
    return cumsum[window - 1:] / window


def moving_dev(data: numpy.array, window: int) -> numpy.array:
    cumsum = numpy.cumsum(data)
    cumsum2 = numpy.cumsum(data ** 2)
    cumsum[window:] = cumsum[window:] - cumsum[:-window]
    cumsum2[window:] = cumsum2[window:] - cumsum2[:-window]
    return ((cumsum2[window - 1:] - cumsum[window - 1:] ** 2 / window) / (window - 1)) ** 0.5


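# Illustrative sketch (not part of the original module).  Both helpers return
# len(data) - window + 1 points: a rolling mean and a rolling sample deviation
# computed via cumulative sums.  The data below is made up.
def _example_moving_stats() -> None:
    data = numpy.array([1.0, 2.0, 4.0, 4.0, 2.0, 1.0])
    avg = moving_average(data, window=3)
    dev_ = moving_dev(data, window=3)
    assert len(avg) == len(dev_) == len(data) - 2
    assert abs(avg[0] - (1.0 + 2.0 + 4.0) / 3) < 1e-6

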
def find_ouliers(data: numpy.array,
                 center_range: Tuple[int, int] = (25, 75),
                 cut_range: float = 3.0) -> numpy.array:
    v1, v2 = numpy.percentile(data, center_range)
    return numpy.abs(data - (v1 + v2) / 2) > ((v2 - v1) / 2 * cut_range)


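# Illustrative sketch (not part of the original module).  A point is flagged when it
# lies more than cut_range half-IQRs away from the IQR midpoint, so the single large
# value below is the only outlier.  The data is made up.
def _example_find_ouliers() -> None:
    data = numpy.array([10.0, 11.0, 9.0, 10.0, 12.0, 10.0, 11.0, 100.0, 9.0, 10.0])
    mask = find_ouliers(data, center_range=(25, 75), cut_range=3.0)
    assert mask[7] and mask.sum() == 1

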
def find_ouliers_ts(data: numpy.array,
                    windows_size: int = 30,
                    center_range: Tuple[int, int] = (25, 75),
                    cut_range: float = 3.0) -> numpy.array:
    outliers = numpy.empty(data.shape, dtype=bool)

    if len(data) < windows_size:
        outliers[:] = False
        return outliers

    begin_idx = 0
    if len(data) < windows_size * 2:
        # too short for two full windows - process everything at once
        end_idx = len(data)
    else:
        # the first window also absorbs half of the length remainder
        end_idx = (len(data) % windows_size) // 2 + windows_size

    while True:
        cdata = data[begin_idx: end_idx]
        outliers[begin_idx: end_idx] = find_ouliers(cdata, center_range, cut_range)
        begin_idx = end_idx

        if end_idx == len(data):
            break

        end_idx += windows_size
        if len(data) - end_idx < windows_size:
            end_idx = len(data)

    return outliers


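# Illustrative sketch (not part of the original module).  The same kind of spike is
# detected when detection runs window by window; the data is made up.
def _example_find_ouliers_ts() -> None:
    data = numpy.tile(numpy.array([10.0, 11.0, 9.0, 10.0, 12.0]), 8)   # 40 points
    data[17] = 100.0                                                   # inject one spike
    mask = find_ouliers_ts(data, windows_size=10)
    assert mask[17] and mask.sum() == 1

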
def hist_outliers_nd(bin_populations: numpy.array,
                     bin_centers: numpy.array,
                     center_range: Tuple[int, int] = (25, 75),
                     cut_range: float = 3.0) -> Tuple[int, int]:
    assert len(bin_populations) == len(bin_centers)
    total_count = bin_populations.sum()

    perc25 = total_count / 100.0 * center_range[0]
    perc75 = total_count / 100.0 * center_range[1]

    perc25_idx, perc75_idx = numpy.searchsorted(numpy.cumsum(bin_populations), [perc25, perc75])
    middle = (bin_centers[perc75_idx] + bin_centers[perc25_idx]) / 2
    r = (bin_centers[perc75_idx] - bin_centers[perc25_idx]) / 2

    lower_bound = middle - r * cut_range
    upper_bound = middle + r * cut_range

    lower_cut_idx, upper_cut_idx = numpy.searchsorted(bin_centers, [lower_bound, upper_bound])
    return lower_cut_idx, upper_cut_idx


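# Illustrative sketch (not part of the original module) of the histogram-based cut:
# bins between the returned indices lie within cut_range half-IQRs of the IQR midpoint.
# The histogram below is made up.
def _example_hist_outliers_nd() -> None:
    centers = numpy.arange(10, dtype=float)              # bins centered at 0..9
    populations = numpy.array([0, 1, 5, 20, 40, 20, 5, 1, 0, 0])
    low_idx, high_idx = hist_outliers_nd(populations, centers)
    assert (low_idx, high_idx) == (1, 7)                 # the bulk of the mass stays inside

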
def hist_outliers_perc(bin_populations: numpy.array,
                       bounds_perc: Tuple[float, float] = (0.01, 0.99)) -> Tuple[int, int]:
    assert len(bin_populations.shape) == 1
    total_count = bin_populations.sum()
    lower_perc = total_count * bounds_perc[0]
    upper_perc = total_count * bounds_perc[1]
    return numpy.searchsorted(numpy.cumsum(bin_populations), [lower_perc, upper_perc])


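# Illustrative sketch (not part of the original module).  The returned pair are bin
# indices bracketing the requested central mass; the histogram is made up.
def _example_hist_outliers_perc() -> None:
    populations = numpy.array([1, 0, 0, 50, 48, 0, 0, 1])
    low_idx, high_idx = hist_outliers_perc(populations, bounds_perc=(0.02, 0.98))
    assert (low_idx, high_idx) == (3, 4)

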
def ts_hist_outliers_perc(bin_populations: numpy.array,
                          window_size: int = 10,
                          bounds_perc: Tuple[float, float] = (0.01, 0.99)) -> Tuple[int, int]:
    assert len(bin_populations.shape) == 2

    # window boundaries; the last window also covers any remainder
    points = list(range(0, len(bin_populations), window_size))
    if points[-1] != len(bin_populations):
        points.append(len(bin_populations))

    ranges = []
    for begin, end in zip(points[:-1], points[1:]):
        window_hist = bin_populations[begin:end].sum(axis=0)
        ranges.append(hist_outliers_perc(window_hist, bounds_perc=bounds_perc))

    return min(i[0] for i in ranges), max(i[1] for i in ranges)


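# Illustrative sketch (not part of the original module).  For a 2-D (time x bin)
# histogram the result is the widest per-window index range; the data is made up.
def _example_ts_hist_outliers_perc() -> None:
    one_slot = numpy.array([1, 0, 50, 48, 0, 1])
    bin_populations = numpy.tile(one_slot, (20, 1))      # 20 time slots, 6 bins
    low_idx, high_idx = ts_hist_outliers_perc(bin_populations, window_size=10, bounds_perc=(0.02, 0.98))
    assert (low_idx, high_idx) == (2, 3)

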
# TODO: revise next
# def difference(y, ynew):
#     """returns average and maximum relative and
#        absolute differences between y and ynew
#        result may contain None values for y = 0
#        return value - tuple:
#        [(abs dif, rel dif) * len(y)],
#        (abs average, abs max),
#        (rel average, rel max)"""
#
#     abs_dlist = []
#     rel_dlist = []
#
#     for y1, y2 in zip(y, ynew):
#         # absolute
#         abs_dlist.append(y1 - y2)
#
#         if y1 > 1E-6:
#             rel_dlist.append(abs(abs_dlist[-1] / y1))
#         else:
#             raise ZeroDivisionError("{0!r} is too small".format(y1))
#
#     da_avg = sum(abs_dlist) / len(abs_dlist)
#     dr_avg = sum(rel_dlist) / len(rel_dlist)
#
#     return (zip(abs_dlist, rel_dlist),
#             (da_avg, max(abs_dlist)), (dr_avg, max(rel_dlist))
#             )