|
| 1 | +from ..core.ledsa_conf import ConfigData |
| 2 | +import numpy as np |
| 3 | +import pandas as pd |
| 4 | +from ..core import _led_helper as led |
| 5 | +import os |
| 6 | + |
# OS-specific path separator; used below to build the ./analysis/... file paths.
sep = os.path.sep
| 9 | + |
| 10 | + |
def normalize_fitpar(fitpar, channel):
    """Add a ``normalized_<fitpar>`` column to the per-channel parameter table.

    Each value is divided by that LED's smoke-free average (first images),
    and the updated table is written back to the channel's HDF5 store.

    :param fitpar: name of the fit parameter column to normalize
    :param channel: color channel whose parameter table is processed
    """
    fit_parameters = read_hdf(channel)
    average = calculate_average_fitpar_without_smoke(fitpar, channel)
    fit_parameters[f'normalized_{fitpar}'] = fit_parameters[fitpar].div(average)
    # mode='w' truncates the store in a single call instead of os.remove()
    # followed by to_hdf(): no window where the file is missing, and no
    # FileNotFoundError if it was never created.
    fit_parameters.to_hdf(f".{sep}analysis{sep}channel{channel}{sep}all_parameters.h5", 'table', mode='w')
| 17 | + |
| 18 | + |
def calculate_average_fitpar_without_smoke(fitpar, channel, num_of_imgs=20):
    """Return the per-LED mean of *fitpar* over the first *num_of_imgs* images.

    The first images are assumed to be smoke free — TODO confirm with the
    experiment protocol.

    :param fitpar: name of the fit parameter column to average
    :param channel: color channel whose parameter table is read
    :param num_of_imgs: number of leading images considered smoke free
    :return: Series of means indexed by led_id
    """
    fit_parameters = read_hdf(channel)
    idx = pd.IndexSlice
    fit_parameters = fit_parameters.loc[idx[1:num_of_imgs, :]]
    # Series.mean(level=...) was removed in pandas 2.0; groupby on the index
    # level is the supported replacement and yields the same per-LED means.
    return fit_parameters[fitpar].groupby(level='led_id').mean()
| 24 | + |
| 25 | + |
def create_binary_data(channel):
    """Collect every image's LED fit parameters into one HDF5 table.

    Reads the per-image ``<img>_led_positions.csv`` files of *channel*,
    appends physical LED coordinates, and writes the combined table indexed
    by (img_id, led_id) to ``analysis/channel<channel>/all_parameters.h5``.
    Missing image files are kept as all-NaN rows so gaps stay visible.

    :param channel: color channel whose fit results are aggregated
    """
    conf = ConfigData()
    columns = ["img_id", "led_id", "line", "x", "y", "dx", "dy", "A", "alpha",
               "wx", "wy", "fit_success", "fit_fun", "fit_nfev", "sum_col_val",
               "mean_col_val", "width", "height"]

    # find time and fit parameter for every image
    first_img = int(conf['analyse_photo']['first_img'])
    last_img = int(conf['analyse_photo']['last_img'])
    # TODO: add max img range to config
    # Image numbering wraps around at 9999, hence the modulo arithmetic.
    number_of_images = (9999 + last_img - first_img) % 9999
    number_of_images //= int(conf['analyse_photo']['skip_imgs']) + 1
    print('Loading fit parameters...')
    exception_counter = 0
    # DataFrame.append was removed in pandas 2.0 (and re-copied the whole
    # frame on every call); collect per-image frames and concat once instead.
    frames = []
    for image_id in range(1, number_of_images + 1):
        try:
            parameters = led.load_file(".{}analysis{}channel{}{}{}_led_positions.csv".format(
                sep, sep, channel, sep, image_id), delim=',', silent=True)
        except (FileNotFoundError, IOError):
            # Missing image -> one all-NaN row (img_id column is added later).
            frames.append(_param_array_to_dataframe([[np.nan] * (len(columns) - 1)], image_id))
            exception_counter += 1
            continue

        parameters = parameters[parameters[:, 0].argsort()]  # sort for led_id
        parameters = _append_coordinates(parameters)
        frames.append(_param_array_to_dataframe(parameters, image_id))

    fit_params = pd.concat(frames, ignore_index=True, sort=False) if frames else pd.DataFrame(columns=columns)
    print(f'{number_of_images - exception_counter} of {number_of_images} loaded.')
    fit_params.set_index(['img_id', 'led_id'], inplace=True)
    fit_params.to_hdf(f".{sep}analysis{sep}channel{channel}{sep}all_parameters.h5", 'table', append=True)
| 73 | + |
| 74 | + |
def clean_bin_data(channel=-1):
    """Remove derived columns from the binary data store.

    Not implemented yet. Raising NotImplementedError instead of calling
    exit() keeps the error catchable and does not kill the host process
    when this module is used as a library.

    :param channel: color channel to clean; -1 presumably means all channels
        — TODO confirm once implemented
    :raises NotImplementedError: always
    """
    raise NotImplementedError('clean_bin_data not implemented')
| 77 | + |
| 78 | + |
| 79 | +def _param_array_to_dataframe(array, img_id): |
| 80 | + appended_array = np.empty((np.shape(array)[0], np.shape(array)[1] + 1)) |
| 81 | + appended_array[:, 0] = img_id |
| 82 | + appended_array[:, 1:] = array |
| 83 | + fit_params = pd.DataFrame(appended_array, columns=["img_id", "led_id", "line", "x", "y", "dx", "dy", "A", "alpha", |
| 84 | + "wx", "wy", "fit_success", "fit_fun", "fit_nfev", "sum_col_val", |
| 85 | + "mean_col_val", "width", "height"]) |
| 86 | + return fit_params |
| 87 | + |
| 88 | + |
def _append_coordinates(params):
    """Append each LED's physical coordinates as two extra columns.

    The coordinate table is loaded once and memoized on the function object;
    ``False`` is cached as a sentinel when the file is unavailable, in which
    case NaN columns are appended instead.
    """
    cache = _append_coordinates.__dict__
    if "coord" not in cache:
        try:
            cache["coord"] = led.load_file(".{}analysis{}led_search_areas_with_coordinates.csv".format(sep, sep),
                                           delim=',', silent=True)[:, [0, -2, -1]]
        except (FileNotFoundError, IOError):
            # Remember the failure so we do not retry on every image.
            cache["coord"] = False

    coord = cache["coord"]
    if isinstance(coord, bool):
        return _append_nans(params)
    return _append_coordinates_to_params(params, coord)
| 102 | + |
| 103 | + |
| 104 | +def _append_nans(params): |
| 105 | + p_with_nans = np.empty((np.shape(params)[0], np.shape(params)[1] + 2)) |
| 106 | + p_with_nans[:] = np.NaN |
| 107 | + p_with_nans[:, :-2] = params |
| 108 | + return p_with_nans |
| 109 | + |
| 110 | + |
| 111 | +def _append_coordinates_to_params(params, coord): |
| 112 | + p_with_c = np.empty((np.shape(params)[0], np.shape(params)[1] + 2)) |
| 113 | + p_with_c[:, :-2] = params |
| 114 | + |
| 115 | + if p_with_c.shape[0] != coord.shape[0]: |
| 116 | + mask = np.zeros(coord.shape) |
| 117 | + for led_id in p_with_c[:, 0]: |
| 118 | + mask = np.logical_or(mask, np.repeat((coord[:, 0] == led_id), coord.shape[1]).reshape(coord.shape)) |
| 119 | + coord = np.reshape(coord[mask], (params.shape[0], coord.shape[1])) |
| 120 | + |
| 121 | + p_with_c[:, -2:] = coord[:, -2:] |
| 122 | + return p_with_c |
| 123 | + |
| 124 | + |
def read_hdf(channel):
    """Return the channel's parameter table, building it first if absent.

    :param channel: color channel whose HDF5 store is read
    :return: DataFrame indexed by (img_id, led_id)
    """
    path = f".{sep}analysis{sep}channel{channel}{sep}all_parameters.h5"
    try:
        return pd.read_hdf(path, 'table')
    except FileNotFoundError:
        # First access: assemble the binary store from the CSV results.
        create_binary_data(channel)
        return pd.read_hdf(path, 'table')
| 132 | + |
| 133 | + |
def include_column_if_nonexistent(fit_parameters, fit_par, channel):
    """Ensure *fit_par* exists as a column, computing it on demand.

    Only ``normalized_<param>`` columns can be derived; anything else that
    is missing is an error.

    :param fit_parameters: parameter table previously read for *channel*
    :param fit_par: column name that must be present
    :param channel: color channel the table belongs to
    :return: a table containing *fit_par* (reloaded if it was derived)
    :raises Exception: if the missing column cannot be derived
    """
    if fit_par in fit_parameters.columns:
        return fit_parameters
    if fit_par.split('_')[0] != 'normalized':
        raise Exception(f'Can not handle fit parameter: {fit_par}')
    normalize_fitpar(fit_par.split('normalized_')[1], channel)
    return read_hdf(channel)
0 commit comments