asotr.py: fix bug in decoding commands to human-readable format (cmd_decode); raise an exception if no index is found for the specified time
This commit is contained in:
parent 94ddbeb456
commit 9611d99828
.gitignore (vendored): 2 changes
@@ -5,4 +5,6 @@
 *.zip
 *.log
 *.txt
+*.xls
+*.xlsx
 /__pycache__
asotr.py: 33 changes
@@ -17,6 +17,13 @@ danila_gamkov@cosmos.ru
 """
 __author__ = 'Danila Gamkov'
 
+
+class IdxNotFound(Exception):
+    pass
+
+class TimeIndexNotFound(Exception):
+    pass
+
 fname_json_decode = './decode_asotr_cmd.json'
 
 def get_utc_seconds(timestamp_str, timestamp_format):
@@ -66,11 +73,9 @@ def cmd_decode(cmd_string):
     if 'OK' in cmd_string:
         return out
 
-    cmd = cmd_string.split(' ')
-    if len(cmd) > 5:
-        return out
-
-    if cmd[1] == '':
+    cmd = cmd_string.split()
+    if len(cmd) > 5:
         return out
 
     if '1' in cmd[0]:
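
Note: the switch from cmd_string.split(' ') to cmd_string.split() above is the cmd_decode fix named in the commit title. A minimal sketch of the difference, using a made-up command string rather than real ASOTR telemetry:

    cmd_string = 'CMD1  SET  25.5'     # fields separated by runs of spaces

    cmd_string.split(' ')   # ['CMD1', '', 'SET', '', '25.5']  (empty items, inflated length)
    cmd_string.split()      # ['CMD1', 'SET', '25.5']          (splits on any whitespace run)

With split(' ') every extra space produces an empty element, which inflates len(cmd) and is presumably why the removed check if cmd[1] == '' existed; split() with no argument collapses whitespace runs, so only the len(cmd) > 5 guard remains.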
@@ -191,9 +196,6 @@ def find_best_time_idx(time_arr, user_time, accuracy='minutes') -> int:
 
             return mid
 
-class TimeIndexNotFound(Exception):
-    pass
-
 def find_time_idx(data_list, keys_list, timestamp, accuracy):
     out_dict = dict.fromkeys(keys_list, -1)
 
@@ -412,15 +414,25 @@ def cut_data(data, time_begin, duration_sec, accuracy='seconds'):
     time_end = tstamp_end.strftime(time_format)
 
     idx_begin = find_best_time_idx(data['timestamp'], time_begin, accuracy)
 
     idx_end = find_best_time_idx(data['timestamp'], time_end, accuracy)
 
+    if idx_begin == -1 or idx_end == -1:
+        raise IdxNotFound(f"error finding array index corresponding to timestamp: check time_begin ({time_begin}) or time_end ({time_end})")
+
+    idx_end = idx_end - 1
     out = data.loc[idx_begin : idx_end]
     return out
 
 
 def cut_norm_data(data, time_begin, duration_sec, channel='ch1',
         interp={'method': 'cubic', 'order': 2}, accuracy='seconds'):
 
-    data_period = cut_data(data, time_begin, duration_sec, accuracy)
+    try:
+        data_period = cut_data(data, time_begin, duration_sec, accuracy)
+    except IdxNotFound as e:
+        print(f'{e}')
 
     temp_norm = data_period[channel].values - data_period[channel].iloc[0]
     time_l = list(data_period['timestamp'])
     temp_l = list(temp_norm)
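
Note: with this hunk cut_data raises IdxNotFound when find_best_time_idx returns -1 for either end of the window, instead of silently slicing with a -1 index. A hedged usage sketch; the DataFrame below is a toy stand-in and the timestamp format is an assumption taken from the scripts later in this commit:

    import pandas as pd
    import asotr  # the module patched in this commit

    # toy frame with the 'timestamp' column that cut_data slices on
    data = pd.DataFrame({
        'timestamp': ['01.05.2025 00:00:00', '01.05.2025 00:01:00'],
        'ch1': [25.0, 25.2],
    })

    try:
        # a window outside data['timestamp'] should now raise instead of
        # returning a bogus slice built from -1 indices
        period = asotr.cut_data(data, '02.05.2025 00:00:00', 60, accuracy='minutes')
    except asotr.IdxNotFound as e:
        print(e)
        period = None

In cut_norm_data above the exception is only printed, so if cut_data does raise, the following data_period[channel] access would still fail with a NameError; callers that need to recover can catch asotr.IdxNotFound themselves, as in the sketch.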
@@ -433,13 +445,14 @@ def cut_norm_data(data, time_begin, duration_sec, channel='ch1',
 
     return orig_data, interp_data
 
 
 def get_step_response_diff(data, thermocycle_info, channel='ch1',
         interp={'method': 'cubic', 'order': 2}, accuracy='seconds', cut_step_resp={}):
 
     date = thermocycle_info['date']
 
     time_begin_orig = date + ' ' + thermocycle_info['time_begin'][0]
     time_begin_step = date + ' ' + thermocycle_info['time_begin'][1]
 
     duration_sec = thermocycle_info['duration_sec']
 
     _, orig_interp_cycle = cut_norm_data(data, time_begin_orig, duration_sec, channel,
@@ -460,8 +473,12 @@ def get_step_response_diff(data, thermocycle_info, channel='ch1',
 
     if len(cut_step_resp) > 0:
         time_begin = date + ' ' + cut_step_resp['time_step_begin']
-        step_response = cut_data(step_response, time_begin,
-            cut_step_resp['step_duration'], accuracy='seconds')
+        try:
+            step_response = cut_data(step_response, time_begin,
+                cut_step_resp['step_duration'], accuracy='seconds')
+        except IdxNotFound as e:
+            print(f'{e}')
 
     first = step_response['temp'].iloc[0]
     step_response['temp'] = step_response['temp'] - first
 
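
Note: thermocycle_info and cut_step_resp appear in this diff only through the keys they are read with ('date', 'time_begin', 'duration_sec', 'time_step_begin', 'step_duration'). A hypothetical call showing the expected shape; every value below is made up:

    thermocycle_info = {
        'date': '04.05.2025',                    # placeholder values, only the keys come from the diff
        'time_begin': ['00:42:00', '03:48:00'],  # [reference cycle start, step cycle start]
        'duration_sec': 7200,
    }
    cut_step_resp = {
        'time_step_begin': '03:50:00',
        'step_duration': 1800,
    }

    # 'data' is assumed to be a temperatures DataFrame such as the ones loaded with
    # asotr.get_data() in the scripts below; the return value is not shown in the diff
    # and is assumed here to be the step-response frame.
    step_resp = asotr.get_step_response_diff(data, thermocycle_info, channel='ch3',
                                             cut_step_resp=cut_step_resp)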
data/.~lock.asotr01_data_T.csv# (new file): 1 change
@@ -0,0 +1 @@
+,danila,danila-IdeaPad,20.05.2025 18:07,file:///home/danila/.config/libreoffice/4;

data/beta_2025.xlsx (new file): BIN
Binary file not shown.

data/experiments/cyclogram_imp_ident_ch3.xls (new file): BIN
Binary file not shown.

data/experiments/cyclogram_step_ident_ch3.xls (new file): BIN
Binary file not shown.
@@ -7,12 +7,14 @@ import pandas as pd
 from datetime import datetime, timedelta
 
 path_data = '/home/danila/Danila/work/MVN/Soft/asotr_csv/data/'
-fname = '/home/danila/Danila/work/MVN/Soft/PID/data/flight/cmd_asotr/all_flight_cmd_asotr.csv'
+fname_cmd_flight = '/home/danila/Danila/work/MVN/Soft/PID/data/flight/cmd_asotr/all_flight_cmd_asotr.csv'
 fname_cmd_temp = './data/flight_cmd_temp.csv'
+timeformat = '%d.%m.%Y %H:%M:%S'
+prev_days = 25
 
 ## get flight commands file (generated by mvn_log_viewer)
 ## Translate to human-readeble format and take temperatures from flight commands file
-cmd_list, temperature_list = asotr.get_cmd_data(fname)
+cmd_list, temperature_list = asotr.get_cmd_data(fname_cmd_flight)
 with open('./data/cmd_human.csv', 'w') as file:
     for elem in cmd_list:
         file.write(f'{elem}\n')
@@ -26,22 +28,18 @@ with open(fname_cmd_temp, 'w') as file:
 ## insert temperatures from flight commands file to main asotr temperatures data files
 df_asotr_ = asotr.insert_temp_data_from_flight_cmd(fname_cmd_temp, path_data)
 
-## form timestamp file where minimum of temperatures were registered
+## form timestamp file where minimum of temperatures registered
 end_date = ''
 for i, data in enumerate(df_asotr_):
     end_date = data['timestamp'].iloc[len(data) - 1][0:18]
     data.to_csv(f'./data/asotr0{i+1}_data_T.csv', index=False, sep=';',
         encoding='utf-8-sig', decimal='.')
 
-timeformat = '%d.%m.%Y %H:%M:%S'
-prev_days = 14
-
 delta_date = datetime.strptime(end_date, timeformat) - timedelta(days=prev_days)
 start_date = delta_date.strftime(timeformat)
 
 for kit in range(1,3):
     asotr_kit = f'0{kit}'
-    print(asotr_kit)
 
     _, data_dict = asotr.get_data(path_data, asotr_kit, start_date, end_date, 'minutes')
 
@@ -59,12 +57,14 @@ for kit in range(1,3):
 
         min_temp_ch.append(min_temp_period)
 
+    fname = f'./data/asotr{asotr_kit}_min_T.csv'
+
     df = pd.DataFrame(min_temp_ch).transpose()
-    df.to_csv(f'./data/asotr{asotr_kit}_min_T.csv', header=False, index=False, sep=';',
+    df.to_csv(fname, header=False, index=False, sep=';',
         encoding='utf-8-sig', decimal='.')
-    df1 = pd.read_csv(f'./data/asotr{asotr_kit}_min_T.csv', sep=';',
+    df1 = pd.read_csv(fname, sep=';',
         names=['ch1','ch2','ch3','ch4','ch5','ch6'])
-    df1.to_csv(f'./data/asotr{asotr_kit}_min_T.csv', index=False, sep=';',
+    df1.to_csv(fname, index=False, sep=';',
         encoding='utf-8-sig', decimal='.')
 
@@ -186,4 +186,4 @@ if __name__ == '__main__':
     args = argparser.parse_args()
 
     plot_asotr_borders(args.source, args.channel, args.asotr, args.begin, args.end,
-        args.font, args.cmd, args.plot)
+        args.font, args.cmd, show_flag=args.plot)
@@ -12,11 +12,11 @@ pict_name = 'periods_profile_10042025.png'
 path = '/home/danila/Danila/work/MVN/Soft/asotr_csv/data/'
 channel = 'ch1'
 asotr_kit = '01'
-start_date = '10.04.2025 04:00:00'
-end_date = '10.04.2025 12:00:00'
+start_date = '24.04.2025 22:30:00'
+end_date = '25.04.2025 02:00:00'
 # start_date = '06.01.2025 22:40:00'
 # end_date = '21.01.2025 01:20:00'
-shift = True
+shift = False
 
 raw_data, data_dict = asotr.get_data(path, asotr_kit, start_date, end_date, 'minutes')
 
@@ -8,12 +8,12 @@ from datetime import datetime, timedelta
 
 path = './data/experiments/'
 
-timestamp = '03.05.2025 01:27:00'
-cyclogram_file = 'cyclogram_step_ident_ch1.xls'
+timestamp = '04.05.2025 00:42:00'
+cyclogram_file = 'cyclogram_step_ident_ch3.xls'
 asotr.insert_data_cyclo(timestamp, cyclogram_file, path)
 
-timestamp = '03.05.2025 04:33:00'
-cyclogram_file = 'cyclogram_imp_ident_ch1.xls'
+timestamp = '04.05.2025 03:48:00'
+cyclogram_file = 'cyclogram_imp_ident_ch3.xls'
 asotr.insert_data_cyclo(timestamp, cyclogram_file, path)
 
 
@@ -12,8 +12,8 @@ from datetime import timedelta
 path = '/home/danila/Danila/work/MVN/Soft/asotr_csv/data/'
 channel = 'ch1'
 asotr_kit = '01'
-start_date = '22.04.2025 00:00:00'
-end_date = '23.04.2025 01:20:00'
+start_date = '25.04.2025 00:00:00'
+end_date = '25.04.2025 08:00:00'
 forecast_days = 20
 
 