1. Add a README for Windows. 2. Rewrite some Python scripts and bat-files according to Windows requirements. 3. Create tm_brd_parser.py and tm_wheel_parser.py to demonstrate how to work with data in Python.
This commit is contained in:
@@ -232,12 +232,10 @@ def get_data(path, asotr_kit, start_date, end_date, time_accuracy):
|
||||
|
||||
fname = [path + fname_temp, path + fname_tempSet, path + fname_pow]
|
||||
|
||||
dateparse = lambda x: datetime.strptime(x, "%d.%m.%Y %H:%M:%S.%f")
|
||||
|
||||
try:
|
||||
data = [ pd.read_csv(fname[0], sep=";", parse_dates=["timestamp"], date_parser=dateparse),
|
||||
pd.read_csv(fname[1], sep=";", parse_dates=["timestamp"], date_parser=dateparse),
|
||||
pd.read_csv(fname[2], sep=";", parse_dates=["timestamp"], date_parser=dateparse),]
|
||||
data = [ pd.read_csv(fname[0], sep=";", parse_dates=["timestamp"], date_format="%d.%m.%Y %H:%M:%S.%f"),
|
||||
pd.read_csv(fname[1], sep=";", parse_dates=["timestamp"], date_format="%d.%m.%Y %H:%M:%S.%f"),
|
||||
pd.read_csv(fname[2], sep=";", parse_dates=["timestamp"], date_format="%d.%m.%Y %H:%M:%S.%f"),]
|
||||
except FileNotFoundError:
|
||||
print(f'Error opening file: one (or all) file not found in directory: \n{fname}')
|
||||
return
|
||||
|
@@ -12,10 +12,10 @@ copy "..\asotr_csv\target\release\asotr_csv.exe" .\
|
||||
set "path_=%~1"
|
||||
|
||||
REM unpacking recursively archive using Python script
|
||||
python recursive_unpack_targz.py "%path_%"
|
||||
python recursive_unpack_targz.py %path_%
|
||||
|
||||
REM run parser
|
||||
asotr_csv.exe -d "%path_%"
|
||||
asotr_csv.exe -d %path_%
|
||||
|
||||
REM plot data
|
||||
python plot_asotr_flight_all.py
|
||||
|
@@ -1,140 +0,0 @@
|
||||
import pandas as pd
|
||||
import os
|
||||
import re
|
||||
from pathlib import Path
|
||||
import matplotlib.pyplot as plt
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
tstamp_s = '%d.%m.%Y %H:%M:%S.%f'
|
||||
ox_dtime_format = '%d.%m.%Y %H:%M'
|
||||
|
||||
path_itog_brd_data = '../data/brd_data/'
|
||||
|
||||
class PathFileNotFound(Exception):
    """Raised when no file matching the requested pattern is found under a path."""
    pass
|
||||
|
||||
def find_required_files(root_dir, pattern):
    """Recursively collect files under *root_dir* whose names match *pattern*.

    *pattern* is a regular expression tested with re.match (anchored at the
    start of the bare file name).  Returns a sorted list of full paths.

    Raises PathFileNotFound when nothing matches (wrong path or pattern).
    """
    matches = []
    for folder, _, names in os.walk(root_dir):
        matches.extend(folder + '/' + name
                       for name in names if re.match(pattern, name))

    if not matches:
        raise PathFileNotFound(f'error: check that the path is correct ({root_dir}) or files pattern is correct ({pattern})')

    return sorted(matches)
|
||||
|
||||
def read_files_into_df(fname_list, column_list, dtype_columns=None,
                       timestamp_fmt='%d.%m.%Y %H:%M:%S.%f'):
    """Read whitespace-separated telemetry files into one DataFrame.

    Parameters
    ----------
    fname_list : list[str]
        Paths of the files to read.
    column_list : list[str]
        Columns to keep from each file.
    dtype_columns : dict, optional
        Reserved for per-column dtypes; currently unused, kept for
        backward compatibility of the signature.  Default changed from a
        mutable ``{}`` literal to ``None`` (shared-state pitfall).
    timestamp_fmt : str, optional
        strftime format for the generated 'timestamp' column; default is
        the module-wide format used elsewhere in this file.

    Returns
    -------
    pandas.DataFrame
        Concatenated data.  When 'TIME' is requested, 'timestamp'
        (formatted string) and 'time' (float seconds) columns are added
        and rows with time == 0 are dropped.

    Notes
    -----
    Fixes a latent NameError in the original: ``data_clear`` was
    concatenated unconditionally even though it was only assigned inside
    the ``'TIME' in column_list`` branch.
    """
    if dtype_columns is None:
        dtype_columns = {}

    data_itog = pd.DataFrame()
    # board clock counts seconds since the 01.01.2000 epoch
    epoch_start = pd.Timestamp('2000-01-01')

    for fname in fname_list:
        data = pd.read_csv(fname, sep=r'\s+', dtype=str)
        data = data.dropna()
        data = data[column_list]

        if 'TIME' in column_list:
            # convert TIME to a human-readable timestamp (since epoch 01.01.2000)
            time = data['TIME'].astype(float)
            tstamp = epoch_start + pd.to_timedelta(time, unit='s')
            data['timestamp'] = tstamp.dt.strftime(timestamp_fmt)

            # drop rows where the time value == 0 (invalid records)
            data['time'] = time
            data = data.query('time != 0.0')

        data_itog = pd.concat([data_itog, data], ignore_index=True)

    return data_itog
|
||||
|
||||
|
||||
def collect_tm_brd_files(root_dir_tm_data, column_list, column_list_itog):
    """Gather raw tm brd files for each board into one CSV per board.

    Output files go to path_itog_brd_data as mvn_tm_brdNN.csv.
    Processing stops at the first board whose files cannot be read.
    """
    board_patterns = (r'mvn_tm_brd01_(.*)', r'mvn_tm_brd02_(.*)',
                      r'mvn_tm_brd03_(.*)', r'mvn_tm_brd04_(.*)')

    for board_pattern in board_patterns:
        # board_pattern[:12] == 'mvn_tm_brdNN' — the per-board output stem
        out_fname = f'{path_itog_brd_data}{board_pattern[:12]}.csv'
        try:
            file_list = find_required_files(root_dir_tm_data, board_pattern)
            board_data = read_files_into_df(file_list, column_list,
                                            dtype_columns={11: float})
        except KeyError as e:
            print(f'error in collect_tm_brd_files: the specified column name was not found in the data file (path: {root_dir_tm_data}) ({e})')
            break
        except Exception as e:
            print(f'error in collect_tm_brd_files: {e}')
            break

        board_data.to_csv(out_fname, index=False, sep=';',
                          columns=column_list_itog, encoding='utf-8-sig')
        print('data saved: ' + out_fname)
|
||||
|
||||
|
||||
def collect_tm_brd_wheel_data(root_dir_wheel_data, column_list, column_list_itog):
    """Gather raw wheel telemetry files for each board into one CSV per board.

    Only rows with STATE == '0' are kept.  Output files go to
    path_itog_brd_data as mvn_wheel_brdNN.csv.  Processing stops at the
    first board whose files cannot be read.
    """
    patterns_wheel = [r'mvn_wheel_brd01_(.*)', r'mvn_wheel_brd02_(.*)',
                      r'mvn_wheel_brd03_(.*)', r'mvn_wheel_brd04_(.*)']

    for pattern in patterns_wheel:
        # pattern[:15] == 'mvn_wheel_brdNN' — the per-board output stem
        fname = path_itog_brd_data + pattern[:15] + '.csv'
        try:
            found_files = find_required_files(root_dir_wheel_data, pattern)
            data = read_files_into_df(found_files, column_list,
                                      dtype_columns={0: float, 1: int})
        except KeyError as e:
            # BUG FIX: message referenced the undefined name root_dir_tm_data,
            # which raised NameError instead of reporting the real error.
            print(f'error in collect_tm_brd_wheel_data: the specified column name was not found in the data file (path: {root_dir_wheel_data}) ({e})')
            break
        except Exception as e:
            print(f'error in collect_tm_brd_wheel_data: {e}')
            break

        # keep only rows where the wheel state flag is '0'
        data = data[data['STATE'] == '0']
        data.to_csv(fname, index=False, sep=';',
                    columns=column_list_itog, encoding='utf-8-sig')
        print('data saved: ' + fname)
|
||||
|
||||
|
||||
### collect raw tm brd data into one file for each brd ###

root_dir_tm_data = '/home/danila/Danila/work/MVN/flight/brd_data/arch_for_MB/archive_tm_data_txt/'
column_list = ['TIME', 'PER_1Hz', 'ST_HV']
column_list_itog = ['TIME', 'timestamp', 'PER_1Hz', 'ST_HV']

collect_tm_brd_files(root_dir_tm_data, column_list, column_list_itog)


### collect raw tm wheel data into one file for each brd ###

root_dir_wheel_data = '/home/danila/Danila/work/MVN/flight/brd_data/arch_for_MB/archive_wheel_data_txt/'
column_list = ['TIME', 'STATE']
column_list_itog = ['TIME', 'timestamp', 'STATE']

collect_tm_brd_wheel_data(root_dir_wheel_data, column_list, column_list_itog)


## plot 'evolution' 1 Hz from tm brd data

fname = path_itog_brd_data + 'mvn_tm_brd01.csv'
# date_parser is deprecated (removed in pandas >= 2.0); use date_format,
# consistent with the other scripts updated in this commit.
df = pd.read_csv(fname, sep=';', parse_dates=['timestamp'],
                 date_format='%d.%m.%Y %H:%M:%S.%f')

plt.plot(df['timestamp'], df['PER_1Hz'], '.')
plt.show()


## parse and plot wheel csv data

# rows whose period differs from the median by more than this are "peaks"
border_clr_wheel = 2
fname = path_itog_brd_data + 'mvn_wheel_brd01.csv'
wheel_df = pd.read_csv(fname, sep=';')
wheel_df['TIME_diff'] = wheel_df['TIME'].diff()
median_tdiff = wheel_df['TIME_diff'].median()

lo = median_tdiff - border_clr_wheel
hi = median_tdiff + border_clr_wheel

# samples whose period is close to the median (regular rotation)
wheel_df_clear = wheel_df[(wheel_df['TIME_diff'] > lo) &
                          (wheel_df['TIME_diff'] < hi)]

# outliers: gaps or bursts in the wheel telemetry
wheel_df_peaks = wheel_df[(wheel_df['TIME_diff'] <= lo) |
                          (wheel_df['TIME_diff'] >= hi)]

plt.plot(wheel_df_clear['TIME'], wheel_df_clear['TIME_diff'])
plt.show()
|
@@ -31,21 +31,18 @@ width=[1, 1, 1, 1, 1, 1]
|
||||
marker = ['-', '-', '-', '-', '--', '-'];
|
||||
width_arr = [1, 0.5, 0.2, 0.1, 1, 1]
|
||||
|
||||
dateparse = lambda x: datetime.strptime(x, "%d.%m.%Y %H:%M:%S.%f")
|
||||
dparse_b = lambda x: datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
|
||||
|
||||
data_b = pd.read_excel(fname_B,
|
||||
sheet_name=0,
|
||||
usecols=[0,1,2],
|
||||
header=4,
|
||||
names=['turn_num', 'beta_angle', 'timestamp'],
|
||||
parse_dates=['timestamp'],
|
||||
date_parser=dparse_b)
|
||||
date_format='%Y-%m-%d %H:%M:%S')
|
||||
|
||||
|
||||
fname = [path + fname, path + fname_pow]
|
||||
data = [pd.read_csv(fname[0], sep=';', parse_dates=['timestamp'], date_parser=dateparse),
|
||||
pd.read_csv(fname[1], sep=';', parse_dates=['timestamp'], date_parser=dateparse)]
|
||||
data = [pd.read_csv(fname[0], sep=';', parse_dates=['timestamp'], date_format="%d.%m.%Y %H:%M:%S.%f"),
|
||||
pd.read_csv(fname[1], sep=';', parse_dates=['timestamp'], date_format="%d.%m.%Y %H:%M:%S.%f")]
|
||||
|
||||
ch= [[], [], [], [], [], []]
|
||||
ch_signs = ["temp", "pow"]
|
||||
@@ -80,7 +77,7 @@ if plot_windows == 1:
|
||||
|
||||
ax.tick_params(axis="both", width=1, labelsize=font)
|
||||
ax.grid(visible=True, linestyle = 'dotted')
|
||||
ax.set_ylabel('Температура, $^\circ$C', fontsize=font)
|
||||
ax.set_ylabel(r"Температура, $^\circ$C", fontsize=font)
|
||||
ax.set_xlabel('Время', fontsize=font)
|
||||
ax.legend(fontsize=font)
|
||||
|
||||
@@ -117,7 +114,7 @@ elif plot_windows == 2:
|
||||
|
||||
ax1.tick_params(axis="both", width=1, labelsize=font)
|
||||
ax1.grid(visible=True, linestyle = 'dotted')
|
||||
ax1.set_ylabel('Температура, $^\circ$C', fontsize=font)
|
||||
ax1.set_ylabel(r"Температура, $^\circ$C", fontsize=font)
|
||||
ax1.set_xlabel('Время', fontsize=font)
|
||||
ax1.legend(fontsize=font, loc='lower right')
|
||||
|
||||
|
@@ -6,16 +6,16 @@ IF "%~2"=="" (
|
||||
goto :EOF
|
||||
)
|
||||
|
||||
set "path_csv_data=..\data\asotr"
|
||||
set "path_csv_data=..\data\asotr\"
|
||||
set "begin=%~1"
|
||||
set "end=%~2"
|
||||
|
||||
REM run Python-script with parameters
|
||||
python plot_flight_borders.py -s "%path_csv_data%" -c 111100 -a 01 -b %begin% -e %end%
|
||||
python plot_flight_borders.py -s "%path_csv_data%" -c 001000 -a 01 -b %begin% -e %end%
|
||||
python plot_flight_borders.py -s "%path_csv_data%" -c 000011 -a 01 -b %begin% -e %end%
|
||||
python plot_flight_borders.py -s "%path_csv_data%" -c 111100 -a 02 -b %begin% -e %end%
|
||||
python plot_flight_borders.py -s "%path_csv_data%" -c 010100 -a 02 -b %begin% -e %end%
|
||||
python plot_flight_borders.py -s "%path_csv_data%" -c 010000 -a 02 -b %begin% -e %end%
|
||||
python plot_flight_borders.py -s "%path_csv_data%" -c 000100 -a 02 -b %begin% -e %end%
|
||||
python plot_flight_borders.py -s "%path_csv_data%" -c 000011 -a 02 -b %begin% -e %end%
|
||||
python plot_flight_borders.py -s %path_csv_data% -c 111100 -a 01 -b %begin% -e %end%
|
||||
python plot_flight_borders.py -s %path_csv_data% -c 001000 -a 01 -b %begin% -e %end%
|
||||
python plot_flight_borders.py -s %path_csv_data% -c 000011 -a 01 -b %begin% -e %end%
|
||||
python plot_flight_borders.py -s %path_csv_data% -c 111100 -a 02 -b %begin% -e %end%
|
||||
python plot_flight_borders.py -s %path_csv_data% -c 010100 -a 02 -b %begin% -e %end%
|
||||
python plot_flight_borders.py -s %path_csv_data% -c 010000 -a 02 -b %begin% -e %end%
|
||||
python plot_flight_borders.py -s %path_csv_data% -c 000100 -a 02 -b %begin% -e %end%
|
||||
python plot_flight_borders.py -s %path_csv_data% -c 000011 -a 02 -b %begin% -e %end%
|
||||
|
@@ -15,7 +15,7 @@ def plot_asotr_borders(path_with_data, ch, asotr_kit, begin, end, font=14, cmd=0
|
||||
plot_windows = 2
|
||||
|
||||
channels = list(map(int, ch))
|
||||
pict_name = (f'../plots/reports/ASOTR{asotr_kit}_flight_T_P_{asotr.convert_to_str(channels)}_{begin[0:5].replace(".", "")}_{end[0:5].replace(".", "")}_{end[6:]}.png')
|
||||
pict_name = (f'../plots/reports/ASOTR{asotr_kit}_flight_T_P_{asotr.convert_to_str(channels)}_{begin[0:5].replace(".", "")}_{end[0:5].replace(".", "")}.png')
|
||||
|
||||
plot_task = {"temp": 1, "temp_set": 1, "pow": 1}
|
||||
ox_dtime_format = "%d.%m.%Y"
|
||||
@@ -57,7 +57,7 @@ def plot_asotr_borders(path_with_data, ch, asotr_kit, begin, end, font=14, cmd=0
|
||||
|
||||
ax.tick_params(axis="both", width=1, labelsize=font)
|
||||
ax.grid(visible=True, linestyle="dotted")
|
||||
ax.set_ylabel("Температура, $^\circ$C", fontsize=font)
|
||||
ax.set_ylabel(r"Температура, $^\circ$C", fontsize=font)
|
||||
ax.set_xlabel("Время", fontsize=font)
|
||||
ax.legend(fontsize=font)
|
||||
|
||||
@@ -131,7 +131,7 @@ def plot_asotr_borders(path_with_data, ch, asotr_kit, begin, end, font=14, cmd=0
|
||||
|
||||
ax1.tick_params(axis="both", width=1, labelsize=font)
|
||||
ax1.grid(visible=True, linestyle="dotted")
|
||||
ax1.set_ylabel("Температура, $^\circ$C", fontsize=font)
|
||||
ax1.set_ylabel(r"Температура, $^\circ$C", fontsize=font)
|
||||
ax1.set_xlabel("Время", fontsize=font)
|
||||
ax1.legend(fontsize=font)
|
||||
|
||||
|
101
bin/tm_brd_parser.py
Normal file
101
bin/tm_brd_parser.py
Normal file
@@ -0,0 +1,101 @@
|
||||
import pandas as pd
|
||||
import os
|
||||
import re
|
||||
from pathlib import Path
|
||||
import matplotlib.pyplot as plt
|
||||
from datetime import datetime, timedelta
|
||||
import sys
|
||||
|
||||
tstamp_s = '%d.%m.%Y %H:%M:%S.%f'
|
||||
ox_dtime_format = '%d.%m.%Y %H:%M'
|
||||
|
||||
path_itog_brd_data = '../data/brd_data/'
|
||||
|
||||
|
||||
class PathFileNotFound(Exception):
    """Raised when no file matching the requested pattern is found under a path."""
    pass
|
||||
|
||||
|
||||
def find_required_files(root_dir, pattern):
    """Recursively search *root_dir* for files whose names match *pattern*.

    Parameters
    ----------
    root_dir : str
        Directory walked recursively.
    pattern : str
        Regular expression tested against each bare file name with
        re.match (anchored at the start of the name).

    Returns
    -------
    list[str]
        Sorted list of full paths of the matching files.

    Raises
    ------
    PathFileNotFound
        If no file matches (wrong path or wrong pattern).
    """
    matcher = re.compile(pattern)  # compile once instead of per file
    result = []
    for dirpath, _, filenames in os.walk(root_dir):
        for filename in filenames:
            if matcher.match(filename):
                # os.path.join keeps the separator consistent on Windows
                # instead of mixing '\\' (from os.walk) with a literal '/'
                result.append(os.path.join(dirpath, filename))

    if not result:
        raise PathFileNotFound(f'error: check that the path is correct ({root_dir}) or files pattern is correct ({pattern})')

    return sorted(result)
|
||||
|
||||
|
||||
def read_files_into_df(fname_list, column_list, dtype_columns=None,
                       timestamp_fmt='%d.%m.%Y %H:%M:%S.%f'):
    """Read whitespace-separated telemetry files into one DataFrame.

    Parameters
    ----------
    fname_list : list[str]
        Paths of the files to read.
    column_list : list[str]
        Columns to keep from each file.
    dtype_columns : dict, optional
        Reserved for per-column dtypes; currently unused, kept for
        backward compatibility of the signature.  Default changed from a
        mutable ``{}`` literal to ``None`` (shared-state pitfall).
    timestamp_fmt : str, optional
        strftime format for the generated 'timestamp' column; default is
        the module-wide format used elsewhere in this file.

    Returns
    -------
    pandas.DataFrame
        Concatenated data.  When 'TIME' is requested, 'timestamp'
        (formatted string) and 'time' (float seconds) columns are added
        and rows with time == 0 are dropped.

    Notes
    -----
    Fixes a latent NameError in the original: ``data_clear`` was
    concatenated unconditionally even though it was only assigned inside
    the ``'TIME' in column_list`` branch.
    """
    if dtype_columns is None:
        dtype_columns = {}

    data_itog = pd.DataFrame()
    # board clock counts seconds since the 01.01.2000 epoch
    epoch_start = pd.Timestamp('2000-01-01')

    for fname in fname_list:
        data = pd.read_csv(fname, sep=r'\s+', dtype=str)
        data = data.dropna()
        data = data[column_list]

        if 'TIME' in column_list:
            # convert TIME to a human-readable timestamp (since epoch 01.01.2000)
            time = data['TIME'].astype(float)
            tstamp = epoch_start + pd.to_timedelta(time, unit='s')
            data['timestamp'] = tstamp.dt.strftime(timestamp_fmt)

            # drop rows where the time value == 0 (invalid records)
            data['time'] = time
            data = data.query('time != 0.0')

        data_itog = pd.concat([data_itog, data], ignore_index=True)

    return data_itog
|
||||
|
||||
|
||||
def collect_tm_brd_files(root_dir_tm_data, column_list, column_list_itog):
    """Gather raw tm brd files for each board into one CSV per board.

    Output files go to path_itog_brd_data as mvn_tm_brdNN.csv.
    Processing stops at the first board whose files cannot be read.
    """
    board_patterns = (r'mvn_tm_brd01_(.*)', r'mvn_tm_brd02_(.*)',
                      r'mvn_tm_brd03_(.*)', r'mvn_tm_brd04_(.*)')

    for board_pattern in board_patterns:
        # board_pattern[:12] == 'mvn_tm_brdNN' — the per-board output stem
        out_fname = f'{path_itog_brd_data}{board_pattern[:12]}.csv'
        try:
            file_list = find_required_files(root_dir_tm_data, board_pattern)
            board_data = read_files_into_df(file_list, column_list,
                                            dtype_columns={11: float})
        except KeyError as e:
            print(f'error in collect_tm_brd_files: the specified column name was not found in the data file (path: {root_dir_tm_data}) ({e})')
            break
        except Exception as e:
            print(f'error in collect_tm_brd_files: {e}')
            break

        board_data.to_csv(out_fname, index=False, sep=';',
                          columns=column_list_itog, encoding='utf-8-sig')
        print('data saved: ' + out_fname)
|
||||
|
||||
|
||||
if __name__ == "__main__":

    # Expect exactly one argument: the root directory with raw tm brd data.
    if len(sys.argv) != 2:
        print("Usage: python tm_brd_parser.py /path/to/tm_brd_data/")
    else:
        tm_root = sys.argv[1]

        print('collect raw brd tm data into one file for each brd')

        wanted_columns = ['TIME', 'PER_1Hz', 'ST_HV']
        output_columns = ['TIME', 'timestamp', 'PER_1Hz', 'ST_HV']

        collect_tm_brd_files(tm_root, wanted_columns, output_columns)

        # plot 'evolution' of the 1 Hz period from the first board's data
        print('plot evolution 1 Hz from tm brd data')

        brd01_csv = path_itog_brd_data + 'mvn_tm_brd01.csv'
        brd01 = pd.read_csv(brd01_csv, sep=';', parse_dates=['timestamp'],
                            date_format="%d.%m.%Y %H:%M:%S.%f")

        plt.plot(brd01['timestamp'], brd01['PER_1Hz'], '.')
        plt.show()
|
114
bin/tm_wheel_parser.py
Normal file
114
bin/tm_wheel_parser.py
Normal file
@@ -0,0 +1,114 @@
|
||||
import pandas as pd
|
||||
import os
|
||||
import re
|
||||
from pathlib import Path
|
||||
import matplotlib.pyplot as plt
|
||||
from datetime import datetime, timedelta
|
||||
import sys
|
||||
|
||||
tstamp_s = '%d.%m.%Y %H:%M:%S.%f'
|
||||
ox_dtime_format = '%d.%m.%Y %H:%M'
|
||||
|
||||
path_itog_brd_data = '../data/brd_data/'
|
||||
|
||||
|
||||
class PathFileNotFound(Exception):
    """Raised when no file matching the requested pattern is found under a path."""
    pass
|
||||
|
||||
|
||||
def find_required_files(root_dir, pattern):
    """Recursively collect files under *root_dir* whose names match *pattern*.

    *pattern* is a regular expression tested with re.match (anchored at the
    start of the bare file name).  Returns a sorted list of full paths.

    Raises PathFileNotFound when nothing matches (wrong path or pattern).
    """
    matches = []
    for folder, _, names in os.walk(root_dir):
        matches.extend(folder + '/' + name
                       for name in names if re.match(pattern, name))

    if not matches:
        raise PathFileNotFound(
            f'error: check that the path is correct ({root_dir}) or files pattern is correct ({pattern})')

    return sorted(matches)
|
||||
|
||||
|
||||
def read_files_into_df(fname_list, column_list, dtype_columns=None,
                       timestamp_fmt='%d.%m.%Y %H:%M:%S.%f'):
    """Read whitespace-separated telemetry files into one DataFrame.

    Parameters
    ----------
    fname_list : list[str]
        Paths of the files to read.
    column_list : list[str]
        Columns to keep from each file.
    dtype_columns : dict, optional
        Reserved for per-column dtypes; currently unused, kept for
        backward compatibility of the signature.  Default changed from a
        mutable ``{}`` literal to ``None`` (shared-state pitfall).
    timestamp_fmt : str, optional
        strftime format for the generated 'timestamp' column; default is
        the module-wide format used elsewhere in this file.

    Returns
    -------
    pandas.DataFrame
        Concatenated data.  When 'TIME' is requested, 'timestamp'
        (formatted string) and 'time' (float seconds) columns are added
        and rows with time == 0 are dropped.

    Notes
    -----
    Fixes a latent NameError in the original: ``data_clear`` was
    concatenated unconditionally even though it was only assigned inside
    the ``'TIME' in column_list`` branch.
    """
    if dtype_columns is None:
        dtype_columns = {}

    data_itog = pd.DataFrame()
    # board clock counts seconds since the 01.01.2000 epoch
    epoch_start = pd.Timestamp('2000-01-01')

    for fname in fname_list:
        data = pd.read_csv(fname, sep=r'\s+', dtype=str)
        data = data.dropna()
        data = data[column_list]

        if 'TIME' in column_list:
            # convert TIME to a human-readable timestamp (since epoch 01.01.2000)
            time = data['TIME'].astype(float)
            tstamp = epoch_start + pd.to_timedelta(time, unit='s')
            data['timestamp'] = tstamp.dt.strftime(timestamp_fmt)

            # drop rows where the time value == 0 (invalid records)
            data['time'] = time
            data = data.query('time != 0.0')

        data_itog = pd.concat([data_itog, data], ignore_index=True)

    return data_itog
|
||||
|
||||
|
||||
def collect_tm_brd_wheel_data(root_dir_wheel_data, column_list, column_list_itog):
    """Gather raw wheel telemetry files for each board into one CSV per board.

    Only rows with STATE == '0' are kept.  Output files go to
    path_itog_brd_data as mvn_wheel_brdNN.csv.  Processing stops at the
    first board whose files cannot be read.
    """
    patterns_wheel = [r'mvn_wheel_brd01_(.*)', r'mvn_wheel_brd02_(.*)',
                      r'mvn_wheel_brd03_(.*)', r'mvn_wheel_brd04_(.*)']

    for pattern in patterns_wheel:
        # pattern[:15] == 'mvn_wheel_brdNN' — the per-board output stem
        fname = path_itog_brd_data + pattern[:15] + '.csv'
        try:
            found_files = find_required_files(root_dir_wheel_data, pattern)
            data = read_files_into_df(found_files, column_list,
                                      dtype_columns={0: float, 1: int})
        except KeyError as e:
            # BUG FIX: message referenced the undefined name root_dir_tm_data,
            # which raised NameError instead of reporting the real error.
            print(f'error in collect_tm_brd_wheel_data: the specified column name was not found in the data file (path: {root_dir_wheel_data}) ({e})')
            break
        except Exception as e:
            print(f'error in collect_tm_brd_wheel_data: {e}')
            break

        # keep only rows where the wheel state flag is '0'
        data = data[data['STATE'] == '0']
        data.to_csv(fname, index=False, sep=';',
                    columns=column_list_itog, encoding='utf-8-sig')
        print('data saved: ' + fname)
|
||||
|
||||
|
||||
if __name__ == "__main__":

    # Expect exactly one argument: the root directory with raw wheel data.
    if len(sys.argv) != 2:
        print("Usage: python tm_wheel_parser.py /path/to/tm_brd_data/")
    else:
        wheel_root = sys.argv[1]

        ### collect raw tm wheel data into one file for each brd ###
        print('collect raw tm wheel data into one file for each brd')

        wanted_columns = ['TIME', 'STATE']
        output_columns = ['TIME', 'timestamp', 'STATE']

        collect_tm_brd_wheel_data(wheel_root, wanted_columns, output_columns)

        ## parse and plot wheel csv data
        print('parse and plot wheel csv data')

        # rows whose period differs from the median by more than this are peaks
        border_clr_wheel = 2
        wheel_csv = path_itog_brd_data + 'mvn_wheel_brd01.csv'
        wheel_df = pd.read_csv(wheel_csv, sep=';')
        wheel_df['TIME_diff'] = wheel_df['TIME'].diff()
        median_tdiff = wheel_df['TIME_diff'].median()

        lo = median_tdiff - border_clr_wheel
        hi = median_tdiff + border_clr_wheel

        # samples whose period is close to the median (regular rotation);
        # NOTE: keep both explicit masks — ~in_band would also pick up the
        # NaN first-diff row, which the original excludes from both frames
        wheel_df_clear = wheel_df[(wheel_df['TIME_diff'] > lo) &
                                  (wheel_df['TIME_diff'] < hi)]

        # outliers: gaps or bursts in the wheel telemetry
        wheel_df_peaks = wheel_df[(wheel_df['TIME_diff'] <= lo) |
                                  (wheel_df['TIME_diff'] >= hi)]

        plt.plot(wheel_df_clear['TIME'], wheel_df_clear['TIME_diff'], '-')
        plt.plot(wheel_df_peaks['TIME'], wheel_df_peaks['TIME_diff'], '.')
        plt.show()
|
Reference in New Issue
Block a user