project reorganization: 1. executable files are now in the bin directory. 2. add recursive_unpack_targz.py for recursively unpacking the tar.gz archives with MVN data specified in that script. 3. add asotr_unzip_plot.sh bash script for unpacking MVN data, collecting asotr data into csv files and plotting asotr MVN data. 4. add brd_wheel_1Hz_parser.py to demonstrate how to work with brd telemetry data

This commit is contained in:
Danila Gamkov 2025-06-06 10:54:25 +03:00
parent 2f37a7329b
commit b04009ad27
34 changed files with 2151 additions and 138 deletions

7
.gitignore vendored
View File

@ -7,4 +7,9 @@
*.txt
*.xls
*.xlsx
/__pycache__
*.csv#
*.doc
*.docx
/bin/__pycache__
/asotr_csv/target/debug
/asotr_csv/target/release

560
asotr_csv/Cargo.lock generated Normal file
View File

@ -0,0 +1,560 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "aho-corasick"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
dependencies = [
"memchr",
]
[[package]]
name = "android-tzdata"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
[[package]]
name = "android_system_properties"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
dependencies = [
"libc",
]
[[package]]
name = "anstream"
version = "0.6.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b"
dependencies = [
"anstyle",
"anstyle-parse",
"anstyle-query",
"anstyle-wincon",
"colorchoice",
"is_terminal_polyfill",
"utf8parse",
]
[[package]]
name = "anstyle"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9"
[[package]]
name = "anstyle-parse"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c"
dependencies = [
"windows-sys",
]
[[package]]
name = "anstyle-wincon"
version = "3.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e"
dependencies = [
"anstyle",
"once_cell",
"windows-sys",
]
[[package]]
name = "asotr_csv"
version = "0.1.0"
dependencies = [
"byteorder",
"chrono",
"clap",
"lazy_static",
"regex",
"strum",
"walkdir",
]
[[package]]
name = "autocfg"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
[[package]]
name = "bumpalo"
version = "3.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
[[package]]
name = "byteorder"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "cc"
version = "1.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13208fcbb66eaeffe09b99fffbe1af420f00a7b35aa99ad683dfc1aa76145229"
dependencies = [
"shlex",
]
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chrono"
version = "0.4.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825"
dependencies = [
"android-tzdata",
"iana-time-zone",
"js-sys",
"num-traits",
"wasm-bindgen",
"windows-targets",
]
[[package]]
name = "clap"
version = "4.5.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "769b0145982b4b48713e01ec42d61614425f27b7058bda7180a3a41f30104796"
dependencies = [
"clap_builder",
"clap_derive",
]
[[package]]
name = "clap_builder"
version = "4.5.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b26884eb4b57140e4d2d93652abfa49498b938b3c9179f9fc487b0acc3edad7"
dependencies = [
"anstream",
"anstyle",
"clap_lex",
"strsim",
]
[[package]]
name = "clap_derive"
version = "4.5.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54b755194d6389280185988721fffba69495eed5ee9feeee9a599b53db80318c"
dependencies = [
"heck",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "clap_lex"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
[[package]]
name = "colorchoice"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990"
[[package]]
name = "core-foundation-sys"
version = "0.8.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
[[package]]
name = "heck"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "iana-time-zone"
version = "0.1.61"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220"
dependencies = [
"android_system_properties",
"core-foundation-sys",
"iana-time-zone-haiku",
"js-sys",
"wasm-bindgen",
"windows-core",
]
[[package]]
name = "iana-time-zone-haiku"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
dependencies = [
"cc",
]
[[package]]
name = "is_terminal_polyfill"
version = "1.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
[[package]]
name = "js-sys"
version = "0.3.77"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f"
dependencies = [
"once_cell",
"wasm-bindgen",
]
[[package]]
name = "lazy_static"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
dependencies = [
"spin",
]
[[package]]
name = "libc"
version = "0.2.169"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a"
[[package]]
name = "log"
version = "0.4.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f"
[[package]]
name = "memchr"
version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "num-traits"
version = "0.2.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
dependencies = [
"autocfg",
]
[[package]]
name = "once_cell"
version = "1.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
[[package]]
name = "proc-macro2"
version = "1.0.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc"
dependencies = [
"proc-macro2",
]
[[package]]
name = "regex"
version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
dependencies = [
"aho-corasick",
"memchr",
"regex-automata",
"regex-syntax",
]
[[package]]
name = "regex-automata"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "rustversion"
version = "1.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4"
[[package]]
name = "same-file"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
dependencies = [
"winapi-util",
]
[[package]]
name = "shlex"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "spin"
version = "0.9.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
[[package]]
name = "strsim"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "strum"
version = "0.26.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06"
dependencies = [
"strum_macros",
]
[[package]]
name = "strum_macros"
version = "0.26.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be"
dependencies = [
"heck",
"proc-macro2",
"quote",
"rustversion",
"syn",
]
[[package]]
name = "syn"
version = "2.0.96"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83"
[[package]]
name = "utf8parse"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "walkdir"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
dependencies = [
"same-file",
"winapi-util",
]
[[package]]
name = "wasm-bindgen"
version = "0.2.100"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5"
dependencies = [
"cfg-if",
"once_cell",
"rustversion",
"wasm-bindgen-macro",
]
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.100"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6"
dependencies = [
"bumpalo",
"log",
"proc-macro2",
"quote",
"syn",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.100"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
]
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.100"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
dependencies = [
"proc-macro2",
"quote",
"syn",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.100"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d"
dependencies = [
"unicode-ident",
]
[[package]]
name = "winapi-util"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"windows-sys",
]
[[package]]
name = "windows-core"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-sys"
version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-targets"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_gnullvm",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"

15
asotr_csv/Cargo.toml Normal file
View File

@ -0,0 +1,15 @@
[package]
name = "asotr_csv"
version = "0.1.0"
edition = "2021"
authors = ["Danila Gamkov <danila_gamkov@cosmos.ru>"]
description = "The parser for converting data from the ASOTR MVN control channels into the CSV format (see files in asotr.tar.gz)"
[dependencies]
byteorder = "1.4.3"
chrono = "0.4"
strum = { version = "0.26", features = ["derive"] }
clap = { version = "4.*", features = ["derive"] }
lazy_static = { version = "1.5.0", features = ["spin_no_std"] }
regex = "1.7.0"
walkdir = "2.3.2"

122
asotr_csv/README.markdown Normal file
View File

@ -0,0 +1,122 @@
# asotr_csv
The parser for converting data from the ASOTR MVN control channels into the CSV format
## Contents
- **Setup**
- **Using**
- parsing ASOTR MVN data files in specified directory
- parsing all ASOTR MVN data files in specified directory and subdirectories
- plot data in python
- **Output asotr_csv data files description**
- **Contacts**
**Note**: \<PATH_TO_ASOTR_CSV\> - path where the asotr_csv program is cloned from heagit
## Setup
1. Install Rust compiler (if you don't have).
Installation on Linux:
```
curl --proto '=https' --tlsv1.2 https://sh.rustup.rs -sSf | sh
```
Installation on Windows:
Go to address https://www.rust-lang.org/tools/install and follow instructions
For more detailed information you can go to: https://doc.rust-lang.ru/book/ch01-01-installation.html
**Instruction for setup asotr_csv project**
2. Clone the repo to your computer:
```
git clone http://heagit.cosmos.ru/gamkov/asotr_csv.git
```
3. Enter the repo and compile it:
```
cd <PATH_TO_ASOTR_CSV>
cargo build --release
```
After running these commands you will get an executable file (asotr_csv) in the following directory:
\<PATH_TO_ASOTR_CSV\>/target/release/
## Using
### Parsing ASOTR MVN data files in specified directory
1. Download data from science data server to directory \<PATH_TO_ASOTR_DATA\>.
If you don't have MVN data, you might download it from server with science SRG data (IP: 193.232.11.95).
For questions about downloading science data contact Shtykovsky A. (a.shtykovsky@cosmos.ru) or Chelovekov I. (chelovekov@cosmos.ru)
2. Run linux bash script **asotr_unzip.sh** for directory with MVN data in order to unpack **asotr.tar.gz** archive with ASOTR MVN data, for example:
```
cd <PATH_TO_ASOTR_CSV>
./asotr_unzip.sh <PATH_TO_ASOTR_DATA>/20241231-001
```
**Note**: the script **asotr_unzip.sh** will not work on windows, you will need to unpack the archive **\<PATH_TO_ASOTR_DATA\>/20241231-001/data/asotr.tar.gz** manually or write the corresponding Windows bat-file
3. Run program asotr_csv:
```
cd <PATH_TO_ASOTR_CSV>/target/release/
./asotr_csv -d <PATH_TO_ASOTR_DATA>/20241231-001
```
csv data are ready to use in directory:
\<PATH_TO_ASOTR_CSV\>/target/release/
### Parsing all ASOTR MVN data files in specified directory and subdirectories
1. Download data from science data server to directory \<PATH_TO_ASOTR_DATA\>.
If you don't have MVN data, you might download it from server with science SRG data (IP: 193.232.11.95).
For questions about downloading science data contact Shtykovsky A. (a.shtykovsky@cosmos.ru) or Chelovekov I. (chelovekov@cosmos.ru)
2. Run linux bash script **asotr_all_unzip.sh** for directory with MVN data in order to unpack all **asotr.tar.gz** archives with ASOTR MVN data, for example:
```
cd <PATH_TO_ASOTR_CSV>
./asotr_all_unzip.sh <PATH_TO_ASOTR_DATA>/
```
**Note**: the script **asotr_all_unzip.sh** will not work on windows, you will need to unpack the each archive **\<PATH_TO_ASOTR_DATA\>/\<DIRECTORY_WITH_DATA\>/data/asotr.tar.gz** manually or write the corresponding Windows bat-file
If you want to parse asotr data in specified directory, run program asotr_csv directly:
```
cd <PATH_TO_ASOTR_CSV>/target/release/
./asotr_csv -d <PATH_TO_ASOTR_DATA>
```
Or if you want to parse all raw data from ASOTR into csv files and plot csv data you might use shell script:
```
cd <PATH_TO_ASOTR_CSV>
./asotr_all_unzip_auto.sh <PATH_TO_ASOTR_DATA>/
```
csv data will be in directory:
\<PATH_TO_ASOTR_CSV\>/data/
### Plot csv data in Python
If you want to parse all raw data from asotr into csv files and plot csv data you might use shell script:
```
cd <PATH_TO_ASOTR_CSV>
./asotr_all_unzip_auto.sh
```
or if you already have csv files with ASOTR data, you might use plot script only:
```
cd <PATH_TO_ASOTR_CSV>/data/
python3 plot_flight_all.py
```
## Output asotr_csv data files description
**description:**
asotr01_data_T.csv - ASOTR1 temperature data in channels 1-6 (in Celsius)
asotr01_data_P.csv - ASOTR1 power data in channels 1-6 (in %)
asotr01_data_TSET.csv - ASOTR1 temperature sets in channels 1-6 (in Celsius)
asotr02_data_T.csv - ASOTR2 temperature data in channels 1-6 (in Celsius)
asotr02_data_P.csv - ASOTR2 power data in channels 1-6 (in %)
asotr02_data_TSET.csv - ASOTR2 temperature sets in channels 1-6 (in Celsius)
**csv file data format:**
column 1: Unix timestamp in seconds
column 2: timestamp (data and time)
columns 3-8 - data from control channels (power, temperature or temperature set)
## Contacts
For questions about the program, please contact Danila Gamkov, email: danila_gamkov@cosmos.ru

View File

@ -0,0 +1,16 @@
#! /bin/bash
# Unpack every first-level MVN download directory under <path>, convert
# the raw ASOTR telemetry to csv with the compiled asotr_csv parser and
# plot the result.
# Usage: $0 <path-to-directory-with-MVN-downloads>
if [ $# != 1 ]
then
	# Fix: the original message read "erorr".
	echo "error use $0. Right use this script: "
	echo "$0 path"
else
	# Refresh the parser binary used from the data directory.
	cp ../asotr_csv/target/release/asotr_csv ../data/asotr
	path_="$1"
	# Unpack each first-level download directory with asotr_unzip.sh.
	# Variables are quoted so paths with spaces survive word splitting.
	find "${path_}" -maxdepth 1 -type d | xargs -I {} ./asotr_unzip.sh {}
	# Abort if the data directory is missing instead of running the
	# parser and the plot script from the wrong working directory.
	cd ../data/asotr || exit 1
	./asotr_csv -d "${path_}"
	python3 ../../bin/plot_asotr_flight_all.py
fi

30
asotr_csv/data/prepare_csv.sh Executable file
View File

@ -0,0 +1,30 @@
#! /bin/bash
# Rebuild a raw exported csv into a clean one: extract the date column
# and the time-plus-channels column, glue them back together and prepend
# a csv header. Produces <path_to_file>_clear.csv next to the input.
if [ $# != 2 ]
then
	echo "error use $0. Right use this script: "
	echo "$0 path_to_file data_type (flight or KDI)"
	echo "example 1: $0 ./data/flight/30_12_2024/ASOTR_1_SOTR_T flight"
else
	data_file="$1"
	data_type="$2"
	if [ "$data_type" == "flight" ]
	then
		# Dates like 31.12.2024; dots escaped so they match literally.
		# Fix: patterns and file arguments are now quoted so the shell
		# cannot glob or word-split them; the useless `cat |` is gone.
		grep -Eo '[0-9]{2}\.[0-9]{2}\.[0-9]{4}' "${data_file}.csv" > file1
		grep -Eo '[0-9]{2}:.*' "${data_file}.csv" > file2
	elif [ "$data_type" == "KDI" ]
	then
		# NOTE(review): '.' is unescaped here and matches any separator
		# character — confirm whether KDI exports really use a non-dot
		# date separator, otherwise escape as in the flight branch.
		grep -Eo '[0-9]{2}.[0-9]{2}.[0-9]{4}' "${data_file}.csv" > file1
		grep -Eo '[0-9]{2}:.*' "${data_file}.csv" > file2
	else
		echo "error argument of data_type: write \"flight\" or \"KDI\" in second argument"
		exit 1
	fi
	# Re-join date and time columns and prepend the channel header.
	paste --delimiter=' ' file1 file2 > file.csv
	echo "timestamp;ch1;ch2;ch3;ch4;ch5;ch6" > "${data_file}_clear.csv"
	cat file.csv >> "${data_file}_clear.csv"
	rm file1 file2 file.csv
fi

355
asotr_csv/src/main.rs Normal file
View File

@ -0,0 +1,355 @@
use clap::{Parser};
pub mod asotr_data {
use std::{fs::File, io::Read};
use byteorder::{LittleEndian, ReadBytesExt};
use chrono::{DateTime, Utc};
use std::time::{SystemTime, UNIX_EPOCH, Duration};
use strum::FromRepr;
use lazy_static::lazy_static;
use regex::Regex;
use walkdir::WalkDir;
// Lazily-initialized module constants (lazy_static crate).
lazy_static! {
// Human-readable list of the raw telemetry file types the parser
// accepts; used only inside error messages (see parse_filename).
pub static ref support_dtypes: String =
String::from(".data01.asotr01(02), data02.asotr01(02), data06.asotr01(02)");
// (file-name regex, output csv path) pairs: each pattern selects one
// class of raw ASOTR files (data type x kit number) and names the csv
// file its parsed rows are written to by parse_data_dir.
pub static ref patterns_fnames_csv_data: Vec<(String, String)> = {
let mut patterns: Vec<(String, String)> = Vec::new();
patterns.push((String::from(".*data01.asotr01"),
String::from("../data/asotr/asotr01_data_T.csv")));
patterns.push((String::from(".*data02.asotr01"),
String::from("../data/asotr/asotr01_data_P.csv")));
patterns.push((String::from(".*data06.asotr01"),
String::from("../data/asotr/asotr01_data_TSET.csv")));
patterns.push((String::from(".*data01.asotr02"),
String::from("../data/asotr/asotr02_data_T.csv")));
patterns.push((String::from(".*data02.asotr02"),
String::from("../data/asotr/asotr02_data_P.csv")));
patterns.push((String::from(".*data06.asotr02"),
String::from("../data/asotr/asotr02_data_TSET.csv")));
patterns
};
// Console captions, index-aligned with patterns_fnames_csv_data
// (parse_data_dir prints patterns_disp[i] for pattern i).
pub static ref patterns_disp: Vec<String> = {
let mut patterns: Vec<String> = Vec::new();
patterns.push(String::from("ASOTR01 temperature"));
patterns.push(String::from("ASOTR01 power"));
patterns.push(String::from("ASOTR01 temperature setpoint"));
patterns.push(String::from("ASOTR02 temperature"));
patterns.push(String::from("ASOTR02 power"));
patterns.push(String::from("ASOTR02 temperature setpoint"));
patterns
};
}
// Telemetry stream type; the discriminants equal the two-digit data-type
// code parsed out of the raw file name (01 = temperature, 02 = power,
// 06 = temperature setpoint) — see parse_filename.
#[derive(Debug, FromRepr, PartialEq)]
enum AsotrDataType {
Temp = 1,
Pow = 2,
TempSet = 6,
}
// Header decoded from a raw telemetry file NAME (the name, not the file
// body, carries the timestamp and the data type).
struct AsotrDataDesc {
time_s: u64, // unix timestamp, whole seconds
time_mks: u32, // sub-second part, 3 digits in the name — units unconfirmed
date: String, // time_s rendered as "%d.%m.%Y" (UTC)
time: String, // time_s rendered as "%H:%M:%S" (UTC)
data_type: AsotrDataType,
// kit: u8,
}
impl AsotrDataDesc {
// Plain field-by-field constructor used by parse_filename.
pub fn new(time_s: u64, time_mks: u32, date: String, time: String,
data_type: AsotrDataType) -> AsotrDataDesc {
AsotrDataDesc { time_s, time_mks, date, time, data_type }
}
}
/// Parse one raw ASOTR telemetry file into a single semicolon-separated
/// csv row: "<unix_sec>;<dd.mm.yyyy HH:MM:SS.sub>;<ch1>;...;<ch6>".
///
/// The timestamp and data type come from the file NAME (see
/// parse_filename); the file BODY holds six little-endian channel
/// values: f32 for temperature/setpoint, u16 for power.
/// Returns a formatted error string on any open/read/parse failure.
pub fn read_data(filename_full: String) -> Result<String, String> {
let ch_u16: [u16; 6];
let ch_f32: [f32; 6];
// Fails for file names that are not 32 chars or carry an unknown type.
let asotr_head = parse_filename(filename_full.clone())?;
let mut buf = Vec::new();
let mut out = String::new();
let mut data = match File::open(filename_full.clone())
{
Ok(file) => file,
Err(msg) => { return Err(format!("Error opening data file {}: {}", filename_full, msg)) }
};
match data.read_to_end(&mut buf) {
Ok(stat) => stat,
Err(msg) => { return Err(format!("Error reading data file {}: {}", filename_full, msg)) }
};
// Row prefix: "<unix_sec>;<date> <time>.<sub>;".
// NOTE(review): time_mks comes from a 3-digit name field but is printed
// with {:02} — confirm the intended sub-second width.
out.push_str(&format!("{};{} {}.{:02};",
&asotr_head.time_s,
&asotr_head.date,
&asotr_head.time,
asotr_head.time_mks));
// Temperature and setpoint payloads are six f32 values ...
if asotr_head.data_type == AsotrDataType::Temp ||
asotr_head.data_type == AsotrDataType::TempSet {
ch_f32 = parse_data_f32(buf)?;
for elem in ch_f32 {
out.push_str(&elem.to_string());
out.push(';');
}
}
// ... power payloads are six u16 values. parse_filename guarantees the
// type is one of the three, so exactly one branch runs.
else if asotr_head.data_type == AsotrDataType::Pow {
ch_u16 = parse_data_u16(buf)?;
for elem in ch_u16 {
out.push_str(&elem.to_string());
out.push(';');
}
}
// Drop the trailing ';' separator appended after the last channel.
out.remove(out.len() - 1);
return Ok(out);
}
/// Walk `dir` recursively, parse every supported ASOTR telemetry file
/// and write one csv file per (kit, data type) pair as listed in
/// patterns_fnames_csv_data. When `disp` is true each table is also
/// printed to stdout with its caption from patterns_disp.
pub fn parse_data_dir(dir: &str, disp: bool) -> Result<(), String> {
let mut data: Vec<String> = Vec::new();
println!("parse data from directory: {}", dir);
for (i, (pattern, fname)) in patterns_fnames_csv_data.iter().enumerate() {
let files = find_files_regex(dir, pattern)?;
for elem in files {
data.push(read_data(elem)?);
}
// Rows start with the 10-digit unix timestamp, so a lexicographic
// sort also orders them in time; dedup drops rows duplicated across
// overlapping downloads.
data.sort();
data.dedup();
if disp { disp_data(&data, &patterns_disp[i])?; }
println!("save csv data to file: {}", fname);
save_data_csv(data.clone(), fname)?;
// Reuse the buffer for the next pattern.
data.clear();
}
return Ok(());
}
/// Decode six little-endian f32 channel values from the raw payload.
/// Fails with a formatted message when the buffer is too short.
fn parse_data_f32(buf: Vec<u8>) -> Result<[f32; 6], String> {
    let mut cursor = &buf[..];
    let mut channels = [0.0f32; 6];
    for slot in channels.iter_mut() {
        *slot = cursor.read_f32::<LittleEndian>().map_err(|err| {
            format!("Error parsing file: failed parsing float32 data: {}", err)
        })?;
    }
    Ok(channels)
}
/// Decode six little-endian u16 channel values from the raw payload.
/// Fails with a formatted message when the buffer is too short.
fn parse_data_u16(buf: Vec<u8>) -> Result<[u16; 6], String> {
    let mut cursor = &buf[..];
    let mut channels = [0u16; 6];
    for slot in channels.iter_mut() {
        *slot = cursor.read_u16::<LittleEndian>().map_err(|err| {
            format!("Error parsing file: failed parsing uint16 data: {}", err)
        })?;
    }
    Ok(channels)
}
/// Decode the fixed-layout raw telemetry file name (exactly 32 chars)
/// into an AsotrDataDesc. Offsets used below:
///   chars 0..10  - unix timestamp, seconds
///   chars 11..14 - sub-second part, 3 digits (units unconfirmed)
///   chars 22..24 - data type code (01, 02 or 06)
///   chars 30..32 - asotr kit number (currently unused, see commented code)
/// Returns a formatted error for any other name shape.
fn parse_filename(filename_full: String) -> Result<AsotrDataDesc, String> {
let mut fname = String::new();
let msg_prev = format!("Error parsing filename {}:", filename_full);
// Strip the directory part, if any.
match filename_full.rfind('/') {
Some(val) => { fname = (filename_full[val+1..filename_full.len()]).to_string(); }
_ => { fname = filename_full.clone(); }
}
if fname.len() != 32 {
return Err(format!("{} unsupported file", msg_prev));
}
let time_unix_ = fname[0..10].parse::<u64>();
let time_unix = match &time_unix_ {
Ok(data) => data,
Err(msg) => {
return Err(format!("{} expected digits in timestamp sec part ({})",
msg_prev, msg));
}
};
let data_type_ = fname[22..24].parse::<u8>();
let data_type_u8 = match &data_type_ {
Ok(data) => data,
Err(msg) => {
return Err(format!("{} expected digits in data type part ({})",
msg_prev, msg));
}
};
// Only the three codes backed by AsotrDataType are accepted.
if *data_type_u8 == 1 || *data_type_u8 == 2 || *data_type_u8 == 6 { }
else {
return Err(format!("{} parser supports data types: {}",
msg_prev, support_dtypes.to_string()));
}
let data_type = match AsotrDataType::from_repr(*data_type_u8 as usize) {
Some(value) => value,
_ => return Err(format!("{} expected digits in data type part",
msg_prev))
};
// let _kit = filename[30..32].parse::<u8>();
// let kit = match &_kit {
// Ok(data) => data,
// Err(msg) => { return Err(format!("{}: expected digits in asotr kit part ({})",
// msg_prev, msg)); }
// };
let _time_str_mks = fname[11..14].parse::<u32>();
let time_mks = match &_time_str_mks {
Ok(data) => data,
Err(msg) => { return Err(format!("{}: expected digits in timestamp mks part ({})",
msg_prev, msg)); }
};
// Render the unix seconds as human-readable UTC date and time strings.
let time: SystemTime = UNIX_EPOCH + Duration::from_secs(*time_unix);
let date_time = DateTime::<Utc>::from(time);
let date_s = date_time.format("%d.%m.%Y").to_string();
let time_s = date_time.format("%H:%M:%S").to_string();
let head = AsotrDataDesc::new(*time_unix, *time_mks, date_s, time_s, data_type);
return Ok(head);
}
/// Recursively collect paths under `dir` whose file NAME matches the
/// regex `template`.
///
/// Returns an error when the regex fails to compile or when no file
/// matches under `dir`.
fn find_files_regex(dir: &str, template: &str) -> Result<Vec<String>, String> {
    let regex = match Regex::new(template) {
        Ok(val) => val,
        Err(msg) => {
            return Err(format!("Error create regex template ({}): {}",
                template, msg));
        }
    };
    // Walk the tree, skipping unreadable entries. A file name that is
    // not valid UTF-8 cannot match a textual pattern, so it is skipped
    // instead of panicking (the original called .unwrap() on to_str()).
    let path_vec: Vec<String> = WalkDir::new(dir)
        .into_iter()
        .filter_map(|e| e.ok())
        .filter(|e| e.file_name().to_str().map_or(false, |name| regex.is_match(name)))
        .map(|e| e.into_path().display().to_string())
        .collect();
    if path_vec.is_empty() {
        return Err(format!(
            "Error searching files for pattern ({}): files not found in directory {}",
            template, dir));
    }
    Ok(path_vec)
}
/// Print `about` as a caption, then the collected rows as a
/// tab-separated table with a fixed header line.
fn disp_data(data: &Vec<String>, about: &str) -> Result<(), String> {
    println!("{}", about);
    let header = ["timestamp_sec", "timestamp",
        "ch1", "ch2", "ch3", "ch4", "ch5", "ch6"];
    println!("{}", header.join("\t"));
    // Rows are stored ';'-separated; swap the delimiter for display.
    data.iter().for_each(|row| println!("{}", row.replace(';', "\t")));
    Ok(())
}
/// Write the rows to `fname` as a ';'-separated csv file with a fixed
/// header line; rows are joined with '\n'. Errors are formatted strings.
fn save_data_csv(data: Vec<String>, fname: &str) -> Result<(), String> {
    let header = ["timestamp_sec", "timestamp",
        "ch1", "ch2", "ch3", "ch4", "ch5", "ch6"].join(";");
    let mut lines = vec![header];
    lines.extend(data);
    match std::fs::write(fname, lines.join("\n")) {
        Ok(_) => Ok(()),
        Err(msg) => Err(format!("Error write data to csv file {}: {}", fname, msg)),
    }
}
}
// Command-line interface (clap derive). The `///` doc comments below
// become the --help text shown to the user, i.e. they are part of the
// program's runtime output — do not edit them casually.
#[derive(Parser, Debug)]
#[command(version, author, about, long_about = None)]
struct Cli {
/// file with ASOTR MVN data (.data01.asotr01(02), data02.asotr01(02), data06.asotr01(02))
#[arg(long, short = 'f', value_name = "FILE_DATA")]
filename: Option<String>,
/// directory with ASOTR MVN data
#[arg(long, short = 'd', value_name = "DIRECTORY_DATA")]
directory: Option<String>,
/// show data in console
#[arg(short = 's')]
show: bool,
}
/// Entry point: -f parses one raw telemetry file and prints the csv
/// row; -d converts every supported file under a directory into the
/// csv files listed in patterns_fnames_csv_data; -s additionally
/// echoes the tables to the console.
fn main() {
    use crate::asotr_data::*;
    let cli = Cli::parse();
    let show = cli.show;
    // Single-file mode: print the parsed row or the error message.
    if let Some(fname) = &cli.filename {
        match read_data(fname.clone()) {
            Ok(row) => println!("{}", row),
            Err(msg) => println!("{}", msg),
        }
        return;
    }
    // Directory mode: errors are reported, not propagated.
    if let Some(dir) = &cli.directory {
        if let Err(msg) = parse_data_dir(dir, show) {
            println!("{}", msg);
        }
        return;
    }
    // Fix: the original message read "iformation".
    println!("Unexpected command. Type --help for more information");
}

View File

@ -0,0 +1 @@
{"rustc_fingerprint":2742313010855374649,"outputs":{"15729799797837862367":{"success":true,"status":"","code":0,"stdout":"___\nlib___.rlib\nlib___.so\nlib___.so\nlib___.a\nlib___.so\n/home/danila/.rustup/toolchains/stable-x86_64-unknown-linux-gnu\noff\npacked\nunpacked\n___\ndebug_assertions\npanic=\"unwind\"\nproc_macro\ntarget_abi=\"\"\ntarget_arch=\"x86_64\"\ntarget_endian=\"little\"\ntarget_env=\"gnu\"\ntarget_family=\"unix\"\ntarget_feature=\"fxsr\"\ntarget_feature=\"sse\"\ntarget_feature=\"sse2\"\ntarget_has_atomic=\"16\"\ntarget_has_atomic=\"32\"\ntarget_has_atomic=\"64\"\ntarget_has_atomic=\"8\"\ntarget_has_atomic=\"ptr\"\ntarget_os=\"linux\"\ntarget_pointer_width=\"64\"\ntarget_vendor=\"unknown\"\nunix\n","stderr":""},"4614504638168534921":{"success":true,"status":"","code":0,"stdout":"rustc 1.83.0 (90b35a623 2024-11-26)\nbinary: rustc\ncommit-hash: 90b35a6239c3d8bdabc530a6a0816f7ff89a0aaf\ncommit-date: 2024-11-26\nhost: x86_64-unknown-linux-gnu\nrelease: 1.83.0\nLLVM version: 19.1.1\n","stderr":""}},"successes":{}}

View File

@ -0,0 +1,3 @@
Signature: 8a477f597d28d172789f06886806bc55
# This file is a cache directory tag created by cargo.
# For information about cache directory tags see https://bford.info/cachedir/

1
bin/.rustc_info.json Normal file
View File

@ -0,0 +1 @@
{"rustc_fingerprint":2742313010855374649,"outputs":{"4614504638168534921":{"success":true,"status":"","code":0,"stdout":"rustc 1.83.0 (90b35a623 2024-11-26)\nbinary: rustc\ncommit-hash: 90b35a6239c3d8bdabc530a6a0816f7ff89a0aaf\ncommit-date: 2024-11-26\nhost: x86_64-unknown-linux-gnu\nrelease: 1.83.0\nLLVM version: 19.1.1\n","stderr":""},"15729799797837862367":{"success":true,"status":"","code":0,"stdout":"___\nlib___.rlib\nlib___.so\nlib___.so\nlib___.a\nlib___.so\n/home/danila/.rustup/toolchains/stable-x86_64-unknown-linux-gnu\noff\npacked\nunpacked\n___\ndebug_assertions\npanic=\"unwind\"\nproc_macro\ntarget_abi=\"\"\ntarget_arch=\"x86_64\"\ntarget_endian=\"little\"\ntarget_env=\"gnu\"\ntarget_family=\"unix\"\ntarget_feature=\"fxsr\"\ntarget_feature=\"sse\"\ntarget_feature=\"sse2\"\ntarget_has_atomic=\"16\"\ntarget_has_atomic=\"32\"\ntarget_has_atomic=\"64\"\ntarget_has_atomic=\"8\"\ntarget_has_atomic=\"ptr\"\ntarget_os=\"linux\"\ntarget_pointer_width=\"64\"\ntarget_vendor=\"unknown\"\nunix\n","stderr":""}},"successes":{}}

132
bin/.vimrc Normal file
View File

@ -0,0 +1,132 @@
" Indentation: real tabs displayed as 4 columns
set tabstop=4
set softtabstop=4
set shiftwidth=4
set noexpandtab
set colorcolumn=90
" BUG fix: the group name was misspelled 'ColorColumnt', so the 90-column
" marker was never highlighted.
highlight ColorColumn ctermbg=darkgray
augroup project
	autocmd!
	autocmd BufRead,BufNewFile *.h,*.c set filetype=c.doxygen
augroup END
let &path.="src/include, src/source,"
" Use the system clipboard
set clipboard=unnamedplus
" Text editing
" Python uses 4 spaces for indentation
autocmd FileType python setlocal tabstop=4 shiftwidth=4
" Text encoding
set encoding=utf-8
set fileencoding=utf-8
set fileencodings=utf-8,cp1251,koi8-r,cp866
" Text search
set hlsearch " highlight search matches
" Cursor movement
" At line boundaries, move to the previous/next line
set whichwrap+=h,l,<,>,[,]
set number
" Completion settings
set completeopt=menu,menuone,noselect
" Screen splitting
set splitbelow " open new horizontal split below
set splitright " open new vertical split to the right
" Key mappings
" Use Ctrl + h, j, k, l for cursor movement in insert mode
inoremap <C-h> <Left>
inoremap <C-j> <Down>
inoremap <C-k> <Up>
inoremap <C-l> <Right>
let g:mapleader = "\<Space>"
" Switch between tabs
nnoremap <leader>t :tabnext<CR>
nnoremap <leader>T :tabprevious<CR>
" List tabs
nnoremap <leader>tl :tabs<CR>
" nnoremap <leader>tn :tabnew<CR>
nnoremap <leader>tc :tabclose<CR>
nnoremap <leader>to :tabonly<CR>
nnoremap <leader>tm :tabmove<CR>
" Edit a file in a new tab
nnoremap <leader>te :tabedit |
" Select a tab by number
nnoremap <leader>1 1gt
nnoremap <leader>2 2gt
nnoremap <leader>3 3gt
nnoremap <leader>4 4gt
nnoremap <leader>5 5gt
nnoremap <leader>6 6gt
nnoremap <leader>7 7gt
nnoremap <leader>8 8gt
nnoremap <leader>9 9gt
nnoremap <leader>0 :tablast<CR>
" Window splits
nnoremap <leader>s :split<CR>
nnoremap <leader>v :vsplit<CR>
" Select window
nnoremap <C-h> <C-w>h
nnoremap <C-j> <C-w>j
nnoremap <C-k> <C-w>k
nnoremap <C-l> <C-w>l
" Resize window
nnoremap <C-u> <C-w>+
nnoremap <C-d> <C-w>-
nnoremap <C-p> <C-w><
nnoremap <C-n> <C-w>>
" Vimspector
" nnoremap <leader><F2> <F10>
" nnoremap <leader>q <F11>
nmap <Leader><Right> <Plug>VimspectorStepOver
nmap <Leader><Down> <Plug>VimspectorStepInto
nmap <Leader><Up> <Plug>VimspectorStepOut
nmap <Leader><Tab> <Plug>VimspectorDisassemble
" Make all windows equal size
nnoremap <leader>= <C-w>=
" Switch between buffers
" nnoremap <leader>b :bnext<CR>
" nnoremap <leader>B :bprevious<CR>
" nnoremap <leader>l :ls<CR>
" nnoremap <leader>d :bd<CR>
" " Fold/unfold a code block
" nnoremap <leader>z za
" Plugin settings
" indent-guides settings
" let g:indent_guides_enable_on_vim_startup = 1
" Rainbow parentheses highlighting
let g:rainbow_active = 1
" vim-airline settings
let g:airline#extensions#tabline#enabled = 1
let g:airline#extensions#tabline#buffer_nr_show = 1
let g:airline#extensions#tabline#formatter = 'unique_tail'
let g:airline_powerline_fonts = 1
let g:airline_solarized_bg = 'luna'
let g:vimspector_enable_mappings = 'HUMAN'

View File

@ -26,6 +26,15 @@ class TimeIndexNotFound(Exception):
fname_json_decode = './decode_asotr_cmd.json'
def convert_to_str(lst):
    """Build a channel-suffix string like 'ch1_3_5' from a 0/1 channel mask.

    Args:
        lst: iterable of 0/1 flags; position i == 1 means channel i+1 is active.

    Returns:
        'chA_B_...' with 1-based indices of the active channels; plain 'ch'
        for an all-zero mask (the original raised IndexError in that case).
    """
    index = [i for i, x in enumerate(lst) if x == 1]
    # join-based build avoids the manual first-element special case
    return "ch" + "_".join(str(i + 1) for i in index)
def get_utc_seconds(timestamp_str, timestamp_format):
dt_obj = datetime.strptime(timestamp_str, timestamp_format)
utc_timezone = pytz.utc
@ -377,7 +386,6 @@ def plot_signal_profile(time, data, pattern_t, pattern, method, shift_flag, peak
def insert_temp_data_from_flight_cmd(fname_cmd_temp, dir_asotr):
fname_asotr = [f'{dir_asotr}asotr01_data_T.csv', f'{dir_asotr}asotr02_data_T.csv']
df_cmd = pd.read_csv(fname_cmd_temp, sep=';')
df_asotr = []
@ -486,26 +494,26 @@ def get_step_response_diff(data, thermocycle_info, channel='ch1',
def plot_step_response_in_thermocycle(data_info, thermocycle_info, interp,
cut_step_resp, plot_info):
title = f'{plot_info["title"]}, канал {data_info["channel"][2]} АСОТР КДИ СПИН-X, период опроса {data_info["period"]} ({thermocycle_info["date"]})'
title = f'{plot_info["title"]}, канал {data_info["channel"][2]} АСОТР, {data_info["device"]} СПИН-X1-МВН, период опроса {data_info["period"]} ({thermocycle_info["date"]})'
step_resp, orig_interp_cycle, step_interp_cycle = get_step_response_diff(
data_info['data'], thermocycle_info, channel=data_info['channel'],
interp=interp, accuracy=data_info['find_accuracy'])
fig = plt.figure(figsize=(8, 6), dpi=200)
fig = plt.figure(figsize=(9, 6), dpi=200)
fig.suptitle(title, fontsize=plot_info['font'])
ax1 = fig.add_subplot(2,1,1)
ax2 = fig.add_subplot(2,1,2)
ax1.plot(step_resp['timestamp'], step_resp['temp'],
label='реакция на ступенчатое воздействие')
label='реакция на ' + thermocycle_info['type_ru'] + ' воздействие')
step_begin = cut_step_resp['time_step_begin']
idx = find_best_time_idx(step_interp_cycle.timestamp, step_begin,
accuracy=data_info['find_accuracy'])
ax1.axvline(x = step_interp_cycle.timestamp[idx], color='r', linestyle='-.',
label='момент подачи ступенчатого воздействия')
label= thermocycle_info['type_ru'] + ' воздействие, начало')
date_formatter = dates.DateFormatter(plot_info['ox_dtime_format'])
ax1.xaxis.set_major_formatter(date_formatter)
@ -515,17 +523,17 @@ def plot_step_response_in_thermocycle(data_info, thermocycle_info, interp,
ax1.set_ylabel(r'$\Delta$T, $^\circ$C', fontsize=plot_info['font'])
ax2.axvline(x = step_interp_cycle.timestamp[idx], color='r', linestyle='-.',
label='момент подачи ступенчатого воздействия')
label= thermocycle_info['type_ru'] + ' воздействие, начало')
ax2.plot(orig_interp_cycle['timestamp'], orig_interp_cycle['temp'], '--',
label='термоцикл')
ax2.plot(step_interp_cycle['timestamp'], step_interp_cycle['temp'],
label='термоцикл с реакцией на ступенчатое воздействие')
label='термоцикл с реакцией на ' + thermocycle_info['type_ru'] + ' воздействие')
ax2.xaxis.set_major_formatter(date_formatter)
ax2.legend(loc=plot_info["legend_pos"][1], fontsize=plot_info['font'],
fancybox=True, framealpha=0.4)
ax2.grid(True)
ax2.tick_params(axis='both', width=1, labelsize=plot_info['font'])
ax2.set_xlabel('время', fontsize=plot_info['font'])
ax2.set_xlabel('Время, ЧЧ:MM:CC', fontsize=plot_info['font'])
ax2.set_ylabel(r'$T_{norm}$, $^\circ$C', fontsize=plot_info['font'])
fig.suptitle(title, fontsize=plot_info['font'])

BIN
bin/asotr_csv Executable file

Binary file not shown.

14
bin/asotr_unzip_plot.sh Executable file
View File

@ -0,0 +1,14 @@
#! /bin/bash
# Unpack MVN tar.gz archives under the given path, rebuild the ASOTR CSV
# files with the asotr_csv binary and plot the flight data.
if [ $# != 1 ]
then
	# typo fix: "erorr" -> "error"
	echo "error use $0. Right use this script: "
	echo "$0 path"
else
	# the asotr_csv binary is built by the Rust crate in ../asotr_csv
	cp ../asotr_csv/target/release/asotr_csv ./
	path_=$1
	# quote expansions so paths with spaces survive word splitting
	python3 recursive_unpack_targz.py "${path_}"
	./asotr_csv -d "${path_}"
	python3 plot_asotr_flight_all.py
fi

150
bin/brd_wheel_1Hz_parser.py Normal file
View File

@ -0,0 +1,150 @@
import pandas as pd
import os
import re
from pathlib import Path
import matplotlib.pyplot as plt
from datetime import datetime, timedelta
# timestamp format used throughout this script (day-first, with microseconds)
tstamp_s = '%d.%m.%Y %H:%M:%S.%f'
# x-axis tick-label format for the plots
ox_dtime_format = '%d.%m.%Y %H:%M'
# output directory for the aggregated per-BRD CSV files
path_itog_brd_data = '../data/brd_data/'

class PathFileNotFound(Exception):
    # Raised when a recursive search finds no files matching the pattern.
    pass
def find_required_files(root_dir, pattern):
    """Recursively find files under root_dir whose name matches a regex.

    Args:
        root_dir: directory to walk recursively.
        pattern: regex matched against the bare file name (re.match semantics,
            i.e. anchored at the start of the name).

    Returns:
        Sorted list of full paths of every matching file.

    Raises:
        PathFileNotFound: when no file matches.
    """
    result = []
    for dirpath, _, filenames in os.walk(root_dir):
        for filename in filenames:
            if re.match(pattern, filename):
                # os.path.join instead of manual '/' concatenation
                result.append(os.path.join(dirpath, filename))
    if not result:
        raise PathFileNotFound(f'error: check that the path is correct ({root_dir}) or files pattern is correct ({pattern})')
    return sorted(result)
def read_files_into_df(fname_list, column_list, dtype_columns=None):
    """Concatenate whitespace-separated telemetry files into one DataFrame.

    Every file is read with all columns as str, NaN rows dropped, and only
    column_list kept. When a 'TIME' column is requested, a human-readable
    'timestamp' column (seconds since the 2000-01-01 epoch) is added and rows
    with TIME == 0 are discarded.

    Args:
        fname_list: paths of the files to read.
        column_list: column names to keep from each file.
        dtype_columns: accepted for interface compatibility but currently
            unused — read_csv below forces every column to str. The previous
            default was a mutable dict literal `{}`, replaced with None.

    Returns:
        One concatenated DataFrame over all input files.
    """
    data_itog = pd.DataFrame()
    epoch_start = pd.Timestamp('2000-01-01')
    for fname in fname_list:
        data = pd.read_csv(fname, sep=r'\s+', dtype=str)
        data = data.dropna()
        data = data[column_list]
        # fallback so the concat below is defined even without a TIME column
        # (previously data_clear was only bound inside the if-branch)
        data_clear = data
        if 'TIME' in column_list:
            # convert TIME value to human-readable timestamp (since epoch 01.01.2000)
            time = data['TIME'].astype(float)
            tstamp = epoch_start + pd.to_timedelta(time, unit='s')
            timestamp = tstamp.dt.strftime(tstamp_s)
            data['timestamp'] = timestamp
            # clear dataframe rows where time value == 0
            data['time'] = time
            data_clear = data.query('time != 0.0')
        data_itog = pd.concat([data_itog, data_clear], ignore_index=True)
    return data_itog
def collect_tm_brd_files(root_dir_tm_data, column_list, column_list_itog):
    """Aggregate raw BRD telemetry files into one CSV file per BRD unit.

    For each unit pattern, all matching files under root_dir_tm_data are read,
    concatenated and written to ../data/brd_data/mvn_tm_brdXX.csv with the
    columns listed in column_list_itog. Processing stops at the first error.
    """
    patterns_tm = [r'mvn_tm_brd01_(.*)', r'mvn_tm_brd02_(.*)',
                   r'mvn_tm_brd03_(.*)', r'mvn_tm_brd04_(.*)']
    for unit_pattern in patterns_tm:
        # the first 12 characters of the pattern form the unit name, e.g. 'mvn_tm_brd01'
        out_csv = path_itog_brd_data + unit_pattern[:12] + '.csv'
        try:
            matched = find_required_files(root_dir_tm_data, unit_pattern)
            frame = read_files_into_df(matched, column_list,
                                       dtype_columns={11: float})
        except KeyError as e:
            print(f'error in collect_tm_brd_files: the specified column name was not found in the data file (path: {root_dir_tm_data}) ({e})')
            break
        except Exception as e:
            print(f'error in collect_tm_brd_files: {e}')
            break
        frame.to_csv(out_csv, index=False, sep=';', columns=column_list_itog,
                     encoding='utf-8-sig')
        print('data saved: ' + out_csv)
def collect_tm_brd_wheel_data(root_dir_wheel_data, column_list, column_list_itog):
    """Aggregate raw reaction-wheel telemetry files into one CSV per BRD unit.

    Like collect_tm_brd_files, but additionally keeps only rows whose STATE
    flag equals '0' (values are read as str). Processing stops at the first
    failing pattern.
    """
    patterns_wheel = [r'mvn_wheel_brd01_(.*)', r'mvn_wheel_brd02_(.*)',
                      r'mvn_wheel_brd03_(.*)', r'mvn_wheel_brd04_(.*)']
    for pattern in patterns_wheel:
        # first 15 chars of the pattern are the unit name, e.g. 'mvn_wheel_brd01'
        fname = path_itog_brd_data + pattern[:15] + '.csv'
        try:
            found_files = find_required_files(root_dir_wheel_data, pattern)
            data = read_files_into_df(found_files, column_list,
                                      dtype_columns={0: float, 1: int})
        except KeyError as e:
            # BUG fix: the message previously interpolated the unrelated global
            # root_dir_tm_data; report the directory actually searched.
            print(f'error in collect_tm_brd_wheel_data: the specified column name was not found in the data file (path: {root_dir_wheel_data}) ({e})')
            break
        except Exception as e:
            print(f'error in collect_tm_brd_wheel_data: {e}')
            break
        # keep only rows where the wheel state flag is '0'
        mask = data['STATE'] == '0'
        data = data[mask]
        data.to_csv(fname, index=False, sep=';', columns=column_list_itog,
                    encoding='utf-8-sig')
        print('data saved: ' + fname)
## collect raw tm brd data into one file for each brd
root_dir_tm_data = '/home/danila/Danila/work/MVN/flight/brd_data/arch_for_MB/archive_tm_data_txt/'
column_list = ['TIME', 'PER_1Hz', 'ST_HV']
column_list_itog = ['TIME', 'timestamp', 'PER_1Hz', 'ST_HV']
collect_tm_brd_files(root_dir_tm_data, column_list, column_list_itog)

# same aggregation for the reaction-wheel telemetry files
root_dir_wheel_data = '/home/danila/Danila/work/MVN/flight/brd_data/arch_for_MB/archive_wheel_data_txt/'
column_list = ['TIME', 'STATE']
column_list_itog = ['TIME', 'timestamp', 'STATE']
collect_tm_brd_wheel_data(root_dir_wheel_data, column_list, column_list_itog)

## plot 'evolution' 1 Hz from tm brd data
fname = path_itog_brd_data + 'mvn_tm_brd01.csv'
# NOTE(review): date_parser is deprecated in recent pandas — confirm the
# installed version still supports it, or switch to date_format.
dateparse = lambda x: datetime.strptime(x, tstamp_s)
df = pd.read_csv(fname, sep=';', parse_dates=['timestamp'], date_parser=dateparse)
plt.plot(df['timestamp'], df['PER_1Hz'], '.')
plt.show()

# Split wheel-event intervals: intervals within +/- border_clr_wheel seconds
# of the median interval are "clean"; the rest are outliers ("peaks").
border_clr_wheel = 2
fname = path_itog_brd_data + 'mvn_wheel_brd01.csv'
wheel_df = pd.read_csv(fname, sep=';')
wheel_df['TIME_diff'] = wheel_df['TIME'].diff()
median_tdiff = wheel_df['TIME_diff'].median()
wheel_df_clear = wheel_df[(wheel_df['TIME_diff'] > median_tdiff - border_clr_wheel) &
    (wheel_df['TIME_diff'] < median_tdiff + border_clr_wheel)]
wheel_df_peaks = wheel_df[(wheel_df['TIME_diff'] <= median_tdiff - border_clr_wheel) |
    (wheel_df['TIME_diff'] >= median_tdiff + border_clr_wheel)]
plt.plot(wheel_df_clear['TIME'], wheel_df_clear['TIME_diff'])
plt.show()

# dead exploratory code kept from the original
# df1 = df[df['TIME_diff'] < 30.6]
# print(df[df['TIME_diff'] > 30.6 or df['TIME_diff'] < 29.4] )
# for idx, row in df.iterrows():
# print(row['TIME'])

View File

@ -1,25 +1,27 @@
import sys
from importlib import reload
sys.path.append('/home/danila/Danila/work/MVN/Soft/PID/python/')
sys.path.append('./')
import asotr
reload(asotr)
import pandas as pd
from datetime import datetime, timedelta
path_data = '/home/danila/Danila/work/MVN/Soft/asotr_csv/data/'
fname_cmd_flight = '/home/danila/Danila/work/MVN/Soft/PID/data/flight/cmd_asotr/all_flight_cmd_asotr.csv'
fname_cmd_temp = './data/flight_cmd_temp.csv'
path_data = '../data/asotr/'
fname_cmd_flight = '../data/cmd_asotr/all_flight_cmd_asotr.csv'
fname_cmd_temp = '../data/cmd_asotr/flight_cmd_temp.csv'
fname_cmd_human = '../data/cmd_asotr/cmd_human.csv'
timeformat = '%d.%m.%Y %H:%M:%S'
prev_days = 25
## get flight commands file (generated by mvn_log_viewer)
## Translate to human-readeble format and take temperatures from flight commands file
## save in cmd_human
cmd_list, temperature_list = asotr.get_cmd_data(fname_cmd_flight)
with open('./data/cmd_human.csv', 'w') as file:
with open(fname_cmd_human, 'w') as file:
for elem in cmd_list:
file.write(f'{elem}\n')
## temperatures from flight commands file save to file
## temperatures from flight commands file save to file flight_cmd_temp
with open(fname_cmd_temp, 'w') as file:
file.write(f'timestamp_sec;timestamp;asotr_kit;ch1;ch2;ch3;ch4;ch5;ch6\r\n')
for elem in temperature_list:
@ -28,16 +30,16 @@ with open(fname_cmd_temp, 'w') as file:
## insert temperatures from flight commands file to main asotr temperatures data files
df_asotr_ = asotr.insert_temp_data_from_flight_cmd(fname_cmd_temp, path_data)
## form timestamp file where minimum of temperatures registered
end_date = ''
for i, data in enumerate(df_asotr_):
end_date = data['timestamp'].iloc[len(data) - 1][0:18]
data.to_csv(f'./data/asotr0{i+1}_data_T.csv', index=False, sep=';',
data.to_csv(f'{path_data}asotr0{i+1}_data_T.csv', index=False, sep=';',
encoding='utf-8-sig', decimal='.')
delta_date = datetime.strptime(end_date, timeformat) - timedelta(days=prev_days)
start_date = delta_date.strftime(timeformat)
## form timestamp file where minimum of temperatures registered
for kit in range(1,3):
asotr_kit = f'0{kit}'
@ -57,7 +59,7 @@ for kit in range(1,3):
min_temp_ch.append(min_temp_period)
fname = f'./data/asotr{asotr_kit}_min_T.csv'
fname = f'{path_data}asotr{asotr_kit}_min_T.csv'
df = pd.DataFrame(min_temp_ch).transpose()
df.to_csv(fname, header=False, index=False, sep=';',

164
bin/flight_temp_forecast.py Normal file
View File

@ -0,0 +1,164 @@
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider
import pandas as pd
import numpy as np
import sys
from importlib import reload
sys.path.append('./')
import asotr
reload(asotr)
from datetime import datetime, timedelta
from matplotlib import dates
def get_raw_data(year, path_with_data, asotr_kit, data_borders):
    """Load ASOTR channel data and the Beta-angle table for one ASOTR kit.

    Args:
        year: year as a string, e.g. '2025'; used for the default date window
            and to build the beta_<year>.xlsx file name.
        path_with_data: directory with the ASOTR CSV files and the beta file.
        asotr_kit: ASOTR kit id string ('01' or '02').
        data_borders: dict with 'flag' (bool) and, when True, 'begin'/'end'
            date strings 'DD.MM.YYYY' selecting the data window.

    Returns:
        Tuple (data_dict, data_dict_borders, data_beta); data_dict holds
        'temp'/'temp_set'/'pow' frames plus their time columns.

    Raises:
        Re-raises any exception from asotr.get_data or pd.read_excel after
        printing it.
    """
    if data_borders['flag'] == True:
        start_date = data_borders['begin'] + " 00:00:00"
        end_date = data_borders['end'] + " 23:59:59"
        accuracy = 'minutes'
    else:
        # no explicit window: whole first day of the year, coarse accuracy
        start_date = '01.01.' + year + " 00:00:00"
        end_date = '01.01.' + year + " 23:59:59"
        accuracy = 'hours'
    try:
        data, data_dict_borders = asotr.get_data(path_with_data, asotr_kit,
            start_date, end_date, accuracy)
        ch_signs = ["temp", "temp_set", "pow"]
        ch = [[], [], [], [], [], []]
        # placeholder values; every key is reassigned below
        data_dict = {
            "temp": ch,
            "temp_set": ch,
            "pow": ch,
            "time_temp": [],
            "time_temp_set": [],
            "time_pow": [],
        }
        data_dict["time_temp"] = data[0]["timestamp"]
        data_dict["time_temp_set"] = data[1]["timestamp"]
        data_dict["time_pow"] = data[2]["timestamp"]
        col = ["ch1", "ch2", "ch3", "ch4", "ch5", "ch6"]
        for j in range(len(ch_signs)):
            data_dict[ch_signs[j]] = data[j][col]
    except Exception as e:
        print(f'exception: {e}')
        raise
    try:
        # Beta-angle table: sheet 0, columns turn_num / beta_angle / timestamp
        fname_beta = path_with_data + 'beta_' + year + '.xlsx'
        # NOTE(review): date_parser is deprecated in recent pandas — confirm version
        dateparse_beta = lambda x: datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
        data_beta = pd.read_excel(fname_beta, sheet_name=0, usecols=[0,1,2], header=4,
            names=['turn_num', 'beta_angle', 'timestamp'], parse_dates=['timestamp'],
            date_parser=dateparse_beta)
    except Exception as e:
        print(f'exception: {e}')
        raise
    return (data_dict, data_dict_borders, data_beta)
def plot_asotr_borders(year, path_with_data, ch, asotr_kit, data_borders,
        font=14, save_flag=True):
    """Plot ASOTR channel temperatures with the Beta angle on a twin axis and
    an interactive slider that shifts the reference (borders) curve in time.

    Args:
        year: year string, forwarded to get_raw_data.
        path_with_data: directory with ASOTR CSV and beta xlsx files.
        ch: channel mask string like '100000' (one flag per channel).
        asotr_kit: ASOTR kit id ('01' or '02').
        data_borders: date-window dict, see get_raw_data.
        font: font size for labels/legend.
        save_flag: when True, save the figure under ../plots/reports/.
    """
    # get from files and prepare data
    print_width = 20
    print_height = 12
    width = 1
    plot_windows = 1
    channels = list(map(int, ch))
    plot_task = {"temp": 1, "temp_set": 1, "pow": 1}
    ox_dtime_format = "%d.%m.%Y"
    legend = [
        "канал 1 (БРД1)",
        "канал 2 (БРД2)",
        "канал 3 (БРД3)",
        "канал 4 (БРД4)",
        "канал 5 (плита МУП МВН)",
        "канал 6 (плита МУП МВН)",
    ]
    legend_set = list(map(lambda x: x + " уставка", legend))
    width = [1, 1, 1, 1, 1, 1]
    width_set = [3, 3, 3, 3, 3, 3]
    marker = ["-", "--", "-.", "-", "-", "--"]
    width_arr = [1, 0.5, 0.2, 0.1, 1, 1]
    try:
        data_dict, data_dict_borders, data_beta = get_raw_data(year, path_with_data,
            asotr_kit, data_borders)
    except Exception as e:
        print(f'{e}')
        return
    if plot_windows == 1:
        fig, ax = plt.subplots(figsize=(print_width, print_height), dpi=200)
        if plot_task["temp"] == 1:
            for i in range(len(channels)):
                if channels[i] == 1:
                    # dashed reference curve from the borders dataset; kept in
                    # `line` so the slider callback can move it
                    line, = ax.plot(data_dict_borders["time_temp"],
                        data_dict_borders['temp'].iloc[:,i],
                        '--',
                        linewidth=1,
                        label=legend[i],)
                    ax.plot(data_dict["time_temp"],
                        data_dict['temp'].iloc[:,i],
                        marker[i],
                        linewidth=width[i],
                        label=legend[i],)
                    # remember the last plotted channel index for the slider
                    # (note: rebinds the `ch` parameter)
                    ch = i
        ax.tick_params(axis="both", width=1, labelsize=font)
        ax.grid(visible=True, linestyle="dotted")
        ax.set_ylabel("Температура, $^\circ$C", fontsize=font)
        ax.set_xlabel("Время", fontsize=font)
        ax.legend(fontsize=font)
        date_formatter = dates.DateFormatter(ox_dtime_format)
        ax.xaxis.set_major_formatter(date_formatter)
        # Beta angle on a secondary y-axis
        ax2 = ax.twinx()
        ax2.plot(data_beta['timestamp'], data_beta['beta_angle'], marker[4],
            color='r', linewidth=width[5], label='угол Бета')
        ax2.set_ylabel('Угол Бета', fontsize=font)
        ax2.tick_params(axis='y', width=1, labelsize=font)
        ax2.legend(fontsize=font, loc='lower right')
        plt.tight_layout()
        def update(val):
            # slider callback: shift the reference curve by `val` days
            shift_amount = val * pd.Timedelta(days=1)
            shifted_timestamps = data_dict_borders['time_temp'] + shift_amount
            # NOTE(review): the +5 degC offset looks like a visual separation
            # aid — confirm it is intentional
            scaled_values = data_dict_borders['temp'].iloc[:,ch] + 5
            line.set_data(shifted_timestamps, scaled_values)
            fig.canvas.draw_idle()
        slider_ax = plt.axes([0.25, 0.05, 0.65, 0.03])
        slider = Slider(slider_ax, 'Shift days', -100, 100, valinit=0)
        slider.on_changed(update)
        plt.show()
    if save_flag == True:
        pict_name = (f'../plots/reports/ASOTR{asotr_kit}_flight_T_P_{asotr.convert_to_str(channels)}_{data_borders["begin"][0:5].replace(".", "")}_{data_borders["end"][0:5].replace(".", "")}_{data_borders["end"][6:]}.png')
        fig.savefig(pict_name)
# script configuration: plot channel 1 of ASOTR kit 01 for the given window
ch = '100000'
year = '2025'
path_with_data = '../data/asotr/'
asotr_kit = '01'
data_borders = {'flag': True, 'begin': '15.03.2025', 'end': '01.05.2025'}
plot_asotr_borders(year, path_with_data, ch, asotr_kit, data_borders, font=6, save_flag=True)

View File

@ -2,7 +2,7 @@ import pandas as pd
import matplotlib.pyplot as plt
import sys
from importlib import reload
sys.path.append('/home/danila/Danila/work/MVN/Soft/PID/python/')
sys.path.append('./')
import asotr
reload(asotr)
import matplotlib.pyplot as plt
@ -11,7 +11,7 @@ import pandas as pd
from datetime import datetime
asotr_kit = 1
fname = f'../python_cyclo/data/asotr0{asotr_kit}_data_T.csv'
fname = f'../../python_cyclo/data/asotr0{asotr_kit}_data_T.csv'
dateparse = lambda x: datetime.strptime(x, "%d.%m.%Y %H:%M:%S.%f")
data = pd.read_csv(fname, sep=';', parse_dates=['timestamp'], date_parser=dateparse)
@ -25,7 +25,7 @@ name = f'{thermocycle_info["type"]}_response_{thermocycle_info["date"].replace("
plot_info = {'title': 'Реакция на импульсное воздействие',
'ox_dtime_format': "%H:%M:%S", 'legend_pos': ['upper right', 'lower left'],
'name_fig': f'{name}.png',
'name_fig': f'../plots/response/{name}.png',
'font': 10}
tstamp_orig_begin = cut_step_resp['orig_time_step_begin']
@ -35,7 +35,7 @@ _, interp_imp_resp = asotr.cut_norm_data(data_info['data'], tstamp_orig_begin,
accuracy=data_info['find_accuracy'])
interp_imp_resp.to_csv(f'./data/asotr0{asotr_kit}_{name}.csv', index=False, sep=';',
interp_imp_resp.to_csv(f'../data/asotr/response/asotr0{asotr_kit}_{name}.csv', index=False, sep=';',
encoding='utf-8-sig', decimal='.')
asotr.plot_imp_response(interp_imp_resp, data_info, plot_info, thermocycle_info)

View File

@ -0,0 +1,184 @@
import matplotlib.pyplot as plt
from matplotlib import dates
import pandas as pd
from datetime import datetime
import sys
# plot configuration
font = 6
print_width = 10
print_height = 6
width = 1
plot_windows = 2
# 0/1 mask: which of the six channels to plot
channels = [1, 1, 1, 1, 1, 1]
asotr_kit = '01'
# optional index window into the data; disabled -> plot everything
xborders=False
begin=0;
end=0;
path = '../data/asotr/'
fname_B = f'{path}beta_2025.xlsx'
fname = 'asotr' + asotr_kit + '_data_T.csv'
fname_pow = 'asotr' + asotr_kit + '_data_P.csv'
pict_name = '../plots/' + 'ASOTR' + asotr_kit + '_flight_T_P_all'
ox_dtime_format = '%Y.%m.%d %H:%M'
legend=['БРД1', 'БРД2', 'БРД3', 'БРД4', 'плита МУП МВН, датчик1', 'плита МУП МВН, датчик 2']
width=[1, 1, 1, 1, 1, 1]
marker = ['-', '-', '-', '-', '--', '-'];
width_arr = [1, 0.5, 0.2, 0.1, 1, 1]
dateparse = lambda x: datetime.strptime(x, "%d.%m.%Y %H:%M:%S.%f")
dparse_b = lambda x: datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
# Beta-angle table from the xlsx file
# NOTE(review): date_parser is deprecated in recent pandas — confirm version
data_b = pd.read_excel(fname_B,
    sheet_name=0,
    usecols=[0,1,2],
    header=4,
    names=['turn_num', 'beta_angle', 'timestamp'],
    parse_dates=['timestamp'],
    date_parser=dparse_b)
# temperature and power CSVs for the selected kit
fname = [path + fname, path + fname_pow]
data = [pd.read_csv(fname[0], sep=';', parse_dates=['timestamp'], date_parser=dateparse),
    pd.read_csv(fname[1], sep=';', parse_dates=['timestamp'], date_parser=dateparse)]
ch= [[], [], [], [], [], []]
ch_signs = ["temp", "pow"]
data_dict = {"temp": ch, "pow": ch, "time": []}
data_dict["time"] = data[0]['timestamp']
col=['ch1', 'ch2', 'ch3', 'ch4', 'ch5', 'ch6', 'ch7']
# transpose the per-row channel values into per-channel lists
for j in range(2):
    for index, row, in data[j].iterrows():
        for i in range(6):
            ch[i].append(float(row[col[i]]))
    data_dict[ch_signs[j]] = ch
    ch= [[], [], [], [], [], []]
# clamp the plotting window to the shorter of the two datasets
len_data = [len(data_dict['temp'][0]), len(data_dict['pow'][0])]
len_ = min(len_data)
if xborders == False:
    begin = 0
    end = len_ - 1
# single window: temperatures only
if plot_windows == 1:
    fig, ax = plt.subplots(figsize=(print_width, print_height), dpi=200)
    i = 0
    for elem in data_dict['temp']:
        if channels[i] == 1:
            ax.plot(data_dict['time'][begin:end], elem[begin:end], marker[i], linewidth=width[i], label=legend[i])
        i += 1
    ax.tick_params(axis="both", width=1, labelsize=font)
    ax.grid(visible=True, linestyle = 'dotted')
    ax.set_ylabel('Температура, $^\circ$C', fontsize=font)
    ax.set_xlabel('Время', fontsize=font)
    ax.legend(fontsize=font)
    date_formatter = dates.DateFormatter(ox_dtime_format)
    ax.xaxis.set_major_formatter(date_formatter)
    plt.tight_layout()
    fig.savefig(pict_name)
    plt.show()
# two stacked windows: temperatures + Beta angle above, power below
elif plot_windows == 2:
    fig = plt.figure(figsize=(print_width, print_height), dpi=200)
    ax1 = fig.add_subplot(2, 1, 1)
    ax2 = fig.add_subplot(2, 1, 2, sharex=ax1)
    i = 0
    for elem in data_dict['temp']:
        if channels[i] == 1:
            ax1.plot(data_dict['time'][begin:end], elem[begin:end], marker[i], linewidth=width[i], label=legend[i])
        i += 1
    # Beta angle on a secondary axis of the temperature subplot
    ax3 = ax1.twinx()
    ax3.plot(data_b['timestamp'], data_b['beta_angle'], marker[4], color='r', linewidth=width[5], label='угол Бета')
    ax3.set_ylabel('Угол Бета', fontsize=font)
    ax3.tick_params(axis="y", width=1, labelsize=font)
    ax3.legend(fontsize=font, loc='upper right')
    i = 0
    for elem in data_dict['pow']:
        if channels[i] == 1:
            ax2.plot(data_dict['time'][begin:end], elem[begin:end], marker[i], linewidth=width[i], label=legend[i])
        i += 1
    ax1.tick_params(axis="both", width=1, labelsize=font)
    ax1.grid(visible=True, linestyle = 'dotted')
    ax1.set_ylabel('Температура, $^\circ$C', fontsize=font)
    ax1.set_xlabel('Время', fontsize=font)
    ax1.legend(fontsize=font, loc='lower right')
    date_formatter = dates.DateFormatter(ox_dtime_format)
    ax1.xaxis.set_major_formatter(date_formatter)
    ax2.tick_params(axis="both", width=1, labelsize=font)
    ax2.grid(visible=True, linestyle = 'dotted')
    ax2.set_ylabel('Мощность, %', fontsize=font)
    ax2.set_xlabel('Время', fontsize=font)
    ax2.legend(fontsize=font, loc='lower right')
    date_formatter = dates.DateFormatter(ox_dtime_format)
    ax2.xaxis.set_major_formatter(date_formatter)
    plt.title('АСОТР ' + asotr_kit, fontsize=font)
    plt.tight_layout()
    fig.savefig(pict_name)
    plt.show()
# asotr_kit2 = '02'
# fname2 = 'asotr' + asotr_kit2 + '_data_T.csv'
# fname_pow2 = 'asotr' + asotr_kit2 + '_data_P.csv'
# legend2=['2 БРД1', '2 БРД2', '2 БРД3', '2 БРД4', '2 плита МУП МВН, датчик1', '2 плита МУП МВН, датчик 2']
# fname2 = [path + fname2, path + fname_pow2]
# data2 = [pd.read_csv(fname2[0], sep=';', parse_dates=['timestamp'], date_parser=dateparse),
# pd.read_csv(fname2[1], sep=';', parse_dates=['timestamp'], date_parser=dateparse)]
# ch= [[], [], [], [], [], []]
# ch_signs = ["temp", "pow"]
# data_dict2 = {"temp": ch, "pow": ch, "time": []}
# data_dict2["time"] = data2[0]['timestamp']
# col=['ch1', 'ch2', 'ch3', 'ch4', 'ch5', 'ch6', 'ch7']
# for j in range(2):
# for index, row, in data2[j].iterrows():
# for i in range(6):
# ch[i].append(float(row[col[i]]))
# data_dict2[ch_signs[j]] = ch
# ch= [[], [], [], [], [], []]
# len_data2 = [len(data_dict2['temp'][0]), len(data_dict2['pow'][0])]
# len_2 = min(len_data2)
# if xborders == False:
# begin2 = 0
# end2 = len_2 - 1
# i = 0
# for elem in data_dict2['temp']:
# if channels[i] == 1:
# print('legend2: ' + legend2[i])
# ax1.plot(data_dict2['time'][begin2:end2], elem[begin2:end2], marker[i], linewidth=width[i], label=legend2[i])
# i += 1
# ax2.plot(pd.Series(data_dict2['temp'][0]) - pd.Series(data_dict['temp'][0]))

View File

@ -8,14 +8,6 @@ import asotr
reload(asotr)
import pandas as pd
def convert_to_str(lst):
index = [i for i, x in enumerate(lst) if x == 1]
res = f"ch{index[0] + 1}"
for idx in index[1:]:
res += f"_{idx + 1}"
return res
def plot_asotr_borders(path_with_data, ch, asotr_kit, begin, end, font=14, cmd=0, show_flag=True):
print_width = 20
print_height = 12
@ -23,7 +15,7 @@ def plot_asotr_borders(path_with_data, ch, asotr_kit, begin, end, font=14, cmd=0
plot_windows = 2
channels = list(map(int, ch))
pict_name = (f'./reports/ASOTR{asotr_kit}_flight_T_P_{convert_to_str(channels)}_{begin[0:5].replace(".", "")}_{end[0:5].replace(".", "")}_{end[6:]}.png')
pict_name = (f'../plots/reports/ASOTR{asotr_kit}_flight_T_P_{asotr.convert_to_str(channels)}_{begin[0:5].replace(".", "")}_{end[0:5].replace(".", "")}_{end[6:]}.png')
plot_task = {"temp": 1, "temp_set": 1, "pow": 1}
ox_dtime_format = "%d.%m.%Y"
@ -44,8 +36,8 @@ def plot_asotr_borders(path_with_data, ch, asotr_kit, begin, end, font=14, cmd=0
width_arr = [1, 0.5, 0.2, 0.1, 1, 1]
# get from files and prepare data
start_date = begin + " 00:00:00"
end_date = end + " 23:59:59"
start_date = begin.replace('_', ' ')
end_date = end.replace('_', ' ')
try:
data, data_dict = asotr.get_data(path_with_data, asotr_kit, start_date, end_date, 'minutes')
except Exception as e:
@ -74,6 +66,7 @@ def plot_asotr_borders(path_with_data, ch, asotr_kit, begin, end, font=14, cmd=0
plt.tight_layout()
fig.savefig(pict_name)
print(f'figure saved: {pict_name}')
if show_flag == True:
plt.show()
@ -84,9 +77,8 @@ def plot_asotr_borders(path_with_data, ch, asotr_kit, begin, end, font=14, cmd=0
ax2 = fig.add_subplot(2, 1, 2, sharex=ax1)
if cmd == '1':
fname = './flight_cmd_human.txt'
try:
cmd_human = pd.read_csv('./data/cmd_human.csv',
cmd_human = pd.read_csv('../data/cmd_asotr/cmd_human.csv',
delimiter=';', names=['timestamp', 'cmd'])
except Exception as e:
print(f'Error parsing file: {e}')
@ -161,6 +153,7 @@ def plot_asotr_borders(path_with_data, ch, asotr_kit, begin, end, font=14, cmd=0
fig.suptitle(title, fontsize=font)
plt.tight_layout()
fig.savefig(pict_name)
print(f'figure saved: {pict_name}')
if show_flag == True:
plt.show()

View File

@ -1,23 +1,21 @@
#! /bin/bash
if [ $# != 5 ]
if [ $# != 2 ]
then
echo "erorr use $0. Right use this script: "
echo "$0 path_to_csv_astor_data/ 25.02.2025 10.03.2025 14 0"
echo "$0 25.02.2025_00:00:00 10.03.2025_23:59:59"
else
path_csv_data=$1
begin=$2
end=$3
font=$4
cmd_flag=$5
path_csv_data=../data/asotr/
begin=$1
end=$2
python3 plot_flight_borders.py -s ${path_csv_data} -c 111100 -a 01 -b ${begin} -e ${end} -f ${font} --cmd ${cmd_flag}
python3 plot_flight_borders.py -s ${path_csv_data} -c 001000 -a 01 -b ${begin} -e ${end} -f ${font} --cmd ${cmd_flag}
python3 plot_flight_borders.py -s ${path_csv_data} -c 000011 -a 01 -b ${begin} -e ${end} -f ${font} --cmd ${cmd_flag}
python3 plot_flight_borders.py -s ${path_csv_data} -c 111100 -a 02 -b ${begin} -e ${end} -f ${font} --cmd ${cmd_flag}
python3 plot_flight_borders.py -s ${path_csv_data} -c 010100 -a 02 -b ${begin} -e ${end} -f ${font} --cmd ${cmd_flag}
python3 plot_flight_borders.py -s ${path_csv_data} -c 010000 -a 02 -b ${begin} -e ${end} -f ${font} --cmd ${cmd_flag}
python3 plot_flight_borders.py -s ${path_csv_data} -c 000100 -a 02 -b ${begin} -e ${end} -f ${font} --cmd ${cmd_flag}
python3 plot_flight_borders.py -s ${path_csv_data} -c 000011 -a 02 -b ${begin} -e ${end} -f ${font} --cmd ${cmd_flag}
python3 plot_flight_borders.py -s ${path_csv_data} -c 111100 -a 01 -b ${begin} -e ${end}
python3 plot_flight_borders.py -s ${path_csv_data} -c 001000 -a 01 -b ${begin} -e ${end}
python3 plot_flight_borders.py -s ${path_csv_data} -c 000011 -a 01 -b ${begin} -e ${end}
python3 plot_flight_borders.py -s ${path_csv_data} -c 111100 -a 02 -b ${begin} -e ${end}
python3 plot_flight_borders.py -s ${path_csv_data} -c 010100 -a 02 -b ${begin} -e ${end}
python3 plot_flight_borders.py -s ${path_csv_data} -c 010000 -a 02 -b ${begin} -e ${end}
python3 plot_flight_borders.py -s ${path_csv_data} -c 000100 -a 02 -b ${begin} -e ${end}
python3 plot_flight_borders.py -s ${path_csv_data} -c 000011 -a 02 -b ${begin} -e ${end}
fi

View File

@ -1,6 +1,6 @@
import sys
from importlib import reload
sys.path.append('/home/danila/Danila/work/MVN/Soft/PID/python/')
sys.path.append('./')
import asotr
reload(asotr)
import matplotlib.pyplot as plt
@ -8,8 +8,8 @@ from matplotlib import dates
import numpy as np
from datetime import timedelta
pict_name = 'periods_profile_10042025.png'
path = '/home/danila/Danila/work/MVN/Soft/asotr_csv/data/'
path = '../data/asotr/'
pict_name = '../plots/periods_profile/periods_profile_10042025.png'
channel = 'ch1'
asotr_kit = '01'
start_date = '24.04.2025 22:30:00'
@ -37,10 +37,6 @@ for idx, elem in enumerate(peaks):
delta = time1.iloc[elem] - peaks_forecast[idx-1]
delta_sec.append(delta.total_seconds())
# asotr.plot_signal_profile(time1, data1, [], [], method='peaks', shift_flag=shift)
# asotr.plot_signal_profile(time1, data1, periods_t[0], periods[0], method='corr', shift_flag=shift, peak_height=0.7)
time_, periods_ = asotr.get_signal_profile_corr(time1, data1, periods[0], shift, peak_height=0.7)
print(f'Найдено {len(periods_)} периодов.')
@ -66,7 +62,6 @@ for elem in periods_:
delta1 = elem.values - periods[0].values
delta.append(delta1)
# ax3.plot(delta[1], label=f'период 1', marker='o', linewidth=2)
for idx, elem in enumerate(delta):
if idx == len(delta) - 1:
ax3.plot(elem, label=f'период {idx}', marker='|', linewidth=2)
@ -77,8 +72,6 @@ for idx, elem in enumerate(delta):
elif idx > 0:
ax3.plot(elem, label=f'период {idx}')
# ax4.plot(delta_sec)
ax3.set_title(r'$\Delta$$T_i$ = $T_i$ - $T_1$')
ax1.set_ylabel('Температура, $^\circ$C')
ax2.set_ylabel('Температура, $^\circ$C')
@ -87,7 +80,6 @@ ax3.set_xlabel("Время, мин.")
ax1.grid(True)
ax2.grid(True)
ax3.grid(True)
# ax4.grid(True)
ax2.legend()
ax3.legend()
fig.savefig(pict_name)

View File

@ -6,7 +6,7 @@ reload(asotr)
import pandas as pd
from datetime import datetime, timedelta
path = './data/experiments/'
path = '../data/experiments/'
timestamp = '04.05.2025 00:42:00'
cyclogram_file = 'cyclogram_step_ident_ch3.xls'

View File

@ -0,0 +1,37 @@
import os
import tarfile
def extract_tar_gz(filepath, extract_dir):
    """Unpack a ``.tar.gz`` archive into *extract_dir*.

    Errors are printed rather than raised so that a batch run over many
    archives can continue after a single bad file.

    :param filepath: path to the ``.tar.gz`` archive
    :param extract_dir: directory the archive content is extracted into
    """
    try:
        with tarfile.open(filepath, "r:gz") as tar:
            # Guard against path traversal (CVE-2007-4559): refuse any member
            # whose resolved path would escape extract_dir.
            base = os.path.realpath(extract_dir)
            for member in tar.getmembers():
                target = os.path.realpath(os.path.join(extract_dir, member.name))
                if target != base and not target.startswith(base + os.sep):
                    raise ValueError(f"unsafe path in archive: {member.name}")
            tar.extractall(path=extract_dir)
        print(f"[+] Extracted: {filepath}")
    except Exception as e:
        print(f"[!] Error extracting {filepath}: {e}")
def should_extract(archive_path):
    """Return True if *archive_path* should be unpacked.

    An archive is skipped when its name matches the ignore list, or when the
    target directory (the path with the double ``.tar.gz`` suffix stripped)
    already exists — i.e. it was unpacked on a previous run.
    """
    # These telemetry archives are intentionally never unpacked.
    list_ignore = ['brd.tar.gz', 'aux_data.tar.gz', 'uvi.tar.gz', 'aux.tar.gz']
    if any(elem in archive_path for elem in list_ignore):
        return False
    # Strip both suffixes: "x.tar.gz" -> "x.tar" -> "x".
    dirname = os.path.splitext(os.path.splitext(archive_path)[0])[0]
    return not os.path.isdir(dirname)
def walk_and_extract(start_dir):
    """Recursively traverse *start_dir* and unpack every eligible .tar.gz."""
    for current_root, _dirs, filenames in os.walk(start_dir):
        archives = (name for name in filenames if name.endswith(".tar.gz"))
        for name in archives:
            full_path = os.path.join(current_root, name)
            # Destination directory: archive path minus the ".tar.gz" suffixes.
            destination = os.path.splitext(os.path.splitext(full_path)[0])[0]
            if should_extract(full_path):
                extract_tar_gz(full_path, destination)
if __name__ == "__main__":
    import sys

    # Expect exactly one argument: the root directory to scan.
    if len(sys.argv) == 2:
        walk_and_extract(sys.argv[1])
    else:
        print("Usage: python recursive_unpack_targz.py /path/to/start/dir")

294
bin/step_response.py Normal file
View File

@ -0,0 +1,294 @@
import pandas as pd
import matplotlib.pyplot as plt
import sys
from importlib import reload
sys.path.append('./')
import asotr
reload(asotr)
import matplotlib.pyplot as plt
from matplotlib import dates
import pandas as pd
from datetime import datetime
# Which ASOTR kit's telemetry to analyse; also used in input/output file names.
asotr_kit = 1
# fname = f'../python_cyclo/data/asotr0{asotr_kit}_data_T.csv'
fname = f'../data/asotr/asotr0{asotr_kit}_data_T.csv'
# Timestamps in the CSV look like "25.04.2025 01:18:01.000".
dateparse = lambda x: datetime.strptime(x, "%d.%m.%Y %H:%M:%S.%f")
# NOTE(review): `date_parser` is deprecated in pandas 2.x — presumably this
# script runs on pandas < 2.0; verify before upgrading pandas.
data = pd.read_csv(fname, sep=';', parse_dates=['timestamp'], date_parser=dateparse)
# date = '20.03.2025'
# period = '1 мин'
# time_begin_orig = date + ' 17:10:11'
# time_begin1 = date + ' 18:10:17'
# time_begin2 = date + ' 19:10:23'
# step_begin = time_begin2
# duration = 3600
# accuracy = 'seconds'
# name_fig = 'step_response_KDI_20242003.png'
# date = '21.03.2025'
# period = '1 мин'
# time_begin_orig = date + ' 14:00:11'
# time_begin1 = date + ' 15:00:16'
# time_begin2 = date + ' 16:00:16'
# step_begin = time_begin2
# duration = 3600
# accuracy = 'seconds'
# name_fig = 'step_response_KDI_20242103.png'
# date = '24.03.2025'
# period = '1 сек'
# time_begin_orig = date + ' 19:45:11'
# time_begin1 = date + ' 20:45:13'
# time_begin2 = date + ' 21:45:17'
# step_begin = time_begin2
# duration = 3600
# accuracy = 'seconds'
# name_fig = 'step_response_KDI_20242403.png'
# interp = {'method': 'polynomial', 'order': 1}
# thermocycle_info = {'date': '01.04.2025',
# 'time_begin': ['01.04.2025 16:27:00', '01.04.2025 18:00:00'],
# 'duration_sec': 92*60, 'type': 'step'}
# cut_step_resp = {'time_step_begin': '01.04.2025 18:53:21', 'step_duration': 25*60}
# data_info = {'data': data, 'device': 'KDI', 'channel': 'ch1', 'period': '1 мин',
# 'find_accuracy': 'seconds'}
# name = f'{thermocycle_info["type"]}_response_{data_info["device"]}_{thermocycle_info["date"].replace(".","")}'
# plot_info = {'title': 'Реакция на ступенчатое воздействие',
# 'ox_dtime_format': "%H:%M:%S", 'legend_pos': ['upper left', 'lower left'],
# 'name_fig': f'{name}.png', 'font': 10}
# Interpolation settings shared by every experiment below.
interp = {'method': 'polynomial', 'order': 1}

# One row per thermocycle experiment, replacing 16 copy-pasted stanzas:
# (channel, date, (cycle start 1, cycle start 2), cycle duration [min],
#  response type tag, response type in Russian, step begin, step duration [min])
_EXPERIMENTS = [
    ('ch1', '25.04.2025', ('24.04.2025 22:46:32', '25.04.2025 00:19:33'), 92,
     'step', 'ступенчатое', '25.04.2025 01:18:01', 30),
    ('ch2', '25.04.2025', ('24.04.2025 22:46:32', '25.04.2025 00:19:33'), 92,
     'step1_to2', 'ступенчатое', '25.04.2025 01:18:01', 30),
    ('ch1', '25.04.2025', ('25.04.2025 01:52:34', '25.04.2025 03:25:34'), 92,
     'impulse', 'импульсное', '25.04.2025 04:24:00', 15),
    ('ch2', '25.04.2025', ('25.04.2025 01:52:34', '25.04.2025 03:25:34'), 92,
     'impulse1_to2', 'импульсное', '25.04.2025 04:24:00', 20),
    ('ch2', '30.04.2025', ('29.04.2025 22:02:54', '29.04.2025 23:35:54'), 93,
     'step', 'ступенчатое', '30.04.2025 00:36:01', 30),
    ('ch1', '30.04.2025', ('29.04.2025 22:02:54', '29.04.2025 23:35:54'), 93,
     'step2_to1', 'ступенчатое', '30.04.2025 00:36:01', 30),
    ('ch2', '30.04.2025', ('30.04.2025 01:09:55', '30.04.2025 02:41:54'), 93,
     'impulse', 'импульсное', '30.04.2025 03:42:00', 15),
    ('ch1', '30.04.2025', ('30.04.2025 01:09:55', '30.04.2025 02:41:54'), 93,
     'impulse2_to1', 'импульсное', '30.04.2025 03:42:00', 20),
    ('ch4', '02.05.2025', ('01.05.2025 22:05:30', '01.05.2025 23:38:40'), 93,
     'step', 'ступенчатое', '02.05.2025 00:39:00', 30),
    ('ch3', '02.05.2025', ('01.05.2025 22:05:30', '01.05.2025 23:38:40'), 93,
     'step4_to3', 'ступенчатое', '02.05.2025 00:39:00', 30),
    ('ch4', '02.05.2025', ('02.05.2025 01:12:30', '02.05.2025 02:46:02'), 93,
     'impulse', 'импульсное', '02.05.2025 03:45:02', 15),
    ('ch3', '02.05.2025', ('02.05.2025 01:12:30', '02.05.2025 02:46:02'), 93,
     'impulse4_to3', 'импульсное', '02.05.2025 03:45:02', 20),
    ('ch3', '04.05.2025', ('03.05.2025 22:12:11', '03.05.2025 23:45:10'), 93,
     'step', 'ступенчатое', '04.05.2025 00:42:01', 26),
    ('ch4', '04.05.2025', ('03.05.2025 22:12:11', '03.05.2025 23:45:10'), 93,
     'step3_to4', 'ступенчатое', '04.05.2025 00:42:01', 30),
    ('ch3', '04.05.2025', ('04.05.2025 01:19:10', '04.05.2025 02:52:11'), 93,
     'impulse', 'импульсное', '04.05.2025 03:48:01', 15),
    ('ch4', '04.05.2025', ('04.05.2025 01:19:10', '04.05.2025 02:52:11'), 93,
     'impulse3_to4', 'импульсное', '04.05.2025 03:48:01', 20),
]

# Expand the table into the three parallel lists the plotting loop consumes.
data_info_list = []
thermocycle_info_list = []
cut_step_resp_list = []
for (channel, date, time_begin, dur_min, resp_type, resp_type_ru,
     step_begin, step_min) in _EXPERIMENTS:
    data_info_list.append({'data': data, 'device': 'летный', 'channel': channel,
                           'period': '1 мин', 'find_accuracy': 'seconds'})
    thermocycle_info_list.append({'date': date, 'time_begin': list(time_begin),
                                  'duration_sec': dur_min * 60,
                                  'type': resp_type, 'type_ru': resp_type_ru})
    cut_step_resp_list.append({'time_step_begin': step_begin,
                               'step_duration': step_min * 60})
def get_step_response(data_info, thermocycle_info, cut_step_resp):
    """Plot and export the step/impulse response for one experiment.

    Builds figure/CSV names from the experiment metadata, plots the response
    inside its thermocycle via the project `asotr` module, cuts the response
    segment out of the telemetry, saves it as CSV and renders a standalone
    figure (also shown interactively via plt.show()).

    Parameters (dicts assembled at module level):
      data_info        -- telemetry DataFrame plus channel/device/period info
      thermocycle_info -- thermocycle dates, duration and response type
      cut_step_resp    -- step start timestamp and duration to keep
    Reads module-level globals: `interp`, `asotr_kit`.
    """
    # File stem, e.g. "ch1_step_response_летный_25042025".
    name = f'{data_info["channel"]}_{thermocycle_info["type"]}_response_{data_info["device"]}_{thermocycle_info["date"].replace(".","")}'
    plot_info = {'title': 'Реакция на ' + thermocycle_info['type_ru'] + ' воздействие',
        'ox_dtime_format': "%H:%M:%S", 'legend_pos': ['upper left', 'lower left'],
        'name_fig': f'../plots/response/{name}.png', 'font': 10}
    # First figure: response shown in the context of its thermocycle.
    asotr.plot_step_response_in_thermocycle(data_info, thermocycle_info, interp,
        cut_step_resp, plot_info)
    # Extract the cut/normalized response; the two ignored returns are
    # presumably auxiliary series — see asotr.get_step_response_diff.
    step_resp_cut, _, _ = asotr.get_step_response_diff(data_info['data'], thermocycle_info,
        channel=data_info['channel'], interp=interp, accuracy=data_info['find_accuracy'],
        cut_step_resp=cut_step_resp)
    max_ = len(step_resp_cut)
    step_resp_cut.to_csv(f'../data/asotr/response/asotr0{asotr_kit}_{data_info["channel"]}_{thermocycle_info["type"]}_{thermocycle_info["date"].replace(".","")}.csv', index=False, sep=';', encoding='utf-8-sig', decimal='.')
    title = f'{plot_info["title"]}, канал {data_info["channel"][2]} АСОТР, {data_info["device"]} СПИН-X1-МВН, период опроса {data_info["period"]} ({thermocycle_info["date"]})'
    # Second figure: the cut response alone, saved under plot_info["name_fig"].
    fig = plt.figure(figsize=(10, 6), dpi=200)
    fig.suptitle(title, fontsize=plot_info['font'])
    ax1 = fig.add_subplot(1,1,1)
    ax1.plot(step_resp_cut['timestamp'].iloc[0:max_], step_resp_cut['temp'].iloc[0:max_], '-',
        label='реакция на ' + thermocycle_info['type_ru'] + ' воздействие с термоциклом')
    date_formatter = dates.DateFormatter(plot_info['ox_dtime_format'])
    ax1.xaxis.set_major_formatter(date_formatter)
    ax1.legend(loc=plot_info["legend_pos"][0], fontsize=plot_info['font'])
    ax1.grid(True)
    ax1.tick_params(axis='both', width=1, labelsize=plot_info['font'])
    ax1.set_ylabel(r'$T_{norm}$, $^\circ$C', fontsize=plot_info['font'])
    ax1.set_xlabel('Время, ЧЧ:MM:CC', fontsize=plot_info['font'])
    plt.tight_layout()
    fig.savefig(plot_info["name_fig"])
    plt.show()
# Process every configured experiment in turn. zip() replaces the original
# enumerate-and-reindex pattern whose loop variable `elem` was unused.
for di, ti, ci in zip(data_info_list, thermocycle_info_list, cut_step_resp_list):
    get_step_response(di, ti, ci)

View File

@ -2,7 +2,7 @@ import pandas as pd
import matplotlib.pyplot as plt
import sys
from importlib import reload
sys.path.append('/home/danila/Danila/work/MVN/Soft/PID/python/')
sys.path.append('./')
import asotr
reload(asotr)
@ -12,8 +12,7 @@ import pandas as pd
from datetime import datetime
asotr_kit = 1
fname = f'../python_cyclo/data/asotr0{asotr_kit}_data_T.csv'
fname = f'../../python_cyclo/data/asotr0{asotr_kit}_data_T.csv'
dateparse = lambda x: datetime.strptime(x, "%d.%m.%Y %H:%M:%S.%f")
data = pd.read_csv(fname, sep=';', parse_dates=['timestamp'], date_parser=dateparse)
@ -71,7 +70,7 @@ data_info = {'data': data, 'channel': 'ch1', 'period': '1 мин',
'find_accuracy': 'seconds'}
plot_info = {'title': 'Реакция на ступенч. воздейств.',
'ox_dtime_format': "%H:%M:%S", 'legend_pos': ['lower right', 'lower left'],
'name_fig': 'step_response_diff_KDI_20240401.png', 'font': 10}
'name_fig': '../plots/response/step_response_diff_KDI_20240401.png', 'font': 10}
step_resp_cut, _, _ = asotr.get_step_response_diff(data_info['data'], thermocycle_info,
channel=data_info['channel'], interp=interp, accuracy=data_info['find_accuracy'],
@ -85,7 +84,7 @@ _, interp_step_resp = asotr.cut_norm_data(data_info['data'], tstamp_orig_begin,
max_ = min(len(interp_step_resp), len(step_resp_cut))
interp_step_resp.to_csv(f'./data/asotr0{asotr_kit}_{thermocycle_info["type"]}_{thermocycle_info["date"].replace(".","")}.csv', index=False, sep=';', encoding='utf-8-sig', decimal='.')
step_resp_cut.to_csv(f'../data/asotr/response/asotr0{asotr_kit}_{thermocycle_info["type"]}_{thermocycle_info["date"].replace(".","")}.csv', index=False, sep=';', encoding='utf-8-sig', decimal='.')
title = f'{plot_info["title"]}, канал {data_info["channel"][2]} АСОТР КДИ СПИН-X1-МВН, период опроса {data_info["period"]} ({thermocycle_info["date"]})'

View File

@ -1,7 +1,7 @@
import sys
import statistics
from importlib import reload
sys.path.append('/home/danila/Danila/work/MVN/Soft/PID/python/')
sys.path.append('./')
import asotr
reload(asotr)
from datetime import datetime, timedelta
@ -9,7 +9,7 @@ import matplotlib.pyplot as plt
from matplotlib import dates
from datetime import timedelta
path = '/home/danila/Danila/work/MVN/Soft/asotr_csv/data/'
path = '../data/asotr/'
channel = 'ch1'
asotr_kit = '01'
start_date = '25.04.2025 00:00:00'
@ -34,7 +34,7 @@ _, _, peaks = asotr.find_periods(time1, data1, shift_flag=False, peaks='max')
peaks_forecast = asotr.get_peak_temp_forecast(time1.iloc[peaks[0]], num_peaks_forecast)
with open('peaks_forecast.txt', 'w') as file:
with open('../data/asotr/peaks_forecast.txt', 'w') as file:
for elem in peaks_forecast:
file.write(f'{str(elem)}\n')

View File

@ -1 +0,0 @@
,danila,danila-IdeaPad,21.05.2025 11:32,file:///home/danila/.config/libreoffice/4;

Binary file not shown.

4
data/cmd_asotr/concat_data.sh Executable file
View File

@ -0,0 +1,4 @@
#!/bin/bash
# Concatenate all per-session ASOTR command CSVs from ./csv_data into one
# file, keeping a single header line taken from the first CSV.
set -e  # abort if csv_data is missing instead of writing into the wrong dir

file_itog=../all_flight_cmd_asotr.csv
cd ./csv_data
# Header: first line of the first file in the directory.
ls ./ | head -1 | xargs head -1 > ${file_itog}
# Body: every data row (lines starting with a digit) from all files, in name
# order. -type f keeps directories away from cat (the original fed "./" too).
find ./ -type f | sort | xargs cat | grep -P '^[0-9].*' >> ${file_itog}

View File

#!/bin/bash
# Collect the sorted list of ASOTR command/telemetry exchange files from the
# flight data tree into flight_cmd_asotr.txt.
# Optional first argument overrides the default data root (backward compatible).
data_root=${1:-/home/danila/Danila/work/MVN/flight/data/}
find "${data_root}" -type f | grep -P 'out_trans_[0-9]{2}.asotr|out_exec_asotr|in_exec_asotr_|in_trans.asotr0' | sort > flight_cmd_asotr.txt

View File

@ -1,67 +0,0 @@
import pandas as pd
import matplotlib.pyplot as plt
import sys
from importlib import reload
sys.path.append('/home/danila/Danila/work/MVN/Soft/PID/python/')
import asotr
reload(asotr)
import matplotlib.pyplot as plt
from matplotlib import dates
import pandas as pd
from datetime import datetime
asotr_kit = 1
# fname = f'../python_cyclo/data/asotr0{asotr_kit}_data_T.csv'
fname = f'../python_cyclo/data/asotr0{asotr_kit}_data_T.csv'
dateparse = lambda x: datetime.strptime(x, "%d.%m.%Y %H:%M:%S.%f")
data = pd.read_csv(fname, sep=';', parse_dates=['timestamp'], date_parser=dateparse)
# date = '20.03.2025'
# period = '1 мин'
# time_begin_orig = date + ' 17:10:11'
# time_begin1 = date + ' 18:10:17'
# time_begin2 = date + ' 19:10:23'
# step_begin = time_begin2
# duration = 3600
# accuracy = 'seconds'
# name_fig = 'step_response_KDI_20242003.png'
# date = '21.03.2025'
# period = '1 мин'
# time_begin_orig = date + ' 14:00:11'
# time_begin1 = date + ' 15:00:16'
# time_begin2 = date + ' 16:00:16'
# step_begin = time_begin2
# duration = 3600
# accuracy = 'seconds'
# name_fig = 'step_response_KDI_20242103.png'
# date = '24.03.2025'
# period = '1 сек'
# time_begin_orig = date + ' 19:45:11'
# time_begin1 = date + ' 20:45:13'
# time_begin2 = date + ' 21:45:17'
# step_begin = time_begin2
# duration = 3600
# accuracy = 'seconds'
# name_fig = 'step_response_KDI_20242403.png'
interp = {'method': 'polynomial', 'order': 2}
thermocycle_info = {'date': '01.04.2025',
'time_begin': ['01.04.2025 16:27:00', '01.04.2025 18:00:00'],
'duration_sec': 92*60, 'type': 'step'}
cut_step_resp = {'time_step_begin': '01.04.2025 18:53:21', 'step_duration': 25*60}
data_info = {'data': data, 'device': 'KDI', 'channel': 'ch1', 'period': '1 мин',
'find_accuracy': 'seconds'}
name = f'{thermocycle_info["type"]}_response_{data_info["device"]}_{thermocycle_info["date"].replace(".","")}'
plot_info = {'title': 'Реакция на ступенчатое воздействие',
'ox_dtime_format': "%H:%M:%S", 'legend_pos': ['upper left', 'lower left'],
'name_fig': f'{name}.png', 'font': 10}
asotr.plot_step_response_in_thermocycle(data_info, thermocycle_info, interp,
cut_step_resp, plot_info)