Commit 5bf31d51 authored by Jean-Marie Favreau

Add a script to find a good internal shift (and split the data block if needed)

parent 11c0658c
#!/usr/bin/env python3
# coding: utf-8

import argparse
import math
import sys
from datetime import datetime

from construct import *

parser = argparse.ArgumentParser(description="Convert RM42 (UKK) binary log files to CSV format.")
parser.add_argument('input', metavar="INPUT", type=str, help='Input file (.dat format)')
args = parser.parse_args()
def binToHexUKK(val):
    # Decode a BCD (binary-coded decimal) byte: the high nibble holds the tens
    # digit and the low nibble the units digit.
    big = val // 16
    little = val % 16
    return big * 10 + little
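
# For instance, the BCD byte 0x25 (37 in decimal) decodes to
# binToHexUKK(0x25) == 2 * 10 + 5 == 25.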
input_file = args.input.strip()
print("Input:", input_file)

encoding = Int16sl

# One data block: a 16-bit marker (0xAAAA when the header is valid), a
# BCD-encoded date and time, then 84 entries of 3 signed 16-bit values each.
d_1sec = Struct(
    "aaaa" / Int16ub,
    "day" / Int8ub,
    "month" / Int8ub,
    "year" / Int8ub,
    "hour" / Int8ub,
    "minutes" / Int8ub,
    "seconds" / Int8ub,
    "entries" / Array(84, Array(3, encoding)))
step_ms = int(1000. / 84)
step_us = step_ms * 1000
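
# Each block holds 84 entries, so the nominal entry period is 1000 / 84 ≈ 11.9 ms
# (truncated to 11 ms above); the CSV output at the end of the script spaces
# entries at 1/100 s instead.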

class Block:
    invalid_year = 4000

    def __init__(self, year, month, day, hour, minutes, seconds):
        self.year = year
        self.month = month
        self.day = day
        self.hour = hour
        self.minutes = minutes
        self.seconds = seconds
        self.valid = self.year != Block.invalid_year
        self.datetime = datetime(self.year, self.month, self.day, self.hour, self.minutes, self.seconds, 0)
        self.records = []

    @staticmethod
    def fake_block():
        # Placeholder block used when the header does not carry a valid timestamp.
        return Block(Block.invalid_year, 1, 1, 0, 0, 0)

    def is_valid(self):
        return self.valid

    def add_record(self, record):
        self.records.append(record)

    def get_timestamp(self):
        return int(self.datetime.timestamp())

class Batch:
    def __init__(self, block):
        self.blocks = [block]
        self.local_timestamp = 0
        self.max_shift = 1.0
        self.required_shift = 0.0

    def concatenate_if_possible(self, block):
        tSecBlock = block.get_timestamp()
        tSecLastBlock = self.blocks[-1].get_timestamp()
        if not (tSecBlock == tSecLastBlock or tSecBlock == tSecLastBlock + 1):
            # not concatenable (there is a gap between the two blocks)
            return False
        next_local_timestamp = self.local_timestamp + 84
        deltaS = self.get_delta_from_first(block)
        if math.floor(next_local_timestamp / 100) < deltaS:
            required_shift = deltaS - next_local_timestamp / 100
            if required_shift < self.max_shift:
                # concatenable with a shift: update the minimum required shift
                if required_shift > self.required_shift:
                    self.required_shift = required_shift
                # add element
                self.local_timestamp = next_local_timestamp
                self.blocks.append(block)
                return True
            else:
                # not concatenable: the required shift is incompatible with the
                # precomputed maximum shift
                return False
        else:
            # concatenable without a shift: update the maximum allowed shift
            m = next_local_timestamp / 100 + 1 - deltaS
            if self.max_shift > m:
                self.max_shift = m
            # add element
            self.local_timestamp = next_local_timestamp
            self.blocks.append(block)
            return True

    def get_first_datetime(self):
        return self.blocks[0].datetime.timestamp()

    def get_delta_from_first(self, block):
        return block.datetime.timestamp() - self.get_first_datetime()

    def get_first_datetime_with_shift(self):
        return self.get_first_datetime() + self.required_shift
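
# Illustration of the shift logic above (values chosen for the example): block
# headers have 1 s resolution while the output spaces the 84 records of a block
# at 1/100 s, i.e. 0.84 s of output time per block. With headers ticking
# 0 s, 1 s, 2 s, ... each appended block adds 1 - 0.84 = 0.16 s to the required
# shift; the block whose header is 7 s after the first would need a shift of
# 7 - 5.88 = 1.12 s, which exceeds max_shift (1.0 s), so concatenate_if_possible()
# returns False and the data is split into a new Batch.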

batches = []

with open(input_file, 'rb') as f_input:
    while True:
        try:
            result = d_1sec.parse_stream(f_input)
            if result.aaaa == 43690:
                # 0xAAAA marker: the header carries a valid BCD timestamp
                year = 2000 + binToHexUKK(result.year)
                month = binToHexUKK(result.month)
                day = binToHexUKK(result.day)
                hour = binToHexUKK(result.hour)
                minutes = binToHexUKK(result.minutes)
                seconds = binToHexUKK(result.seconds)
                b = Block(year, month, day, hour, minutes, seconds)
            else:
                if len(batches) == 0:
                    continue
                else:
                    # no valid timestamp in the header: use a fake one
                    b = Block.fake_block()
            for entry in result.entries:
                b.add_record(entry)
            if len(batches) == 0 or not batches[-1].concatenate_if_possible(b):
                batches.append(Batch(b))
        except Exception:
            print("Skip end of file")
            break

# Emit one CSV line per record: the batch start time plus the required shift,
# advanced by 1/100 s per record, followed by the three sample values.
for b in batches:
    i = 0
    for bb in b.blocks:
        if bb.is_valid():
            for e in bb.records:
                entry_csv = ";".join([str(v) for v in e])
                print('{:f};{:s}'.format(b.get_first_datetime_with_shift() + i / 100, entry_csv))
                i += 1
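
# Typical invocation (file names here are only examples): run the script on a
# .dat log and redirect standard output to obtain the CSV, e.g.
#   python3 this_script.py RECORD.dat > RECORD.csv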