Commit bf4d41e3 authored by Jean-Marie Favreau

Add a small step between each block

parent 5fcf0767
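
For reference, a minimal sketch (not part of the commit) of what the new spacing does to the emitted timestamps. It assumes the values visible in the diff below: 84 samples per block, a default spacing of 4 ticks, and one tick = 1/100 s (the `i / 100` term in `to_array`); `first_datetime` and `block_timestamps` are hypothetical names used only for this illustration.

    # Illustration only: how a spacing of 4 ticks shifts the start of the
    # next block. Values mirror the diff: block_size = 84, spacing = 4,
    # timestamps in hundredths of a second.
    block_size = 84
    spacing = 4
    first_datetime = 0.0  # hypothetical batch start time, in seconds

    def block_timestamps(block_index, step):
        """Timestamps of one block when consecutive blocks are `step` ticks apart."""
        start = block_index * step
        return [first_datetime + (start + i) / 100 for i in range(block_size)]

    # Before: block 1 starts exactly where block 0 ends.
    print(block_timestamps(1, block_size)[0])            # 0.84
    # After: block 1 starts one small step (0.04 s) later.
    print(block_timestamps(1, block_size + spacing)[0])  # 0.88
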
@@ -14,6 +14,7 @@ parser.add_argument('input', metavar="INPUT", type=str, help='Input file (.dat f
 args = parser.parse_args()
 
+debug = False
 
 def binToHexUKK(val):
     big = int(val / 16)
@@ -23,7 +24,7 @@ def binToHexUKK(val):
 input_file = args.input.strip()
 print("Input:", input_file)
 
+block_size = 84
 encoding = Int16sl
 
 d_1sec = Struct(
@@ -34,11 +35,8 @@ d_1sec = Struct(
     "hour" / Int8ub,
     "minutes" / Int8ub,
     "seconds" / Int8ub,
-    "entries" / Array(84, Array(3, encoding)))
+    "entries" / Array(block_size, Array(3, encoding)))
 
-step_ms = int(1000. / 84)
-step_us = step_ms * 1000
 
 class Block:
@@ -67,11 +65,12 @@ class Block:
         return int(self.datetime.timestamp())
 
 class Batch:
-    def __init__(self, block):
+    def __init__(self, block, spacing):
         self.blocks = [block]
         self.local_timestamp = 0
         self.max_shift = 1.0
         self.required_shift = 0.0
+        self.spacing = spacing
 
     def concatenate_if_possible(self, block):
         tSecBlock = block.get_timestamp()
@@ -80,11 +79,11 @@ class Batch:
             # is not concatenable (existing gap)
             return False
 
-        next_local_timestamp = self.local_timestamp + 84
+        next_local_timestamp = self.local_timestamp + block_size + self.spacing
         deltaS = self.get_delta_from_first(block)
-        if math.floor(next_local_timestamp/100) < deltaS:
-            required_shift = deltaS - next_local_timestamp/100
+        if math.floor(next_local_timestamp / 100) < deltaS:
+            required_shift = deltaS - next_local_timestamp / 100
             if required_shift < self.max_shift:
                 # is concatenable with shift
                 # update minimum required shift
@@ -117,27 +116,45 @@ class Batch:
     def get_first_datetime_with_shift(self):
         return self.get_first_datetime() + self.required_shift
 
+    def get_spacing(self):
+        return self.spacing
 
 class TimeSerie:
     def __init__(self):
         self.batches = []
+        self.default_spacing = 4
 
     def add_block(self, block):
-        if len(self.batches) == 0 or not self.batches[len(batches) - 1].concatenate_if_possible(block):
-            self.batches.append(Batch(block))
+        if len(self.batches) == 0 or not self.batches[len(self.batches) - 1].concatenate_if_possible(block):
+            self.batches.append(Batch(block, self.default_spacing))
 
-    def to_csv(self):
-        result = ""
+    def to_array(self):
+        result = []
         for b in self.batches:
             i = 0
             for bb in b.blocks:
                 if bb.is_valid():
                     for e in bb.records:
-                        entry_csv = ";".join([str(v) for v in e])
-                        result += '{:f};{:s}\n'.format(b.get_first_datetime_with_shift() + i / 100, entry_csv)
+                        result.append([b.get_first_datetime_with_shift() + i / 100] + e)
+                        if debug and len(result) > 1:
+                            if result[len(result) - 1][0] <= result[len(result) - 2][0]:
+                                print("~ Error in lines", result[len(result) - 1], "<", result[len(result) - 2])
+                            elif abs(result[len(result) - 1][0] - result[len(result) - 2][0] - 0.01) > 0.0001:
+                                print("~ Small gap", (result[len(result) - 1][0] - result[len(result) - 2][0] - 0.01), result[len(result) - 1], "<->", result[len(result) - 2])
                         i += 1
+            i += b.get_spacing()
         return result
 
+    def to_csv(self):
+        array = self.to_array()
+        result = "\n".join(['{:f};{:s}'.format(l[0], ";".join([str(v) for v in l[1:]])) for l in array])
+        return result
 
+    def is_empty(self):
+        return len(self.batches) == 0
 
 ts = TimeSerie()
@@ -155,10 +172,9 @@ with open(input_file, 'rb') as f_input:
                 seconds = binToHexUKK(result.seconds)
                 b = Block(year, month, day, hour, minutes, seconds)
             else:
-                if len(batches) == 0:
+                if ts.is_empty():
                     continue
                 else:
-                    last = batches[len(batches) - 1]
                     # fake timestamp
                     b = Block.fake_block()
@@ -167,9 +183,12 @@ with open(input_file, 'rb') as f_input:
             ts.add_block(b)
         except StreamError as e:
+            if debug:
+                print("Skip end of file")
             break
         except:
-            print("Skip end of file")
+            print("Unknown error", sys.exc_info()[0])
             break