Commit a6b995f5 authored by Roman Trüb's avatar Roman Trüb
Browse files

use sleep_overhead value from trace file to add platform dependent time...

use sleep_overhead value from trace file to add a platform-dependent time correction factor to improve the datatrace offset
parent e95ca983
...@@ -482,7 +482,7 @@ def worker_datatrace(queueitem=None, nodeid=None, resultfile_path=None, resultfi ...@@ -482,7 +482,7 @@ def worker_datatrace(queueitem=None, nodeid=None, resultfile_path=None, resultfi
# first line of the log file contains the variable names # first line of the log file contains the variable names
varnames = "" varnames = ""
with open(input_filename, "r") as f: with open(input_filename, "r") as f:
varnames = f.readline().strip().split() varnames = f.readline().strip().split()[:-1] # ignore last element (sleep_overhead value)
try: try:
# process raw datatrace log (parse & apply time correction) # process raw datatrace log (parse & apply time correction)
dfData, dfLocalTs, dfOverflow = dwt.processDatatraceOutput(input_filename) dfData, dfLocalTs, dfOverflow = dwt.processDatatraceOutput(input_filename)
......
...@@ -51,12 +51,16 @@ PRESCALER = 16 # prescaler configured in Trace Control Register ...@@ -51,12 +51,16 @@ PRESCALER = 16 # prescaler configured in Trace Control Register
# NOTE: needs to match the settings on the observer! # NOTE: needs to match the settings on the observer!
# time offset between datatrace and GPIO service (ts_datatrace + offset = ts_gpio) # time offset between datatrace and GPIO service (ts_datatrace + offset = ts_gpio)
DT_FIXED_OFFSET = -0.007448270618915558 # no offset correction # DT_FIXED_OFFSET = 0 # shift min (no fixed offset correction)
DT_FIXED_OFFSET = -5.0e-3 # shift half of loop delay
# DT_FIXED_OFFSET = -0.007448270618915558 # no offset correction
# DT_FIXED_OFFSET = 0.0008908960223197937 # shift by 2*std(residual) # DT_FIXED_OFFSET = 0.0008908960223197937 # shift by 2*std(residual)
# DT_FIXED_OFFSET = 0.0008908960223197937 # shift min(residual) # DT_FIXED_OFFSET = 0.0008908960223197937 # shift min(residual)
FILTER_THRESHOLD = 0.15 # Threshold for percentage of filtered messages to produce an error FILTER_THRESHOLD = 0.15 # Threshold for percentage of filtered messages to produce an error
RESIDUAL_UNFILTERED_THRESHOLD = 0.150 # Threshold for residuals magnitude to producing error (in seconds) RESIDUAL_UNFILTERED_THRESHOLD = 0.300 # Threshold for residuals magnitude to producing error (in seconds)
PLATFORM_CORRECTION_OFFSET = -1.3e-3 # offset for platforms (Linux version) with larger overhead
################################################################################ ################################################################################
# SwoParser Class # SwoParser Class
...@@ -313,7 +317,7 @@ def processDatatraceOutput(input_file): ...@@ -313,7 +317,7 @@ def processDatatraceOutput(input_file):
# plt.close('all') # plt.close('all')
# read raw file into list # read raw file into list
dataTsList = readRaw(input_file) dataTsList, sleepOverhead = readRaw(input_file)
# parse data/globalTs stream from list (returns packet list split into different sync packet epochs) # parse data/globalTs stream from list (returns packet list split into different sync packet epochs)
syncEpochList = parseDataTs(dataTsList) syncEpochList = parseDataTs(dataTsList)
...@@ -342,7 +346,7 @@ def processDatatraceOutput(input_file): ...@@ -342,7 +346,7 @@ def processDatatraceOutput(input_file):
if len(dfLocalTs) < 2: if len(dfLocalTs) < 2:
raise Exception('ERROR: dfLocalTs is empty or does not contain enough pkts -> unable to apply time correction!') raise Exception('ERROR: dfLocalTs is empty or does not contain enough pkts -> unable to apply time correction!')
dfDataCorr, dfLocalTsCorr = timeCorrection(dfData, dfLocalTs) dfDataCorr, dfLocalTsCorr = timeCorrection(dfData, dfLocalTs, sleepOverhead)
dfDataCorrList.append(dfDataCorr) dfDataCorrList.append(dfDataCorr)
dfLocalTsCorrList.append(dfLocalTsCorr) dfLocalTsCorrList.append(dfLocalTsCorr)
...@@ -366,8 +370,8 @@ def readRaw(input_file): ...@@ -366,8 +370,8 @@ def readRaw(input_file):
with open(input_file) as f: with open(input_file) as f:
lines = f.readlines() lines = f.readlines()
# ignore first line with varnames # skip first line with varnames but extract sleep_overhead value
lines.pop(0) sleepOverhead = float(lines.pop(0).split()[-1:][0])
for i in range(int(len(lines)/2)): for i in range(int(len(lines)/2)):
# we expect that raw file starts with data (not with global timestamp) # we expect that raw file starts with data (not with global timestamp)
...@@ -388,7 +392,7 @@ def readRaw(input_file): ...@@ -388,7 +392,7 @@ def readRaw(input_file):
# add data and timestamp as tuple # add data and timestamp as tuple
outList.append((numbers, globalTs)) outList.append((numbers, globalTs))
return outList return outList, sleepOverhead
def parseDataTs(inList): def parseDataTs(inList):
...@@ -604,7 +608,7 @@ def combinePkts(batchList): ...@@ -604,7 +608,7 @@ def combinePkts(batchList):
return dfData, dfLocalTs, dfOverflow return dfData, dfLocalTs, dfOverflow
def timeCorrection(dfData, dfLocalTs): def timeCorrection(dfData, dfLocalTs, sleepOverhead):
""" """
Calculates a regression based on the values in dfLocalTs and adds corrected global timestamps. Calculates a regression based on the values in dfLocalTs and adds corrected global timestamps.
...@@ -665,7 +669,7 @@ def timeCorrection(dfData, dfLocalTs): ...@@ -665,7 +669,7 @@ def timeCorrection(dfData, dfLocalTs):
# # DEBUG visualize # # DEBUG visualize
# import matplotlib.pyplot as plt # import matplotlib.pyplot as plt
# # plt.close('all') # plt.close('all')
# ## regression # ## regression
# fig, ax = plt.subplots() # fig, ax = plt.subplots()
# ax.scatter(x, y, marker='.', label='Data (uncorrected)', c='r') # ax.scatter(x, y, marker='.', label='Data (uncorrected)', c='r')
...@@ -718,9 +722,13 @@ def timeCorrection(dfData, dfLocalTs): ...@@ -718,9 +722,13 @@ def timeCorrection(dfData, dfLocalTs):
# ax.set_xlabel('Diff [s]') # ax.set_xlabel('Diff [s]')
# ax.set_ylabel('Count') # ax.set_ylabel('Count')
# add platform (linux version) dependent correction offset
# measured sleepOverhead is used to identify platform
platformCorrection = PLATFORM_CORRECTION_OFFSET if sleepOverhead > 0.263e-3 else 0
# add corrected timestamps to dataframe # add corrected timestamps to dataframe
dfDataCorr['global_ts'] = dfDataCorr.local_ts * slopeFinal + interceptFinal + DT_FIXED_OFFSET dfDataCorr['global_ts'] = dfDataCorr.local_ts * slopeFinal + interceptFinal + DT_FIXED_OFFSET + platformCorrection
dfLocalTsCorr['global_ts'] = dfLocalTsCorr.local_ts * slopeFinal + interceptFinal + DT_FIXED_OFFSET dfLocalTsCorr['global_ts'] = dfLocalTsCorr.local_ts * slopeFinal + interceptFinal + DT_FIXED_OFFSET + platformCorrection
return dfDataCorr, dfLocalTsCorr return dfDataCorr, dfLocalTsCorr
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment