To receive notifications about scheduled maintenance, please subscribe to the mailing list gitlab-operations@sympa.ethz.ch at https://sympa.ethz.ch

Commit d0495833 authored by Roman Trüb's avatar Roman Trüb
Browse files

Merge branch 'master' into develop-datatrace

parents 90edc77d acc3fabd
......@@ -29,6 +29,9 @@ timeformat = %%Y-%%m-%%dT%%H:%%M:%%S
starttestscript = /home/flocklab/observer/testmanagement/flocklab_starttest.py
stoptestscript = /home/flocklab/observer/testmanagement/flocklab_stoptest.py
serialidscript = /home/flocklab/observer/testmanagement/tg_serialid.py ;Path to the serial ID readout script on the observer
datatrace_loopdelay = 10 ;Loop delay [ms] used for the datatrace service on the observer
datatrace_prescaler = 16 ;Timestamp prescaler for the datatrace service on the observer
datatrace_cpuspeed = 80000000 ;Default CPU speed for the datatrace service on the observer
; Config for fetcher
[fetcher]
......@@ -124,7 +127,6 @@ user = flocklab
basedir = /home/flocklab/testmanagementserver
scheduler = /home/flocklab/testmanagementserver/flocklab_scheduler.py
archivedir = /home/flocklab/testarchive
sshflags = ;additional flags
logdir = /home/flocklab/logs ;log directory for web
tempdir = /home/flocklab/tmp
venvwrapper = /home/flocklab/tools/wrapper.sh ;activates python virtual environment (leave blank if no venv)
......
......@@ -89,7 +89,7 @@ class StopTestThread(threading.Thread):
if (rs != 0):
if (rs == 1):
if ("No such file or directory" in err):
msg = "SD card on observer ID %s is not mounted, observer will thus be omitted for this test." % (self._obsdict_key[self._obskey][1])
msg = "SD card on observer ID %s is not mounted." % (self._obsdict_key[self._obskey][1])
else:
msg = "Observer ID %s is not reachable (returned %d: %s, %s)." % (self._obsdict_key[self._obskey][1], rs, out, err)
else:
......@@ -181,11 +181,11 @@ class StartTestThread(threading.Thread):
if (rs != 0):
if (rs == 1):
if ("No such file or directory" in err):
msg = "SD card on observer ID %s is not mounted, observer will thus be omitted for this test." % (self._obsdict_key[self._obskey][1])
msg = "SD card on observer ID %s is not mounted (observer will thus be omitted for this test)." % (self._obsdict_key[self._obskey][1])
else:
msg = "Observer ID %s is not reachable, it will thus be omitted for this test (returned: %d: %s, %s)." % (self._obsdict_key[self._obskey][1], rs, out, err)
msg = "Observer ID %s is not reachable and will thus be omitted for this test (returned: %d: %s, %s)." % (self._obsdict_key[self._obskey][1], rs, out, err)
else:
msg = "Observer ID %s is not responsive, it will thus be omitted for this test (SSH returned %d). Command: %s" % (self._obsdict_key[self._obskey][1], rs, " ".join(cmd))
msg = "Observer ID %s is not responsive and will thus be omitted for this test (SSH returned %d)." % (self._obsdict_key[self._obskey][1], rs)
errors.append((msg, errno.EHOSTUNREACH, self._obsdict_key[self._obskey][1]))
logger.error(msg)
else:
......@@ -260,6 +260,28 @@ class StartTestThread(threading.Thread):
### END StartTestThread
##############################################################################
#
# write_to_error_log write into the error log file that is passed to the user together with the test results
#
##############################################################################
def write_to_error_log(testid, obsid, message):
    """Append an entry to the per-test error log CSV that is passed to the user
    together with the test results.

    Creates the test results directory and the errorlog.csv file (including its
    header line) on first use.

    Parameters:
        testid (int): ID of the test; the entry is silently dropped if this is
                      not an int.
        obsid:        observer ID the error relates to (0 if not observer-specific).
        message (str): error message to record. NOTE(review): fields are not
                       CSV-quoted, so the message should not contain commas or
                       newlines — confirm with callers.
    """
    if not isinstance(testid, int):
        # Invalid test ID -> no destination directory can be determined.
        return
    testresultsdir = "%s/%d" % (flocklab.config.get('fetcher', 'testresults_dir'), testid)
    if not os.path.exists(testresultsdir):
        os.makedirs(testresultsdir)
        logger.debug("Created %s" % testresultsdir)
    errorlogfilename = "%s/errorlog.csv" % (testresultsdir)
    # Write the CSV header only when the file does not exist yet.
    writeheader = not os.path.isfile(errorlogfilename)
    with open(errorlogfilename, "a") as errorlog:
        if writeheader:
            errorlog.write('timestamp,observer_id,message\n')
        errorlog.write("%.3f,%s,%s\n" % (time.time(), str(obsid), message))
##############################################################################
#
......@@ -269,8 +291,9 @@ class StartTestThread(threading.Thread):
def start_test(testid, cur, cn, obsdict_key, obsdict_id):
errors = []
warnings = []
abortOnError = False
try:
try:
logger.debug("Entering start_test() function...")
# First, validate the XML file again. If validation fails, return immediately:
cmd = [flocklab.config.get('dispatcher','validationscript'), '--testid=%d' % testid]
......@@ -424,6 +447,10 @@ def start_test(testid, cur, cn, obsdict_key, obsdict_id):
xmldict_key[obs_key] = (xmlpath, xmlfhand)
xmlfhand.write('<?xml version="1.0" encoding="UTF-8"?>\n\n<obsConf>\n\n')
# Go through the blocks of the XML file and write the configs to the affected observer XML configs:
# generalConf ---
ret = tree.xpath('//d:generalConf/d:abortOnError', namespaces=ns)
if ret and ret[0].text.lower() == 'yes':
abortOnError = True
# targetConf ---
targetconfs = tree.xpath('//d:targetConf', namespaces=ns)
if not targetconfs:
......@@ -455,7 +482,9 @@ def start_test(testid, cur, cn, obsdict_key, obsdict_id):
xmldict_key[obskey][1].write("\t<image core=\"%d\">%s/%d/%s</image>\n" % (coreimage[3], flocklab.config.get("observer", "testconfigfolder"),testid, os.path.basename(coreimage[0])))
xmldict_key[obskey][1].write("\t<slotnr>%s</slotnr>\n" % (imagedict_key[obskey][0][1]))
xmldict_key[obskey][1].write("\t<platform>%s</platform>\n" % (imagedict_key[obskey][0][2]))
xmldict_key[obskey][1].write("\t<os>%s</os>\n" % (imagedict_key[obskey][0][2]))
if abortOnError:
# what to do if one of the services fails to start
xmlblock += "\t<abortOnError>yes</abortOnError>\n"
slot = imagedict_key[obskey][0][1]
xmldict_key[obskey][1].write("</obsTargetConf>\n\n")
#logger.debug("Wrote obsTargetConf XML for observer ID %s" %obsid)
......@@ -484,7 +513,9 @@ def start_test(testid, cur, cn, obsdict_key, obsdict_id):
cpuspeed = srconf.xpath('d:cpuSpeed', namespaces=ns)
if cpuspeed:
cpuspeed = cpuspeed[0].text.strip()
xmlblock += "\t<cpuSpeed>%s</cpuSpeed>\n" % cpuspeed
else:
cpuSpeed = flocklab.config.get("observer", "datatrace_cpuspeed") # use default CPU speed
xmlblock += "\t<cpuSpeed>%s</cpuSpeed>\n" % cpuspeed
xmlblock += "</obsSerialConf>\n\n"
for obsid in obsids:
obsid = int(obsid)
......@@ -506,11 +537,15 @@ def start_test(testid, cur, cn, obsdict_key, obsdict_id):
cpuSpeed = dbgconf.xpath('d:cpuSpeed', namespaces=ns)
if cpuSpeed:
cpuSpeed = cpuSpeed[0].text.strip()
xmlblock += "\t<cpuSpeed>%s</cpuSpeed>\n" % (cpuSpeed)
else:
cpuSpeed = flocklab.config.get("observer", "datatrace_cpuspeed") # use default CPU speed
xmlblock += "\t<cpuSpeed>%s</cpuSpeed>\n" % (cpuSpeed)
gdbPort = dbgconf.xpath('d:gdbPort', namespaces=ns)
if gdbPort:
gdbPort = gdbPort[0].text.strip()
xmlblock += "\t<gdbPort>%s</gdbPort>\n" % (gdbPort)
xmlblock += "\t<prescaler>%s</prescaler>\n" % flocklab.config.get("observer", "datatrace_prescaler")
xmlblock += "\t<loopDelay>%s</loopDelay>\n" % flocklab.config.get("observer", "datatrace_loopdelay")
dwtconfs = dbgconf.xpath('d:dataTraceConf', namespaces=ns)
for dwtconf in dwtconfs:
var = dwtconf.xpath('d:variable', namespaces=ns)[0].text.strip()
......@@ -700,9 +735,10 @@ def start_test(testid, cur, cn, obsdict_key, obsdict_id):
#logger.error("Error from test start thread for observer %s: %s" %(str(err[2]), str(err[0])))
obs_error.append(err[2])
warnings.append(err[0])
write_to_error_log(testid, err[2], err[0])
if len(obs_error) > 0:
# Abort or continue?
if not flocklab.config.get("dispatcher", "continue_on_error"):
if abortOnError or not flocklab.config.get("dispatcher", "continue_on_error"):
msg = "At least one observer failed to start the test, going to abort..."
errors.append(msg)
logger.error(msg)
......@@ -724,7 +760,11 @@ def start_test(testid, cur, cn, obsdict_key, obsdict_id):
rs = p.wait()
if (rs != 0):
msg = "Serial proxy for test ID %d could not be started (error code %d)." % (testid, rs)
errors.append(msg)
if abortOnError:
errors.append(msg)
else:
warnings.append(msg)
write_to_error_log(testid, 0, msg)
logger.error(msg)
logger.debug("Executed command was: %s" % (str(cmd)))
else:
......@@ -1284,7 +1324,7 @@ def main(argv):
cn.commit()
if len(errors) != 0:
# Test start failed. Make it abort:
logger.warning("Going to abort test because of errors when trying to start it.")
logger.error("Going to abort test because of errors when trying to start it.")
abort = True
# Inform user:
ret = inform_user(testid, cur, action, errors, warnings)
......
......@@ -194,11 +194,11 @@ def read_from_db_file(dbfile):
# write_to_error_log: Write an error message to the error log file that will be passed to the user
#
##############################################################################
def write_to_error_log(timestamp, obsid, nodeid, message):
def write_to_error_log(timestamp, obsid, message):
try:
testresultsfile_dict['errorlog'][1].acquire()
errorlog = open(testresultsfile_dict['errorlog'][0], "a")
errorlog.write("%s,%d,%d,%s\n" % (str(timestamp), obsid, nodeid, message))
errorlog.write("%s,%d,%s\n" % (str(timestamp), obsid, message))
errorlog.close()
testresultsfile_dict['errorlog'][1].release()
except Exception:
......@@ -411,7 +411,7 @@ def worker_logs(queueitem=None, nodeid=None, resultfile_path=None, resultfile_lo
infile = open(inputfilename, "r")
for line in infile:
(timestamp, msg) = line.strip().split(',', 1)
result.append("%s,%d,%s,%s\n" % (timestamp, obsid, nodeid, msg))
result.append("%s,%d,%s\n" % (timestamp, obsid, msg))
infile.close()
append_lines_to_file(resultfile_path, resultfile_lock, result)
os.remove(inputfilename)
......@@ -481,9 +481,9 @@ def worker_datatrace(queueitem=None, nodeid=None, resultfile_path=None, resultfi
varnames = f.readline().strip().split()[:-1] # ignore last element (sleep_overhead value)
try:
# process raw datatrace log (parse & apply time correction)
dfData, dfLocalTs, dfOverflow = dwt.processDatatraceOutput(input_filename)
dfData, dfLocalTs, dfOverflow, dfError = dwt.processDatatraceOutput(input_filename)
except Exception as e:
write_to_error_log('{}'.format(time.time()), obsid, nodeid, 'Datatrace: Error occurred when processing datatrace raw output! Potential problems: SWO/CPU speed mismatch (see cpuSpeed tag in xml config) or target did not start properly. Error: {}'.format(e))
write_to_error_log('{}'.format(time.time()), obsid, 'A datatrace error occurred when processing raw output ({}). Potential cause: SWO/CPU speed mismatch (see cpuSpeed tag in xml config) or target did not start properly.'.format(e))
else:
if len(dfData):
# add observer and node ID
......@@ -501,13 +501,17 @@ def worker_datatrace(queueitem=None, nodeid=None, resultfile_path=None, resultfi
header=False
)
resultfile_lock.release()
# append parsing errors to errorlog
for idx, row in dfError.iterrows():
write_to_error_log(row['global_ts_uncorrected'], obsid, 'Datatrace: {}'.format(row['message']))
# append overflow events to errorlog
for idx, row in dfOverflow.iterrows():
write_to_error_log(row['global_ts_uncorrected'], obsid, nodeid, 'Datatrace: event rate too high (overflow occurred)!')
write_to_error_log(row['global_ts_uncorrected'], obsid, 'Datatrace: event rate too high (buffer overflow occurred)!')
# append info about delayed timestamps to errorlog
for idx, row in dfLocalTs.iterrows():
if row['tc'] != 0:
write_to_error_log(row['global_ts'], obsid, nodeid, 'Datatrace: timestamp has been delayed (tc={})!'.format(row['tc']))
write_to_error_log(row['global_ts'], obsid, 'Datatrace: timestamp has been delayed (tc={})!'.format(row['tc']))
except:
msg = "Error in datatrace worker process: %s: %s\n%s" % (str(sys.exc_info()[0]), str(sys.exc_info()[1]), traceback.format_exc())
......@@ -1114,7 +1118,7 @@ def main(argv):
testresultsfile_dict[service] = (path, lock)
# Create file and write header:
if service in ('errorlog', 'timesynclog'):
header = 'timestamp,observer_id,node_id,message\n'
header = 'timestamp,observer_id,message\n'
elif service == 'gpiotracing':
header = 'timestamp,observer_id,node_id,pin_name,value\n'
elif service == 'powerprofiling':
......@@ -1126,9 +1130,11 @@ def main(argv):
elif service == 'datatrace':
header = 'timestamp,observer_id,node_id,variable,value,access,pc,delay_marker\n'
lock.acquire()
f = open(path, 'w')
f.write(header)
f.close()
# only create file and write header if it does not yet exist (e.g. the errorlog file may already exist)
if not os.path.isfile(path):
f = open(path, 'w')
f.write(header)
f.close()
lock.release()
# Start logging thread:
logqueue = manager.Queue(maxsize=10000)
......
......@@ -245,18 +245,19 @@ class SwoParser():
"""
retPkt = None
newSyncEpoch = False
parseError = None
# ignore payload bytes of unrecognized packet
if self._ignoreBytes:
self._ignoreBytes -= 1
return retPkt, newSyncEpoch
return retPkt, newSyncEpoch, parseError
# process sync packet (to sync to packet in byte stream)
if self._processingSyncPkt:
# read all zeros until get an 0x80, then we are in sync (Synchronization packet)
# NOTE: dpp2lora bytestream does not contain required single-bit in Synchronization packet
if swoByte == 0:
return retPkt, newSyncEpoch
return retPkt, newSyncEpoch, parseError
# elif swoByte == 0x08:
# return retPkt, newSyncEpoch
else:
......@@ -285,15 +286,23 @@ class SwoParser():
if discriminator_id in [0, 1, 2]:
# 0 Event counter wrapping, 1 Exception tracing, 2 PC sampling
self._ignoreBytes = plSize
print('WARNING: Hardware source packet with discriminator_id={} ignored!'.format(discriminator_id))
msg = 'WARNING: Hardware source packet with discriminator_id={} ignored!'.format(discriminator_id)
print(msg)
parseError = {'global_ts_uncorrected': globalTs, 'message': msg}
elif discriminator_id >= 8 and discriminator_id <= 23:
# Data tracing
self._currentPkt.append(type(self).DatatracePkt(header=swoByte, globalTs=globalTs))
else:
# Other undefined header
raise Exception("ERROR: Unrecognized discriminator ID ({}) in hardware source packet header: {}".format(discriminator_id, swoByte))
# Other undefined discriminator_id
# NOTE: Do not ignore payload bytes based on size (in most cases where discriminator_id is undefined, the header is not actually a valid header)
msg = "ERROR: Unrecognized discriminator ID ({} {:#010b}) in hardware source packet header: {}".format(discriminator_id, swoByte, swoByte)
print(msg)
parseError = {'global_ts_uncorrected': globalTs, 'message': msg}
else:
print("ERROR: Unrecognized DWT packet header: {} {:#010b}".format(swoByte, swoByte))
# NOTE: Do not ignore payload bytes based on size (in most cases where header is not recognized, the header is not actually a valid header)
msg = "ERROR: Unrecognized DWT packet header: {} {:#010b}".format(swoByte, swoByte)
print(msg)
parseError = {'global_ts_uncorrected': globalTs, 'message': msg}
else:
# PAYLOAD: we currently have a begun packet -> add data
self._currentPkt[0].addByte(swoByte)
......@@ -302,7 +311,7 @@ class SwoParser():
if len(self._currentPkt) != 0 and self._currentPkt[0].isComplete():
retPkt = self._currentPkt.pop()
return retPkt, newSyncEpoch
return retPkt, newSyncEpoch, parseError
################################################################################
# METHODS
......@@ -315,7 +324,10 @@ def processDatatraceOutput(input_file):
Parameters:
input_file (str): path to the raw data trace file
Returns:
df: dataframe containing the processed data
dfData: dataframe containing the data trace data
dfLocalTs: dataframe containing the local timestamps
dfOverflow: dataframe containing the overflow packets
dfError: dataframe containing messages about errors which occurred during processing (note this only contains errors that are not raised!)
"""
# # DEBUG
......@@ -326,19 +338,20 @@ def processDatatraceOutput(input_file):
dataTsList, sleepOverhead = readRaw(input_file)
# parse data/globalTs stream from list (returns packet list split into different sync packet epochs)
syncEpochList = parseDataTs(dataTsList)
syncEpochList, parseErrorList = parseDataTs(dataTsList)
# # DEBUG
# for i, pktList in enumerate(syncEpochList):
# with open('pktList_{}.txt'.format(i), 'w') as f:
# for i, pkt in enumerate(pktList):
# f.write('{}\n{}\n'.format(i, pkt))
# process packet list of each sync epoch separately
dfDataCorrList = []
dfLocalTsCorrList = []
dfOverflowList = []
for i, pktList in enumerate(syncEpochList):
# # DEBUG
print('INFO: Sync Epoch {}'.format(i))
# with open('pktList_{}.txt'.format(i), 'w') as f:
# for i, pkt in enumerate(pktList):
# f.write('{}\n{}\n'.format(i, pkt))
# split localTs epochs
batchList = splitEpochs(pktList)
......@@ -362,8 +375,9 @@ def processDatatraceOutput(input_file):
dfData = pd.concat(dfDataCorrList)
dfLocalTs = pd.concat(dfLocalTsCorrList)
dfOverflow = pd.concat(dfOverflowList)
dfError = pd.DataFrame(parseErrorList)
return dfData, dfLocalTs, dfOverflow
return dfData, dfLocalTs, dfOverflow, dfError
def readRaw(input_file):
......@@ -410,6 +424,7 @@ def parseDataTs(inList):
"""
syncEpochList = []
completedPkts = []
parseErrorList = []
firstSyncEpoch = True # we assume that SWO byte stream starts with sync pkt => first sync epoch is expected to be empty
swoParser = SwoParser()
......@@ -417,7 +432,7 @@ def parseDataTs(inList):
data, globalTs = inList.pop(0)
for swoByte in data:
ret, newSyncEpoch = swoParser.addSwoByte(swoByte, globalTs)
ret, newSyncEpoch, parseError = swoParser.addSwoByte(swoByte, globalTs)
if newSyncEpoch:
# print('INFO: new sync epoch started!')
if firstSyncEpoch:
......@@ -428,6 +443,9 @@ def parseDataTs(inList):
completedPkts = [] # NOTE: do not use completedPkts.clear() since this would also clear the list just appended to syncEpochList (as both variables point to the same list object)
if ret:
completedPkts.append(ret)
if parseError:
parseErrorList.append(parseError)
# append last sync epoch to syncEpochList
syncEpochList.append(completedPkts)
......@@ -439,7 +457,7 @@ def parseDataTs(inList):
# print(i)
# print(pkt)
return syncEpochList
return syncEpochList, parseErrorList
def splitEpochs(pktList):
......@@ -467,17 +485,18 @@ def splitEpochs(pktList):
followingRefpktIdx = None
for i in range(startIdx, len(pktList)):
if type(pktList[i]) in (SwoParser.LocalTimestampPkt, SwoParser.OverflowPkt):
if not currentRefpktIdx:
if currentRefpktIdx is None:
currentRefpktIdx = i
else:
followingRefpktIdx = i
break
# we expect that there is at least 1 ref packet
assert currentRefpktIdx
if currentRefpktIdx is None:
raise Exception('ERROR: No reference packet found for batch!')
# ref pkt should not be local timestamp overflow packet
if type(pktList[currentRefpktIdx]) == SwoParser.LocalTimestampPkt:
assert pktList[currentRefpktIdx].ts != FULL_TIMESTAMP
if (type(pktList[currentRefpktIdx]) == SwoParser.LocalTimestampPkt) and (pktList[currentRefpktIdx].ts == FULL_TIMESTAMP):
raise Exception('ERROR: Reference packet should be local timestamp overflow packet!')
# based on following reference packet, determine stopIdx
if not followingRefpktIdx:
......@@ -497,7 +516,9 @@ def splitEpochs(pktList):
data2Idx = followingRefpktIdx
while type(pktList[data2Idx]) != SwoParser.DatatracePkt:
data2Idx -= 1
assert data2Idx >= currentRefpktIdx # at least packets up to the found reference packet should be in the in the current epoch
# at least packets up to the found reference packet should be in the in the current epoch
if data2Idx < currentRefpktIdx:
raise Exception('ERROR: There should be at least 1 one reference packet in the current epoch!')
# find data packet preceding the data2 data pkt
data1Idx = data2Idx - 1
while True:
......
......@@ -73,7 +73,7 @@ RES=$(rsync ${RSYNCPARAMS} -i --dry-run -e 'ssh -q' webserver/ ${USER}@${HOST}:w
if [ -z "$RES" ]; then
echo "Webserver files are up to date."
else
printf "Updating webserver files..."
printf "Updating webserver files... "
rsync ${RSYNCPARAMS} -e 'ssh -q' webserver/ ${USER}@${HOST}:webserver
if [ $? -ne 0 ]; then
printf "failed to copy repository files!\n"
......
......@@ -256,7 +256,7 @@ echo '<h1>Manage Tests for '.$_SESSION['firstname'] . ' ' . $_SESSION['lastname'
<th width="35px">ID</th>
<th width="100px">Title</th>
<th width="150px">Description</th>
<th width="30px">IMG</th>
<th width="35px">IMG</th>
<th width="35px" class='qtip_show' title='State'>State</th>
<th>Start</th>
<th>End</th>
......
......@@ -39,7 +39,7 @@
// check file
$archivepath = $CONFIG['testmanagementserver']['archivedir'];
$archive =
$cmd = "ssh ".$CONFIG['testmanagementserver']['sshflags']." ".$CONFIG['testmanagementserver']['user']."@".$CONFIG['testmanagementserver']['host']." \"ls -l ".$archivepath.'/'.$testid.".tar.gz\"";
$cmd = "ssh ".$CONFIG['testmanagementserver']['user']."@".$CONFIG['testmanagementserver']['host']." \"ls -l ".$archivepath.'/'.$testid.".tar.gz\"";
exec($cmd , $output, $ret);
if ($ret > 0) {
echo json_encode(array('status'=>'error', 'output'=>'data not available'));
......@@ -58,7 +58,7 @@
echo json_encode(array('status'=>'success', 'testid'=>$testid));
exit();
}
$cmd = "ssh ".$CONFIG['testmanagementserver']['sshflags']." ".$CONFIG['testmanagementserver']['user']."@".$CONFIG['testmanagementserver']['host']." \"cat ".$archivepath.'/'.$testid.".tar.gz\"";
$cmd = "ssh ".$CONFIG['testmanagementserver']['user']."@".$CONFIG['testmanagementserver']['host']." \"cat ".$archivepath.'/'.$testid.".tar.gz\"";
$stream = popen($cmd, "r");
// Send the file to the user's browser:
header("Content-Type: application/x-gzip");
......
......@@ -481,12 +481,12 @@ class HTTP_WebDAV_Server_Filesystem extends HTTP_WebDAV_Server
if ($power) {
// pipe file from archive
$archivepath = $CONFIG['testmanagementserver']['archivedir'];
$cmd = "ssh ".$CONFIG['testmanagementserver']['sshflags']." ".$CONFIG['testmanagementserver']['user']."@".$CONFIG['testmanagementserver']['host']." \"ls ".$archivepath.'/'.$testid.".tar.gz\"";
$cmd = "ssh ".$CONFIG['testmanagementserver']['user']."@".$CONFIG['testmanagementserver']['host']." \"ls ".$archivepath.'/'.$testid.".tar.gz\"";
exec($cmd , $output, $ret);
if ($ret > 0)
return false;
// dump whole archive
$cmd = "ssh ".$CONFIG['testmanagementserver']['sshflags']." ".$CONFIG['testmanagementserver']['user']."@".$CONFIG['testmanagementserver']['host']." \"cat ".$archivepath.'/'.$testid.".tar.gz\"";
$cmd = "ssh ".$CONFIG['testmanagementserver']['user']."@".$CONFIG['testmanagementserver']['host']." \"cat ".$archivepath.'/'.$testid.".tar.gz\"";
$stream = popen($cmd, "r");
return $stream;
}
......@@ -494,13 +494,13 @@ class HTTP_WebDAV_Server_Filesystem extends HTTP_WebDAV_Server
flog("nopower");
$archivepath = $CONFIG['testmanagementserver']['archivedir'];
$split_path = $CONFIG['testmanagementserver']['toolsdir'];
$cmd = "ssh ".$CONFIG['testmanagementserver']['sshflags']." ".$CONFIG['testmanagementserver']['user']."@".$CONFIG['testmanagementserver']['host']." \"ls ".$archivepath.'/'.$testid.".tar.gz\"";
$cmd = "ssh ".$CONFIG['testmanagementserver']['user']."@".$CONFIG['testmanagementserver']['host']." \"ls ".$archivepath.'/'.$testid.".tar.gz\"";
exec($cmd , $output, $ret);
if ($ret > 0)
return false;
// dump stripped archive
flog("nopower dump");
$cmd = "ssh ".$CONFIG['testmanagementserver']['sshflags']." ".$CONFIG['testmanagementserver']['user']."@".$CONFIG['testmanagementserver']['host']." \"/bin/zcat ".$archivepath.'/'.$testid.".tar.gz | ".$split_path."/flocklab_archive_split | /usr/bin/pigz\"";
$cmd = "ssh ".$CONFIG['testmanagementserver']['user']."@".$CONFIG['testmanagementserver']['host']." \"/bin/zcat ".$archivepath.'/'.$testid.".tar.gz | ".$split_path."/flocklab_archive_split | /usr/bin/pigz\"";
flog("nopower dump ". $cmd);
$stream = popen($cmd, "r");
return $stream;
......@@ -513,7 +513,7 @@ class HTTP_WebDAV_Server_Filesystem extends HTTP_WebDAV_Server
return null;
// file exists?
$archivepath = $CONFIG['testmanagementserver']['archivedir'];
$cmd = "ssh ".$CONFIG['testmanagementserver']['sshflags']." ".$CONFIG['testmanagementserver']['user']."@".$CONFIG['testmanagementserver']['host']." \"ls -l ".$archivepath.'/'.$testid.".tar.gz\"";
$cmd = "ssh ".$CONFIG['testmanagementserver']['user']."@".$CONFIG['testmanagementserver']['host']." \"ls -l ".$archivepath.'/'.$testid.".tar.gz\"";
exec($cmd , $output, $ret);
if ($ret > 0)
return 0;
......
......@@ -116,6 +116,13 @@
</xs:restriction>
</xs:simpleType>
</xs:element>
<xs:element name="abortOnError" minOccurs="0" default="no">
<!-- Optional flag: 'yes' aborts the whole test if one of the services fails to
     start on an observer; 'no' (the default) lets the test continue, omitting
     the failed observer. -->
<xs:simpleType>
<xs:restriction base="xs:string">
<xs:pattern value="yes|no"/>
</xs:restriction>
</xs:simpleType>
</xs:element>
</xs:sequence>
</xs:complexType>
......
......@@ -32,7 +32,6 @@
<serialConf>
<obsIds>2 4 6 7 9</obsIds>
<baudrate>115200</baudrate>
<port>serial</port>
</serialConf>
<!-- GPIO Tracing Service configuration -->
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment