Commit 594037d9 authored by Reto Da Forno
linktest added

parent acc3fabd
@@ -72,7 +72,7 @@ validationscript = /home/flocklab/testmanagementserver/testconfig_validator.py
 dispatcherscript = /home/flocklab/testmanagementserver/flocklab_dispatcher.py ;Path to dispatcher script on testmanagement server
 fetcherscript = /home/flocklab/testmanagementserver/flocklab_fetcher.py ;Path to fetcher script on testmanagement server
 archiverscript = /home/flocklab/testmanagementserver/flocklab_archiver.py ;Path to archiver script on testmanagement server
-testtolinkmapscript = /home/flocklab/testmanagementserver/test_to_linkmap.py ;Path to linkmap evaluation script on testmanagement server
+linktestevalscript = /home/flocklab/testmanagementserver/linktests/eval_linktest.py ;Path to linktest evaluation script on testmanagement server
 serialproxyscript = /home/flocklab/testmanagementserver/flocklab_serialproxy.py ;Path to serial proxy script on testmanagement server
 default_tg_voltage = 3.3 ;Default voltage for targets if not specified in XML
 archiver_waittime = 10 ;Wait time between calls to the archiver if the maximum number of archiver instances is reached
@@ -356,22 +356,20 @@ CREATE TABLE `tbl_serv_users` (
 --
 -- Table structure for table `tbl_serv_web_link_measurements`
 --
-DROP TABLE IF EXISTS `tbl_serv_web_link_measurements`;
+DROP TABLE IF EXISTS `tbl_serv_link_measurements`;
 /*!40101 SET @saved_cs_client = @@character_set_client */;
 /*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `tbl_serv_web_link_measurements` (
+CREATE TABLE `tbl_serv_link_measurements` (
   `serv_link_measurements_key` int(10) unsigned NOT NULL AUTO_INCREMENT,
   `test_fk` int(10) unsigned DEFAULT NULL,
   `platform_fk` int(10) unsigned NOT NULL,
-  `links` longblob,
   `begin` datetime NOT NULL,
-  `end` datetime NOT NULL,
-  `radio` enum('','RF212','RF230') COLLATE utf8_bin DEFAULT '',
+  `radio_cfg` enum('', 'fsk_868','lora_868') COLLATE utf8_bin,
+  `links` longtext COLLATE utf8_bin,
   PRIMARY KEY (`serv_link_measurements_key`),
   KEY `date_begin` (`begin`),
-  KEY `fk_tbl_serv_web_link_measurements_platforms` (`platform_fk`),
-  KEY `radio` (`radio`),
-  CONSTRAINT `fk_tbl_serv_web_link_measurements_platforms` FOREIGN KEY (`platform_fk`) REFERENCES `tbl_serv_platforms` (`serv_platforms_key`) ON DELETE NO ACTION ON UPDATE NO ACTION
+  KEY `fk_tbl_serv_link_measurements_platforms` (`platform_fk`),
+  CONSTRAINT `fk_tbl_serv_link_measurements_platforms` FOREIGN KEY (`platform_fk`) REFERENCES `tbl_serv_platforms` (`serv_platforms_key`) ON DELETE NO ACTION ON UPDATE NO ACTION
 ) ENGINE=InnoDB AUTO_INCREMENT=13880 DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
 /*!40101 SET character_set_client = @saved_cs_client */;
@@ -32,7 +32,7 @@ POSSIBILITY OF SUCH DAMAGE.
 """
-import sys, os, getopt, errno, threading, shutil, time, datetime, subprocess, tempfile, queue, re, logging, traceback, __main__, types, hashlib, lxml.etree, MySQLdb, signal
+import sys, os, getopt, errno, threading, shutil, time, datetime, subprocess, tempfile, queue, re, logging, traceback, __main__, types, hashlib, lxml.etree, MySQLdb, signal, tarfile
 import lib.flocklab as flocklab
 import flocklab as fltools
@@ -1040,27 +1040,94 @@ def prepare_testresults(testid, cur):
 ##############################################################################
 #
-# evalute_linkmeasurement
+# evaluate_linkmeasurement
 #
 ##############################################################################
-def evalute_linkmeasurement(testid, cur):
+def evaluate_linkmeasurement(testid, cur):
     global logger
     if not logger:
         logger = flocklab.get_logger()
     errors = []
     # if link measurement, evaluate data
-    cur.execute("SELECT `username` FROM `tbl_serv_tests` LEFT JOIN `tbl_serv_users` ON (`serv_users_key`=`owner_fk`) WHERE (`serv_tests_key` = %s)" %testid)
+    cur.execute("SELECT `username`, `time_start_act` FROM `tbl_serv_tests` LEFT JOIN `tbl_serv_users` ON (`serv_users_key`=`owner_fk`) WHERE (`serv_tests_key` = %s)" % testid)
     ret = cur.fetchone()
-    if ret and ret[0]==flocklab.config.get('linktests', 'user'):
+    if ret and ret[0] == flocklab.config.get('linktests', 'user'):
+        teststarttime = ret[1]
+        # Get test results from archive ---
+        archive_path = "%s/%s%s" % (flocklab.config.get('archiver', 'archive_dir'), testid, flocklab.config.get('archiver', 'archive_ext'))
+        if not os.path.exists(archive_path):
+            msg = "Archive path %s does not exist, removing link measurement." % archive_path
+            cur.execute("DELETE FROM `tbl_serv_link_measurements` WHERE `test_fk` = %s" % testid)
+            logger.error(msg)
+            errors.append(msg)
+            return errors
+        # Extract serial service results file ---
+        logger.debug("Extracting serial service file from archive...")
+        _serial_service_file = None    # initialize so the 'is None' check below cannot raise a NameError
+        tempdir = tempfile.mkdtemp()
+        archive = tarfile.open(archive_path, 'r:gz')
+        for f in archive.getmembers():
+            if re.search("serial[_]?", f.name) is not None:
+                archive.extract(f, tempdir)
+                _serial_service_file = "%s/%s" % (tempdir, f.name)
+                logger.debug("Found serial service file in test archive.")
+                break
+        archive.close()
+        if _serial_service_file is None:
+            msg = "Serial service file could not be found in archive %s, removing link measurement." % archive_path
+            cur.execute("DELETE FROM `tbl_serv_link_measurements` WHERE `test_fk` = %s" % testid)
+            logger.error(msg)
+            errors.append(msg)
+            return errors
+        # Run evaluation script
         logger.debug("Evaluating link measurements.")
-        cmd = [flocklab.config.get('dispatcher', 'testtolinkmapscript')]
-        p = subprocess.Popen(cmd)
-        rs = p.wait()
+        cmd = [flocklab.config.get('dispatcher', 'linktestevalscript'), _serial_service_file]
+        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True)
+        out, err = p.communicate()
+        rs = p.returncode
         if rs != flocklab.SUCCESS:
-            msg = "Error %s returned from testtolinkmap script" % str(rs)
+            msg = "Linktest eval script returned with an error:\n%s" % str(out.strip())
             logger.error(msg)
             errors.append(msg)
         else:
             logger.debug("Link measurement evaluations finished.")
+            # Get platform info
+            sql = """SELECT `c`.`platforms_fk`, `d`.`name` FROM `tbl_serv_tests` as `a`
+                     LEFT JOIN `tbl_serv_map_test_observer_targetimages` as `b` ON (`a`.serv_tests_key = `b`.test_fk)
+                     LEFT JOIN `tbl_serv_targetimages` AS `c` ON (`b`.`targetimage_fk` = `c`.`serv_targetimages_key`)
+                     LEFT JOIN `tbl_serv_platforms` AS `d` ON (`c`.`platforms_fk` = `d`.`serv_platforms_key`)
+                     WHERE `a`.serv_tests_key = %s LIMIT 1"""
+            cur.execute(sql % str(testid))
+            ret = cur.fetchone()
+            if not ret:
+                msg = "Could not determine platform for test %d" % testid
+                logger.error(msg)
+                errors.append(msg)
+            else:
+                platform_fk = ret[0]
+                platform_name = ret[1]
+                # Load the results
+                resultspath = os.path.realpath("data")    # TODO remove hardcoded path
+                resultsfile = os.path.join(resultspath, "linktest_map.html")
+                if not os.path.isfile(resultsfile):
+                    msg = "Linktest results file %s not found." % (resultsfile)
+                    logger.error(msg)
+                    errors.append(msg)
+                else:
+                    resultsdata = ""
+                    with open(resultsfile, 'r') as rf:
+                        resultsdata = rf.read()
+                    z = re.findall("<body>(.*)</body>", resultsdata, re.MULTILINE | re.DOTALL)
+                    if z:
+                        resultsdata = z[0]
+                    # Store results in DB
+                    logger.debug("Storing XML file in DB...")
+                    cur.execute("DELETE FROM `tbl_serv_link_measurements` WHERE `test_fk`=%s" % str(testid))
+                    cur.execute("INSERT INTO `tbl_serv_link_measurements` (`test_fk`, `platform_fk`, `begin`, `radio_cfg`, `links`) VALUES (%s, %s, %s, %s, %s)", ((str(testid), platform_fk, teststarttime, '', resultsdata)))
+                # Remove the temporary files
+                if os.path.isdir(resultspath):
+                    shutil.rmtree(resultspath)
     return errors
-### END evalute_linkmeasurement()
+### END evaluate_linkmeasurement()
 ##############################################################################
@@ -1430,9 +1497,10 @@ def main(argv):
         for e in err:
             errors.append(e)
         # Evaluate link measurement:
-        #err = evalute_linkmeasurement(testid, cur)
-        #for e in err:
-        #    errors.append(e)
+        err = evaluate_linkmeasurement(testid, cur)
+        cn.commit()
+        for e in err:
+            errors.append(e)
         if len(errors) == 0:
             status = 'finished'
@@ -174,7 +174,7 @@ def main(argv):
         now = time.strftime(flocklab.config.get("database", "timeformat"), time.gmtime())
         # schedule link measurement if needed
-        #flocklab.schedule_linktest(cur, cn, debug)
+        flocklab.schedule_linktest(cur, cn, debug)
         # Check for work ---
         # Check if a new test is to be started ---
@@ -1020,101 +1020,62 @@ def is_test_running(cursor=None):
 #
 ##############################################################################
 def schedule_linktest(cur, cn, debug=False):
     global config, logger
     # Check the arguments:
     if not config or not logger or ((type(cur) != MySQLdb.cursors.Cursor) or (type(cn) != MySQLdb.connections.Connection)):
         return FAILED
-    sql = "SELECT TIMESTAMPDIFF(MINUTE, `begin`, NOW()) AS `last` FROM `tbl_serv_web_link_measurements` ORDER BY `last` ASC LIMIT 1"
+    sql = "SELECT TIMESTAMPDIFF(MINUTE, `begin`, NOW()) AS `last` FROM `tbl_serv_link_measurements` ORDER BY `last` ASC LIMIT 1"
     cur.execute(sql)
     rs = cur.fetchone()
-    if rs:
-        lasttest = int(rs[0])
-        logger.debug("Last link measurement was %s minutes ago."%(lasttest))
-        nexttest = 60 * config.getint("linktests", "interval_hours") + random.randint(-config.getint("linktests", "interval_random_minutes"), config.getint("linktests", "interval_random_minutes"))
-        if lasttest >= nexttest:
-            # Schedule new tests
-            # Check if the lockfile is present:
-            lockfile = config.get("linktests", "lockfile")
-            if os.path.exists(lockfile):
-                logger.debug("Lockfile %s exists already. Skip adding new linktests.")
-                # If the last scheduled link tests are a long time ago, generate a warning since it may be that the lockfile was not deleted for whatever reason:
-                if lasttest > 2*nexttest:
-                    logger.error("Lockfile %s exists and the last linktest was %d min ago (interval is %d min)"%(lockfile, lasttest, config.getint("linktests", "interval_hours")))
-            else:
-                # Create the lockfile:
-                basedir = os.path.dirname(lockfile)
-                if not os.path.exists(basedir):
-                    os.makedirs(basedir)
-                open(lockfile, 'a').close()
-                logger.debug("Touched lockfile %s"%lockfile)
-                # Schedule new tests
-                logger.debug("Schedule new link measurements")
-                listing = os.listdir(config.get("linktests", "testfolder"))
-                for linktestfile in listing:
-                    if re.search("\.xml$", os.path.basename(linktestfile)) is not None:
-                        # read platform
-                        parser = lxml.etree.XMLParser(remove_comments=True)
-                        tree = lxml.etree.parse("%s/%s" % (config.get("linktests", "testfolder"),linktestfile), parser)
-                        ns = {'d': config.get('xml', 'namespace')}
-                        pl = tree.xpath('//d:platform', namespaces=ns)
-                        platform = pl[0].text.strip()
-                        # get available observers with that platform from DB
-                        sql = """SELECT LPAD(obs.observer_id, 3, '0') as obsid
-                                 FROM `flocklab`.`tbl_serv_observer` AS obs
-                                 LEFT JOIN `flocklab`.`tbl_serv_tg_adapt_list` AS a ON obs.slot_1_tg_adapt_list_fk = a.serv_tg_adapt_list_key
-                                 LEFT JOIN `flocklab`.`tbl_serv_tg_adapt_types` AS slot1 ON a.tg_adapt_types_fk = slot1.serv_tg_adapt_types_key
-                                 LEFT JOIN `flocklab`.`tbl_serv_tg_adapt_list` AS b ON obs.slot_2_tg_adapt_list_fk = b.serv_tg_adapt_list_key
-                                 LEFT JOIN `flocklab`.`tbl_serv_tg_adapt_types` AS slot2 ON b.tg_adapt_types_fk = slot2.serv_tg_adapt_types_key
-                                 LEFT JOIN `flocklab`.`tbl_serv_tg_adapt_list` AS c ON obs.slot_3_tg_adapt_list_fk = c.serv_tg_adapt_list_key
-                                 LEFT JOIN `flocklab`.`tbl_serv_tg_adapt_types` AS slot3 ON c.tg_adapt_types_fk = slot3.serv_tg_adapt_types_key
-                                 LEFT JOIN `flocklab`.`tbl_serv_tg_adapt_list` AS d ON obs.slot_4_tg_adapt_list_fk = d.serv_tg_adapt_list_key
-                                 LEFT JOIN `flocklab`.`tbl_serv_tg_adapt_types` AS slot4 ON d.tg_adapt_types_fk = slot4.serv_tg_adapt_types_key
-                                 WHERE
-                                     obs.status = 'online' AND (
-                                     LOWER(slot1.name) = LOWER('%s') OR
-                                     LOWER(slot2.name) = LOWER('%s') OR
-                                     LOWER(slot3.name) = LOWER('%s') OR
-                                     LOWER(slot4.name) = LOWER('%s'))
-                                 ORDER BY obs.observer_id""" % (platform,platform,platform,platform)
-                        cur.execute(sql)
-                        ret = cur.fetchall()
-                        if not ret:
-                            logger.info("Target platform %s not available, skipping link test." % platform)
-                            continue
-                        logger.debug("Observers with platform %s: %s" %(platform,' '.join([x[0] for x in ret])))
-                        obsIdTags = tree.xpath('//d:obsIds', namespaces=ns)
-                        for o in obsIdTags:
-                            o.text = ' '.join([x[0] for x in ret])
-                        targetIdTags = tree.xpath('//d:targetIds', namespaces=ns)
-                        for o in targetIdTags:
-                            o.text = ' '.join(map(str,list(range(len(ret)))))
-                        # generate temporary test config
-                        (fd, xmlpath) = tempfile.mkstemp(suffix='.xml')
-                        tree.write(xmlpath, xml_declaration=True, encoding="UTF-8")
-                        logger.info("add link test: %s" % linktestfile)
-                        cmd = [config.get("linktests", "starttest_script"), '-c', "%s" % xmlpath]
-                        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, cwd=os.path.dirname(config.get("linktests", "starttest_script")))
-                        out, err = p.communicate()
-                        rs = p.wait()
-                        testid = re.search("Test ID: ([0-9]*)",out)
-                        if (testid is None) | (rs != SUCCESS):
-                            logger.error("Could not register link test %s (%s)" % (linktestfile,err))
-                        else:
-                            # flag in db
-                            sql = "INSERT INTO `tbl_serv_web_link_measurements` (test_fk, begin, end, platform_fk, links) \
-                                   SELECT %s, NOW(), NOW(), serv_platforms_key, NULL from tbl_serv_platforms WHERE serv_platforms_key = (SELECT `b`.platforms_fk FROM \
-                                   flocklab.tbl_serv_map_test_observer_targetimages as `a` left join \
-                                   flocklab.tbl_serv_targetimages as `b` ON (a.targetimage_fk = b.serv_targetimages_key) WHERE `a`.test_fk=%s ORDER BY serv_platforms_key LIMIT 1)"% (testid.group(1), testid.group(1))
-                            cur.execute(sql)
-                            cn.commit()
-                        os.remove(xmlpath)
-                # Delete the lockfile:
-                os.remove(lockfile)
-                logger.debug("Removed lockfile %s"%lockfile)
+    if not rs:
+        logger.debug("No link measurements found.")
+        lasttest = 60 * config.getint("linktests", "interval_hours") * 2    # any number > (interval_hours + interval_random_minutes) will do
+    else:
+        lasttest = int(rs[0])
+        logger.debug("Last link measurement was %s minutes ago." % (lasttest))
+    nexttest = 60 * config.getint("linktests", "interval_hours") + random.randint(-config.getint("linktests", "interval_random_minutes"), config.getint("linktests", "interval_random_minutes"))
+    if lasttest >= nexttest:
+        # Schedule new tests
+        # Check if the lockfile is present:
+        lockfile = config.get("linktests", "lockfile")
+        if os.path.exists(lockfile):
+            logger.debug("Lockfile %s exists already. Skip adding new linktests." % lockfile)
+            # If the last scheduled link tests are a long time ago, generate a warning since it may be that the lockfile was not deleted for whatever reason:
+            if lasttest > 2 * nexttest:
+                logger.error("Lockfile %s exists and the last linktest was %d min ago (interval is %d min)." % (lockfile, lasttest, config.getint("linktests", "interval_hours")))
+        else:
+            # Create the lockfile:
+            basedir = os.path.dirname(lockfile)
+            if not os.path.exists(basedir):
+                os.makedirs(basedir)
+            open(lockfile, 'a').close()
+            logger.debug("Touched lockfile %s" % lockfile)
+            # Schedule new tests
+            logger.debug("Schedule new link measurements")
+            listing = os.listdir(config.get("linktests", "testfolder"))
+            for linktestfile in listing:
+                if re.search("\.xml$", os.path.basename(linktestfile)) is not None:
+                    logger.info("Adding link test '%s'." % linktestfile)
+                    cmd = [config.get("linktests", "starttest_script"), '-c', "%s" % os.path.join(config.get("linktests", "testfolder"), linktestfile)]
+                    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, cwd=os.path.dirname(config.get("linktests", "starttest_script")))
+                    out, err = p.communicate()
+                    rs = p.wait()
+                    testid = re.search("Test ID: ([0-9]*)", out)
+                    if (testid is None) | (rs != SUCCESS):
+                        logger.error("Could not register link test %s (%s)" % (linktestfile, err.strip()))
+                    else:
+                        # flag in db
+                        sql = "INSERT INTO `tbl_serv_link_measurements` (test_fk, begin, platform_fk, links, radio_cfg) \
+                               SELECT %s, NOW(), serv_platforms_key, NULL, '' from tbl_serv_platforms WHERE serv_platforms_key = (SELECT `b`.platforms_fk FROM \
+                               flocklab.tbl_serv_map_test_observer_targetimages as `a` left join \
+                               flocklab.tbl_serv_targetimages as `b` ON (a.targetimage_fk = b.serv_targetimages_key) WHERE `a`.test_fk=%s ORDER BY serv_platforms_key LIMIT 1)" % (testid.group(1), testid.group(1))
+                        cur.execute(sql)
+            cn.commit()
+            # Delete the lockfile:
+            os.remove(lockfile)
+            logger.debug("Removed lockfile %s" % lockfile)
 ### END schedule_linktest()
#! /usr/bin/env python3
"""
Copyright (c) 2010 - 2020, ETH Zurich, Computer Engineering Group
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
import sys
import os
import numpy as np
import pandas as pd
import json
from collections import OrderedDict
import pickle
from flocklab import Flocklab
from flocklab import *
fl = Flocklab()
assertionOverride = False
outputdir = "./data"
################################################################################
# check arguments (either at least one test ID or the path to test results must be specified)
if len(sys.argv) < 2:
    print("no test number or path specified!")
    sys.exit(1)
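# Example invocation (hypothetical test ID / paths, assuming the script is run from
# the directory that holds the downloaded test results):
#   ./eval_linktest.py 1234                   # test ID: fetches ./1234/serial.csv via fl.getResults() if missing
#   ./eval_linktest.py ./results_dir          # directory: expects ./results_dir/serial.csv
#   ./eval_linktest.py ./1234/serial.csv      # file: used directly (file name must contain 'serial')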
################################################################################
def getJson(text):
    '''Find and convert JSON in a single line from serial output. Returns None if no valid JSON could be found.
    '''
    ret = None
    # find the index of the first opening brace
    idx = 0
    if '{' not in text:
        return ret
    for i in range(len(text)):
        if text[i] == '{':
            idx = i
            break
    try:
        ret = json.loads(text[idx:], strict=False)
    except json.JSONDecodeError:
        print('WARNING: json could not be parsed: {}'.format(text[idx:]))
    return ret
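# A minimal illustration of getJson (hypothetical serial lines; any prefix before
# the first '{' is skipped):
#   getJson('r[5] {"type": "TxDone"}')  ->  {'type': 'TxDone'}
#   getJson('no json in this line')     ->  None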
def getRows(roundNo, gDf):
    '''Extract rows for the requested round from gDf.
    '''
    inRange = False
    ret = []
    for d in gDf.data.to_list():
        if d['type'] == 'StartOfRound':
            if d['node'] == roundNo:
                inRange = True
        elif d['type'] == 'EndOfRound':
            if d['node'] == roundNo:
                break
        elif inRange:
            ret.append(d)
    return ret
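# In other words: getRows() returns every record a node printed between the
# 'StartOfRound' and 'EndOfRound' markers whose 'node' field equals roundNo,
# i.e. the TxDone/RxDone records that belong to that transmitter's round.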
################################################################################
for arg in sys.argv[1:]:
    seriallog = ""
    testid = None
    if os.path.isfile(arg) and "serial" in arg:
        seriallog = arg
    elif os.path.isdir(arg):
        seriallog = os.path.join(arg, "serial.csv")
    else:
        try:
            testid = int(arg)
            print('testid: {}'.format(testid))
            seriallog = os.getcwd() + "/{}/serial.csv".format(testid)
        except ValueError:
            print("invalid argument %s" % arg)
    if testid:
        # download test results if directory does not exist
        if not os.path.isfile(seriallog):
            fl.getResults(testid)
    if not os.path.isfile(seriallog):
        print("file %s not found" % seriallog)
        continue
    df = fl.serial2Df(seriallog, error='ignore')
    df.sort_values(by=['timestamp', 'observer_id'], inplace=True, ignore_index=True)
    # convert output with valid json to dict and remove other rows
    keepMask = []
    resList = []
    for idx, row in df.iterrows():
        jsonDict = getJson(row['output'])
        keepMask.append(1 if jsonDict else 0)
        if jsonDict:
            resList.append(jsonDict)
    dfd = df[np.asarray(keepMask).astype(bool)].copy()
    dfd['data'] = resList
    # figure out the list of nodes available in the serial trace
    nodeList = list(set(dfd.observer_id))
    numNodes = len(nodeList)
    # prepare
    groups = dfd.groupby('observer_id')
    prrMatrix = np.empty((numNodes, numNodes,)) * np.nan         # packet reception ratio (PRR)
    crcErrorMatrix = np.empty((numNodes, numNodes,)) * np.nan    # ratio of packets with CRC error
    pathlossMatrix = np.empty((numNodes, numNodes,)) * np.nan    # path loss
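    # Convention used below: matrix row index = transmitting node, column index =
    # receiving node (see the prrMatrix[txNodeIdx][rxNodeIdx] assignments further down).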
    # Get TestConfig and RadioConfig & check for consistency
    testConfigDict = OrderedDict()
    radioConfigDict = OrderedDict()
    for node in nodeList:
        testConfigFound = False
        radioConfigFound = False
        testConfigDict[node] = None
        radioConfigDict[node] = None
        gDf = groups.get_group(node)
        for d in gDf.data.to_list():
            if d['type'] == 'TestConfig':
                testConfigDict[node] = d
                testConfigFound = True
            if d['type'] == 'RadioConfig':
                radioConfigDict[node] = d
                radioConfigFound = True
            if testConfigFound and radioConfigFound:
                break
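    # Sanity check: every node must have reported the identical TestConfig and
    # RadioConfig; a mismatch aborts the evaluation here.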
    for node in nodeList:
        assert testConfigDict[nodeList[0]] == testConfigDict[node]
        assert radioConfigDict[nodeList[0]] == radioConfigDict[node]
    testConfig = testConfigDict[nodeList[0]]
    radioConfig = radioConfigDict[nodeList[0]]
    # Make sure that round boundaries do not overlap
    if not assertionOverride:
        currentSlot = -1
        for d in dfd.data.to_list():
            if d['type'] == 'StartOfRound':
                node = d['node']
                # print('Start: {}'.format(node))
                assert node >= currentSlot
                if node > currentSlot:
                    currentSlot = node
            elif d['type'] == 'EndOfRound':
                node = d['node']
                # print('End: {}'.format(node))
                assert node >= currentSlot
    # extract statistics (PRR, path loss, ...)
    # iterate over rounds
    for roundIdx, roundNo in enumerate(nodeList):
        # for roundNo in [nodeList[0]]:
        # print('Round: {}'.format(roundNo))
        txNode = roundNo
        txNodeIdx = roundIdx
        numTx = 0
        numRxDict = OrderedDict()
        numCrcErrorDict = OrderedDict()
        rssiAvgDict = OrderedDict()
        # iterate over nodes
        for nodeIdx, node in enumerate(nodeList):
            rows = getRows(roundNo, groups.get_group(node))
            if node == txNode:
                # print(node)
                txDoneList = [elem for elem in rows if (elem['type']=='TxDone')]
                numTx = len(txDoneList)
                # print(numTx, testConfig['numTx'])
                assert numTx == testConfig['numTx']
            else:
                rxDoneList = [elem for elem in rows if (elem['type']=='RxDone' and elem['key']==testConfig['key'] and elem['crc_error']==0)]
                crcErrorList = [elem for elem in rows if (elem['type']=='RxDone' and elem['crc_error']==1)]
                numRxDict[node] = len(rxDoneList)
                numCrcErrorDict[node] = len(crcErrorList)
                rssiAvgDict[node] = np.mean([elem['rssi'] for elem in rxDoneList]) if len(rxDoneList) else np.nan
        # fill PRR matrix
        for rxNode, numRx in numRxDict.items():
            rxNodeIdx = nodeList.index(rxNode)
            prrMatrix[txNodeIdx][rxNodeIdx] = numRx/numTx
        # fill CRC error matrix
        for rxNode, numCrcError in numCrcErrorDict.items():
            rxNodeIdx = nodeList.index(rxNode)
            crcErrorMatrix[txNodeIdx][rxNodeIdx] = numCrcError/numTx
            # NOTE: some CRC error cases are ignored while getting the rows (getRows()) because the json parser cannot parse the RxDone output
        # fill path loss matrix
        for rxNode, rssi in rssiAvgDict.items():
            rxNodeIdx = nodeList.index(rxNode)
            pathlossMatrix[txNodeIdx][rxNodeIdx] = -(rssi - radioConfig['txPower'])
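            # i.e. path loss [dB] = txPower [dBm] - mean RSSI [dBm] over the correctly received packets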
    prrMatrixDf = pd.DataFrame(data=prrMatrix, index=nodeList, columns=nodeList)
    crcErrorMatrixDf = pd.DataFrame(data=crcErrorMatrix, index=nodeList, columns=nodeList)
    pathlossMatrixDf = pd.DataFrame(data=pathlossMatrix, index=nodeList, columns=nodeList)
    # save obtained data to file (including nodeList to resolve idx <-> node ID relations)
    if testid:
        pklPath = '{}/linktest_data_{}.pkl'.format(outputdir, testid)
    else:
        pklPath = '{}/linktest_data.pkl'.format(outputdir)
    os.makedirs(os.path.split(pklPath)[0], exist_ok=True)
    with open(pklPath, 'wb') as f:
        d = {
            'testConfig': testConfig,
            'radioConfig': radioConfig,
            'nodeList': nodeList,
            'prrMatrix': prrMatrix,
            'crcErrorMatrix': crcErrorMatrix,
            'pathlossMatrix': pathlossMatrix,