To receive notifications about scheduled maintenance, please subscribe to the mailing-list gitlab-operations@sympa.ethz.ch. You can subscribe to the mailing-list at https://sympa.ethz.ch

Commit 93fcd7c5 authored by Reto Da Forno's avatar Reto Da Forno
Browse files

files moved and some cleanup, config files consolidated

parent b8745389
......@@ -7,5 +7,4 @@ CRONLOG=/home/flocklab/logs/cron.log
*/10 * * * * /home/flocklab/testmanagementserver/flocklab_cleaner.py --debug >> $CRONLOG 2>&1
0 5 * * * /home/flocklab/testmanagementserver/flocklab_retention_cleaner.py --debug >> $CRONLOG 2>&1
0 0 * * * /usr/sbin/logrotate --state /home/flocklab/logrotate.state /home/flocklab/logrotate >> $CRONLOG 2>&1
1 0 1 * * /home/flocklab/testmanagementserver/flocklab_mmccheck.py --debug >> $CRONLOG 2>&1
0 2 * * 1 php /home/flocklab/webserver/update_stats.php >> $CRONLOG 2>&1
......@@ -238,7 +238,7 @@ DROP TABLE IF EXISTS `tbl_serv_platforms`;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `tbl_serv_platforms` (
`serv_platforms_key` int(10) unsigned NOT NULL AUTO_INCREMENT,
`name` varchar(45) COLLATE utf8_bin NOT NULL,
`name` varchar(45) COLLATE utf8_general_ci NOT NULL,
`description` text COLLATE utf8_bin,
`freq_2400` tinyint(1) NOT NULL DEFAULT '0',
`freq_868` tinyint(1) NOT NULL DEFAULT '0',
......@@ -391,7 +391,7 @@ DROP TABLE IF EXISTS `tbl_serv_tg_adapt_types`;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `tbl_serv_tg_adapt_types` (
`serv_tg_adapt_types_key` int(10) unsigned NOT NULL AUTO_INCREMENT,
`name` varchar(45) COLLATE utf8_bin NOT NULL,
`name` varchar(45) COLLATE utf8_general_ci NOT NULL,
`description` text COLLATE utf8_bin,
`platforms_fk` int(10) unsigned NOT NULL,
`last_changed` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
......
; This is the FlockLab server configuration file
[email]
admin_email = [your_email]
flocklab_email = [flocklab_user_email]
mailserver = [your_mailserver]
; database connection configuration
[database]
user = flocklab_testmng
password =
database = flocklab
host = [server_url]
timeformat = %%Y-%%m-%%d %%H:%%M:%%S ;Time format which is used on the MySQL database. Must be parsable by strftime.
; test config
[tests]
; IMPORTANT: Keep the setup/cleanuptimes in sync with the file /home/flocklab/testmanagement/user.ini
setuptime = 3 ;Minutes needed for test setup
cleanuptime = 3 ;Minutes needed for test cleanup
pidfolder = /tmp/flocklab/ ;Folder for pid files
guard_starttime = 1 ;Minutes needed to start the test (reset pin)
guard_stoptime = 1 ;Minutes needed to stop the test (reset pin)
testvalidator = /home/flocklab/testmanagement/testconfig_validator.py
allowparalleltests = 0 ;Allow parallel test execution on the same observer if resource conflicts are avoided
; Observer config
[observer]
testconfigfolder = /home/root/mmc/flocklab/curtest/ ;Folder on the observer into which the test configuration and target image are uploaded
obsdbfolder = /home/root/mmc/flocklab/db/ ;Folder on the observer in which the database and all auxiliary files are located
timeformat = %%Y-%%m-%%dT%%H:%%M:%%S ;Time format which is used on the observer. Must be parsable by strftime.
starttestscript = flocklab_starttest.py
stoptestscript = flocklab_stoptest.py
serialidscript = tg_serialid.py
; Config for fetcher
[fetcher]
min_sleeptime = 60 ;Minimum sleeptime in seconds which DB fetcher waits between polling the observers
max_rand_sleeptime = 15 ;Maximum random time in seconds the fetcher adds to minsleeptime
shutdown_timeout = 240 ;Timeout in seconds until fetcher processes are killed
obsfile_dir = /home/flocklab/fetcher/obsfiles/ ;Folder to store DB files from observers before they are aggregated
obsfile_debug_dir = /home/flocklab/fetcher/debug/ ;Folder to store DB files from observers before they are aggregated
obsfile_debug_dir_max_age_days = 7 ;Time to keep debug files
testresults_dir = /home/flocklab/fetcher/testresults/ ;Folder to store aggregated test results for being processed by the archiver
cpus_errorlog = 1 ;Maximum number of CPUs to use for aggregating data from error logging service
cpus_serial = 1 ;Maximum number of CPUs to use for aggregating data from serial service
cpus_gpiosetting = 1 ;Maximum number of CPUs to use for aggregating data from GPIO setting service
cpus_gpiomonitoring = 2 ;Maximum number of CPUs to use for aggregating data from GPIO monitoring service
cpus_powerprofiling = 2 ;Maximum number of CPUs to use for converting and aggregating data from power profiling service
commitsize = 10000 ;Maximum number of rows to write to the aggregated files at the same time
; Config for archiver
[archiver]
max_instances = 100 ;Maximum of concurrently running instances of the archiver
pigz_max_cpus = 4 ;Maximum number of CPUs to be used by pigz compressor
nice_level = 15 ;Nice level for tar and pigz processes. Possible values: 0 to 19
email_maxsize = 10485760 ;Maximum size in bytes of archive to be emailed to the user. If archive is bigger, the test results can only be fetched from the user interface
archive_ext = .tar.gz ;Extension of archived files
archive_dir = /home/flocklab/test_archive ;Directory which stores all test results
; Config for cleaner
[cleaner]
max_instances = 1 ;Maximum of concurrently running instances of the cleaner
keeptime_viz = 30 ;Time in days to keep viz data
; Config for the dispatcher
[dispatcher]
schedulerscript = /home/flocklab/testmanagementserver/flocklab_scheduler.py ;Path to scheduler script on testmanagement server
validationscript = /home/flocklab/testmanagement/testconfig_validator.py ;Path to validation script on testmanagement server
dispatcherscript = /home/flocklab/testmanagementserver/flocklab_dispatcher.py ;Path to dispatcher script on testmanagement server
fetcherscript = /home/flocklab/testmanagementserver/flocklab_fetcher.py ;Path to fetcher script on testmanagement server
archiverscript = /home/flocklab/testmanagementserver/flocklab_archiver.py ;Path to archiver script on testmanagement server
testtolinkmapscript = /home/flocklab/testmanagementserver/test_to_linkmap.py ;Path to linkmap evaluation script on testmanagement server
serialproxyscript = /home/flocklab/testmanagementserver/flocklab_serialproxy.py ;Path to serial proxy script on testmanagement server
default_tg_voltage = 3.3 ;Default voltage for targets if not specified in XML
default_sampling_divider = 2 ;Default sampling divider for power profiling if not specified in XML
archiver_waittime = 10 ;Wait time between calls to the archiver if the maximum number of archiver instances is reached
binutils_arm = /home/flocklab/binutils/binutils-arm ;Path to ARM binutils
binutils_msp430 = /home/flocklab/binutils/binutils-msp430/usr/bin ;Path to MSP430 binutils
setsymbolsscript = /home/flocklab/binutils/tos-set-symbols ;Path to script used to set symbols (e.g. node ID)
; XML test configuration file settings
[xml]
namespace = http://www.flocklab.ethz.ch ;XML validation file (flocklab.xsd) and test XML config
schemapath = /home/flocklab/public_html/xml/flocklab.xsd
; regular link tests
[linktests]
user = flocklab ;User that owns the link measurements
interval_hours = 47 ;Interval between link measurements
interval_random_minutes = 120 ;Random slack (+/-)
testfolder = /home/flocklab/testmanagementserver/linktests
starttest_script = /home/flocklab/tools/flocklab
lockfile = /tmp/flocklab/linktest_schedule.lock
; visualization of test results
[viz]
enablepreview = 1 ;set to 1 to enable generation of preview data
imgdir = /home/flocklab/viz/ ;path to preview directory
; Cleaner which deletes test results (after a per-user retention time has expired)
[retentioncleaner]
max_instances = 1 ;Maximum of concurrently running instances of the script
expiration_leadtime = 14 ;Number of days to warn user before results are purged
; Config for serial proxy
[serialproxy]
startport = 50100 ;Start port for serial proxy to test users. For the real port, the observer ID is added to the start port.
obsdataport = 50001 ;Observer listens on this port for incoming connections from the testmanagement server
shutdown_timeout = 240 ;Timeout in seconds until proxy processes are killed
; tools for target image validation
[targetimage]
imagevalidator = /home/flocklab/testmanagement/targetimage_validator.py
msp430 = /usr/bin/msp430-readelf
arm = /home/flocklab/binutils/binutils-arm/arm-angstrom-linux-gnueabi-readelf
; Recaptcha
[recaptcha]
sitekey = [your_site_key] ;get one at https://www.google.com/recaptcha/admin/create
secretkey = [your_secret_key]
; settings for testmanagement server
[testmanagementserver]
host = localhost
user = flocklab
basedir = /home/flocklab/testmanagementserver
scheduler = /home/flocklab/testmanagementserver/flocklab_scheduler.py
archivedir = /home/flocklab/testarchive
sshflags = ;additional flags
logdir = /home/flocklab/logs ;log directory for web
tempdir = /home/flocklab/tmp
venvwrapper = /home/flocklab/tools/wrapper.sh ;activates python virtual environment (leave blank if no venv)
; config for webserver session
[session]
expiretime = 1440 ;Seconds until session expires
dir = /tmp/flocklab_sessions
; This is the Flocklab testserver configuration file
; Comments start with ';', as in php.ini
[general]
admin_email = [your_email]
; database connection configuration
[database]
user = flocklab_testmng
password =
database = flocklab
host = [server_url]
timeformat = %%Y/%%m/%%d %%H:%%M:%%S ;Time format which is used on the MySQL database. Must be parsable by strftime.
; test config
[tests]
; IMPORTANT: Keep the setup/cleanuptimes in sync with the file /home/flocklab/testmanagement/user.ini
setuptime = 3 ;Minutes needed for test setup
cleanuptime = 3 ;Minutes needed for test cleanup
pidfolder = /tmp/flocklab/ ;Folder for pid files
; Observer config
[observer]
testconfigfolder = /home/root/mmc/flocklab/curtest/ ;Folder on the observer into which the test configuration and target image are uploaded
obsdbfolder = /home/root/mmc/flocklab/db/ ;Folder on the observer in which the database and all auxiliary files are located
timeformat = %%Y-%%m-%%dT%%H:%%M:%%S ;Time format which is used on the observer. Must be parsable by strftime.
starttestscript = flocklab_starttest.py
stoptestscript = flocklab_stoptest.py
serialidscript = tg_serialid.py
; Config for fetcher
[fetcher]
min_sleeptime = 60 ;Minimum sleeptime in seconds which DB fetcher waits between polling the observers
max_rand_sleeptime = 15 ;Maximum random time in seconds the fetcher adds to minsleeptime
shutdown_timeout = 240 ;Timeout in seconds until fetcher processes are killed
obsfile_dir = /home/flocklab/fetcher/obsfiles/ ;Folder to store DB files from observers before they are aggregated
obsfile_debug_dir = /home/flocklab/fetcher/debug/ ;Folder to store DB files from observers before they are aggregated
obsfile_debug_dir_max_age_days = 7 ;Time to keep debug files
testresults_dir = /home/flocklab/fetcher/testresults/ ;Folder to store aggregated test results for being processed by the archiver
cpus_errorlog = 1 ;Maximum number of CPUs to use for aggregating data from error logging service
cpus_serial = 1 ;Maximum number of CPUs to use for aggregating data from serial service
cpus_gpiosetting = 1 ;Maximum number of CPUs to use for aggregating data from GPIO setting service
cpus_gpiomonitoring = 2 ;Maximum number of CPUs to use for aggregating data from GPIO monitoring service
cpus_powerprofiling = 2 ;Maximum number of CPUs to use for converting and aggregating data from power profiling service
commitsize = 10000 ;Maximum number of rows to write to the aggregated files at the same time
; Config for archiver
[archiver]
max_instances = 100 ;Maximum of concurrently running instances of the archiver
pigz_max_cpus = 4 ;Maximum number of CPUs to be used by pigz compressor
nice_level = 15 ;Nice level for tar and pigz processes. Possible values: 0 to 19
email_maxsize = 10485760 ;Maximum size in bytes of archive to be emailed to the user. If archive is bigger, the test results can only be fetched from the user interface
archive_ext = .tar.gz ;Extension of archived files
archive_dir = /home/flocklab/test_archive ;Directory which stores all test results
; Config for cleaner
[cleaner]
max_instances = 1 ;Maximum of concurrently running instances of the cleaner
keeptime_viz = 30 ;Time in days to keep viz data
; Config for the dispatcher
[dispatcher]
schedulerscript = /home/flocklab/testmanagementserver/flocklab_scheduler.py ;Path to scheduler script on testmanagement server
validationscript = /home/flocklab/testmanagement/testconfig_validator.py ;Path to validation script on testmanagement server
dispatcherscript = /home/flocklab/testmanagementserver/flocklab_dispatcher.py ;Path to dispatcher script on testmanagement server
fetcherscript = /home/flocklab/testmanagementserver/flocklab_fetcher.py ;Path to fetcher script on testmanagement server
archiverscript = /home/flocklab/testmanagementserver/flocklab_archiver.py ;Path to archiver script on testmanagement server
testtolinkmapscript = /home/flocklab/testmanagementserver/test_to_linkmap.py ;Path to linkmap evaluation script on testmanagement server
serialproxyscript = /home/flocklab/testmanagementserver/flocklab_serialproxy.py ;Path to serial proxy script on testmanagement server
default_tg_voltage = 3.3 ;Default voltage for targets if not specified in XML
default_sampling_divider = 2 ;Default sampling divider for power profiling if not specified in XML
archiver_waittime = 10 ;Wait time between calls to the archiver if the maximum number of archiver instances is reached
binutils_arm = /home/flocklab/binutils/binutils-arm ;Path to ARM binutils
binutils_msp430 = /home/flocklab/binutils/binutils-msp430/usr/bin ;Path to MSP430 binutils
setsymbolsscript = /home/flocklab/binutils/tos-set-symbols ;Path to script used to set symbols (e.g. node ID)
; XML test configuration file settings
[xml]
namespace = http://www.flocklab.ethz.ch ;Must match namespace declaration in user.ini, XML validation file (flocklab.xsd) and test XML config
; regular link tests
[linktests]
user = flocklab ;User that owns the link measurements
interval_hours = 47 ;Interval between link measurements
interval_random_minutes = 120 ;Random slack (+/-)
testfolder = /home/flocklab/testmanagementserver/linktests
starttest_script = /home/flocklab/tools/flocklab
lockfile = /tmp/flocklab/linktest_schedule.lock
; Visualization of test results
[viz]
enablepreview = 1 ;set to 1 to enable generation of preview data
imgdir = /home/flocklab/viz/ ;path to preview directory
; Cleaner which deletes test results (after a per-user retention time has expired)
[retentioncleaner]
max_instances = 1 ;Maximum of concurrently running instances of the script
expiration_leadtime = 14 ;Number of days to warn user before results are purged
; Config for serial proxy
[serialproxy]
startport = 50100 ;Start port for serial proxy to test users. For the real port, the observer ID is added to the start port.
obsdataport = 50001 ;Observer listens on this port for incoming connections from the testmanagement server
shutdown_timeout = 240 ;Timeout in seconds until proxy processes are killed
; Config for MMC check script
[mmccheck]
reservation_group_id = 99 ;ID of reservation group in tbl_serv_reservations for mmc check
reservation_duration_h = 2 ;Expected upper bound of duration of mmc check
#!/usr/bin/env python3
__author__ = "Christoph Walser <walserc@tik.ee.ethz.ch>, Adnan Mlika"
__copyright__ = "Copyright 2010, ETH Zurich, Switzerland"
__license__ = "GPL"
import sys, os, getopt, errno, traceback, time, shutil, logging, subprocess, __main__, types
# Import local libraries
from lib.flocklab import SUCCESS
import lib.flocklab as flocklab
......@@ -22,18 +15,6 @@ logger = None
config = None
##############################################################################
#
# Error classes
#
##############################################################################
class Error(Exception):
    """Base class for all exceptions raised by this module."""
### END Error classes
##############################################################################
#
# Usage
......@@ -92,7 +73,7 @@ def main(argv):
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit(SUCCESS)
sys.exit(flocklab.SUCCESS)
elif opt in ("-e", "--email"):
send_email = True
elif opt in ("-d", "--debug"):
......@@ -102,7 +83,7 @@ def main(argv):
try:
testid = int(arg)
if testid <= 0:
raise Error
raise
except:
logger.warn("Wrong API usage: testid has to be a positive number")
sys.exit(errno.EINVAL)
......@@ -248,7 +229,7 @@ Yours faithfully,\nthe FlockLab server" %(testid, archivename)
cur.close()
cn.close()
sys.exit(SUCCESS)
sys.exit(flocklab.SUCCESS)
### END main()
......
#! /usr/bin/env python3
__author__ = "Christoph Walser <walserc@tik.ee.ethz.ch>"
__copyright__ = "Copyright 2010, ETH Zurich, Switzerland"
__license__ = "GPL"
import sys, os, getopt, errno, traceback, logging, time, __main__, shutil, glob, datetime
# Import local libraries
from lib.flocklab import SUCCESS
import lib.flocklab as flocklab
### Global variables ###
......@@ -21,18 +14,6 @@ logger = None
config = None
##############################################################################
#
# Error classes
#
##############################################################################
class Error(Exception):
    """Module-level base exception; catch this to handle any local error."""
### END Error classes
##############################################################################
#
# Usage
......@@ -46,7 +27,6 @@ def usage():
### END usage()
##############################################################################
#
# Main
......@@ -89,7 +69,7 @@ def main(argv):
logger.setLevel(logging.DEBUG)
elif opt in ("-h", "--help"):
usage()
sys.exit(SUCCESS)
sys.exit(flocklab.SUCCESS)
else:
logger.warn("Wrong API usage")
sys.exit(errno.EINVAL)
......@@ -206,7 +186,7 @@ def main(argv):
cn.close()
#logger.debug("Finished. Exit program.")
sys.exit(SUCCESS)
sys.exit(flocklab.SUCCESS)
### END main()
if __name__ == "__main__":
......
#! /usr/bin/env python3
__author__ = "Christoph Walser <walserc@tik.ee.ethz.ch>"
__copyright__ = "Copyright 2010, ETH Zurich, Switzerland"
__license__ = "GPL"
import sys, os, getopt, errno, threading, shutil, time, datetime, subprocess, tempfile, queue, re, logging, traceback, __main__, types, hashlib
from lxml import etree
from MySQLdb.constants import ER as MySQLErrors
# Import local libraries
from lib.flocklab import SUCCESS
import sys, os, getopt, errno, threading, shutil, time, datetime, subprocess, tempfile, queue, re, logging, traceback, __main__, types, hashlib, lxml, MySQLdb
import lib.flocklab as flocklab
......@@ -94,7 +85,7 @@ class StopTestThread(threading.Thread):
else:
out, err = p.communicate()
rs = p.returncode
if (rs == SUCCESS):
if (rs == flocklab.SUCCESS):
logger.debug("Test-stop script on observer ID %s succeeded." %(self._obsdict_key[self._obskey][1]))
elif (rs == 255):
msg = "Observer ID %s is not reachable, thus not able to stop test. Dataloss occurred possibly for this observer."%(self._obsdict_key[self._obskey][1])
......@@ -185,7 +176,7 @@ class StartTestThread(threading.Thread):
if self._abortEvent.is_set():
p.kill()
rs = p.returncode
if (rs != SUCCESS):
if (rs != flocklab.SUCCESS):
msg = "Upload of target image and config XML to observer ID %s failed with error number %d" %(self._obsdict_key[self._obskey][1], rs)
errors.append((msg, rs, self._obsdict_key[self._obskey][1]))
logger.error(msg)
......@@ -207,7 +198,7 @@ class StartTestThread(threading.Thread):
else:
out, err = p.communicate()
rs = p.wait()
if rs != SUCCESS:
if rs != flocklab.SUCCESS:
msg = "Test-start script on observer ID %s failed with error code %s and error message %s" %(self._obsdict_key[self._obskey][1], errno.errorcode[rs], str(out))
errors.append((msg, rs, self._obsdict_key[self._obskey][1]))
logger.error(msg)
......@@ -454,8 +445,8 @@ def start_test(testid, cur, cn, obsdict_key, obsdict_id):
errors.append(msg)
logger.error(msg)
else:
parser = etree.XMLParser(remove_comments=True)
tree = etree.fromstring(bytes(bytearray(ret[0], encoding = 'utf-8')), parser)
parser = xml.etree.XMLParser(remove_comments=True)
tree = xml.etree.fromstring(bytes(bytearray(ret[0], encoding = 'utf-8')), parser)
ns = {'d': config.get('xml', 'namespace')}
logger.debug("Got XML from database.")
# Create XML files ---
......@@ -870,8 +861,8 @@ def stop_test(testid, cur, cn, obsdict_key, obsdict_id, abort=False):
errors.append(msg)
logger.error(msg)
else:
parser = etree.XMLParser(remove_comments=True)
tree = etree.fromstring(bytes(bytearray(ret[0], encoding = 'utf-8')), parser)
parser = xml.etree.XMLParser(remove_comments=True)
tree = xml.etree.fromstring(bytes(bytearray(ret[0], encoding = 'utf-8')), parser)
ns = {'d': config.get('xml', 'namespace')}
logger.debug("Got XML from database.")
# only stop serialproxy if remote IP specified in xml
......@@ -934,7 +925,7 @@ def stop_test(testid, cur, cn, obsdict_key, obsdict_id, abort=False):
cmd.append("--debug")
p = subprocess.Popen(cmd)
rs = p.wait()
if rs not in (SUCCESS, errno.ENOPKG): # SUCCESS (0) is successful stop, ENOPKG (65) means the service was not running.
if rs not in (flocklab.SUCCESS, errno.ENOPKG): # flocklab.SUCCESS (0) is successful stop, ENOPKG (65) means the service was not running.
msg = "Could not stop database fetcher for test ID %d. Fetcher returned error %d"%(testid, rs)
errors.append(msg)
logger.error(msg)
......@@ -985,8 +976,8 @@ def prepare_testresults(testid, cur):
cur.execute("SELECT `testconfig_xml` FROM `tbl_serv_tests` WHERE (`serv_tests_key` = %s)" %testid)
ret = cur.fetchone()
if ret:
parser = etree.XMLParser(remove_comments=True)
tree = etree.fromstring(bytes(bytearray(ret[0], encoding = 'utf-8')), parser)
parser = xml.etree.XMLParser(remove_comments=True)
tree = xml.etree.fromstring(bytes(bytearray(ret[0], encoding = 'utf-8')), parser)
ns = {'d': config.get('xml', 'namespace')}
logger.debug("Got XML from database.")
# Check if user wants results as email
......@@ -1014,7 +1005,7 @@ def prepare_testresults(testid, cur):
while rs == errno.EUSERS:
p = subprocess.Popen(cmd)
rs = p.wait()
if rs not in (SUCCESS, errno.EUSERS): # SUCCESS (0) is successful stop, EUSERS (87) means the maximum number of allowed instances is reached.
if rs not in (flocklab.SUCCESS, errno.EUSERS): # flocklab.SUCCESS (0) is successful stop, EUSERS (87) means the maximum number of allowed instances is reached.
msg = "Could not trigger archiver. Archiver returned error %d"%(rs)
logger.error(msg)
logger.error("Tried to execute %s"%str(cmd))
......@@ -1046,7 +1037,7 @@ def evalute_linkmeasurement(testid, cur):
cmd = [config.get('dispatcher', 'testtolinkmapscript')]
p = subprocess.Popen(cmd)
rs = p.wait()
if rs != SUCCESS:
if rs != flocklab.SUCCESS:
msg = "Error %s returned from testtolinkmap script" % str(rs)
logger.error(msg)
errors.append(msg)
......@@ -1087,7 +1078,7 @@ def inform_user(testid, cur, job, errors, warnings):
msg = "Your test has been aborted as requested and the results (if any) will be available on the website soon\nTest results are also accessible using webdav: webdavs://www.flocklab.ethz.ch/user/webdav/\nConsider the following warnings:\n\n"
for warn in warnings:
msg += "\t * %s\n" %warn
ret = SUCCESS
ret = flocklab.SUCCESS
else:
if job == 'start':
subj = "Test %d starting as planned" %testid
......@@ -1098,7 +1089,7 @@ def inform_user(testid, cur, job, errors, warnings):
elif job == 'abort':
subj = "Test %d aborted as requested" %testid
msg = "Your test has been aborted as requested. The results (if any) will be available on the website soon.\nTest results are also accessible using webdav: webdavs://www.flocklab.ethz.ch/user/webdav/"
ret = SUCCESS
ret = flocklab.SUCCESS
rs = flocklab.get_test_owner(cur, testid)
if isinstance(rs, tuple):
......@@ -1178,7 +1169,7 @@ def db_register_activity(testid, cur, cn, action, obskeys):
cn.commit()
register_ok = False
except MySQLdb.OperationalError as e: # retry if deadlock
if e.args[0] == MySQLErrors.LOCK_DEADLOCK:
if e.args[0] == MySQLdb.constants.ER.LOCK_DEADLOCK:
time.sleep(1)
spin = True
else:
......@@ -1262,7 +1253,7 @@ def main(argv):
logger.debug("Detected debug flag.")
elif opt in ("-h", "--help"):
usage()
sys.exit(SUCCESS)
sys.exit(flocklab.SUCCESS)
elif opt in ("-t", "--testid"):
try:
testid = int(arg)
......@@ -1460,7 +1451,7 @@ def main(argv):
msg = "Unexpected error: %s: %s\n%s"%(str(sys.exc_info()[0]), str(sys.exc_info()[1]), traceback.format_exc())
print(msg)
flocklab.error_logandexit(msg, errno.EFAULT, name, logger, config)
sys.exit(SUCCESS)
sys.exit(flocklab.SUCCESS)
### END main()
......
#! /usr/bin/env python3
import os, sys, getopt, traceback, MySQLdb, signal, random, time, errno, multiprocessing, subprocess, re, logging, __main__, threading, struct, types, queue, math, shutil
from lxml import etree
# Import local libraries
import os, sys, getopt, traceback, MySQLdb, signal, random, time, errno, multiprocessing, subprocess, re, logging, __main__, threading, struct, types, queue, math, shutil, lxml
import lib.daemon as daemon
import lib.flocklab as flocklab
from lib.flocklab import SUCCESS
from shutil import copyfile
### Global variables ###
###
......@@ -36,15 +32,12 @@ serialdict = None
ITEM_TO_PROCESS = 0
ITEM_PROCESSED = 1
##############################################################################
#
# Error classes
#
##############################################################################
class Error(Exception):
    """Common ancestor for exceptions defined in this module."""
class DbFileEof(Exception):
    """Signals that the end of an observer DB file has been reached."""
......@@ -81,7 +74,6 @@ class ServiceInfo():
### END ServiceInfo
##############################################################################
#
# sigterm_handler
......@@ -125,7 +117,6 @@ def sigterm_handler(signum, frame):
### END sigterm_handler
##############################################################################
#
# Functions for parsing observer DB files data
......@@ -151,7 +142,6 @@ def parse_error_log(buf):
return (str(_data[2]), _data[3], "%i.%06i"%(_data[0],_data[1]))
##############################################################################
#
# Functions for converting observer DB data
......@@ -192,7 +182,6 @@ def read_from_db_file(dbfile):
### END read_from_db_file
##############################################################################
#
# worker_convert_and_aggregate: Worker function for multiprocessing pools.
......@@ -282,7 +271,6 @@ def worker_convert_and_aggregate(queueitem=None, nodeid=None, resultfile_path=No
### END worker_convert_and_aggregate
##############################################################################
#
# worker_gpiotracing: Worker function for converting and aggregating gpio
......@@ -321,7 +309,6 @@ def worker_gpiotracing(queueitem=None, nodeid=None, resultfile_path=None, slotca
### END worker_gpiotracing