To receive notifications about scheduled maintenance, please subscribe to the mailing list gitlab-operations@sympa.ethz.ch at https://sympa.ethz.ch

Commit bbcb446b, authored by Philipp Miedl

Increased reproducibility and comparability of data leak evaluations using ExOT
[alias]
# `git st` -> `git status`
st = status
# `git hist` -> one-line-per-commit graph log: hash, date, subject, refs, author
hist = log --pretty=format:\"%h %ad | %s%d [%an]\" --graph --date=short
[commit]
# Commit-message template pre-loaded by `git commit`
template = .gitmessage
# junk
junk/
_junk/
data/
_logs/
*/_logs/
# OS
.DS_Store
.DS_Store?
# IDE
*.sublime-*
.idea/
# build & distribution
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# pyinstaller
*.manifest
*.spec
# python
__pycache__/
*/__pycache__/
*.py[cod]
*$py.class
.mypy_cache/
.ipynb*
*.local.*
# poetry & pyenv
.python-version
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# git
# Re-include git metadata files that wildcard patterns above (e.g. `.ipynb*`,
# `*.local.*`) could otherwise hide; `!` negations must come after the
# patterns they override.
!*/.git*
!.git*
# Work in progress files
*WiP*
# Subject line (keep to ~50 characters)
# Multi-line description (wrap at ~72 characters)
# pre-commit configuration (https://pre-commit.com).
# NOTE(review): indentation was lost in this copy; restored to valid YAML.
default_language_version:
  python: python3.7

repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v2.1.0
    hooks:
      - id: trailing-whitespace
      - id: check-added-large-files
      - id: detect-private-key

  - repo: local
    hooks:
      # Print the interpreter version so each run records the toolchain used.
      - id: version
        name: Check Python version
        entry: poetry run python --version
        language: system
        verbose: true
        pass_filenames: false
        always_run: true
      # Format source code with black, configured from pyproject.toml.
      - id: black
        name: Format source code
        entry: poetry run python -m black
        args: ["--config", "pyproject.toml", "-q"]
        language: system
        require_serial: true
        types: [python]
      # Sort imports with isort; settings come from setup.cfg.
      - id: isort
        name: Sort imports
        entry: poetry run python -m isort
        args: ["-sp", "setup.cfg", "-y"]
        language: system
        types: [python]
Copyright (c) 2015-2020, Swiss Federal Institute of Technology (ETH Zurich)
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Ignore machine-local configuration overrides, but keep the shared template.
*.local.toml
!template.toml
# Configuration name
name = "Example_Sweep_Thermal"
# Save path
save_path = "./data"
# Optional: Local backup path
backup_path = "./data/_backup"
# Optional: Action to perform if experiment directory exists
experiment_exists_action = "overwrite" # overwrite, move, halt
# Optional: Backup configuration (disabled; uncomment and fill in to enable)
# [BACKUP]
# user = ""
# host = ""
# ssh_key = ""
# port = ""
# path = ""
# Experiment settings
[EXPERIMENT]
type = "ExploratoryExperiment"
channel = "Thermal"
# Phases are left empty here; for exploratory runs they can be supplied at
# runtime (see the commented example below).
[EXPERIMENT.PHASES]
# test = {schedule="path to file/actual schedule as DF", repetitions = 10} # could also be defined in the notebook
# Experiment layers
[EXPERIMENT.LAYERS]
# Normalized: was {timebase='ns'} — double-quoted strings and spaced `=`
# match the layer definitions in the sibling configurations.
io = {name = "TimeValue", params = {timebase = "ns"}}
# Experiment general settings
[EXPERIMENT.GENERAL]
latency = 10
fan = true
governors = "userspace"
frequencies = "max"
sampling_period = 1e-3
delay_between_executions = 5.0
delay_after_bootstrap = 30.0
active_wait = false
# Apps and zones
[ENVIRONMENTS.Radagast.APPS]
snk = {executable = "meter_thermal_msr", zone = "host"}
src = {executable = "generator_utilisation_mt", zone = "host"}
# Platform settings -> source
[ENVIRONMENTS.Radagast.src]
# generator
generator.cores = [5]
generator.policy = "round_robin"
generator.priority = 99
generator.worker_priority = 98
# logging
logging.debug_log_filename = "" # set during execution
logging.app_log_filename = "" # set during execution
logging.log_level = "info"
# schedule reader
schedule_reader.input_file = "" # set during execution
schedule_reader.reading_from_file = true
# Platform settings -> sink
[ENVIRONMENTS.Radagast.snk]
# logging
logging.debug_log_filename = "" # set during execution
logging.app_log_filename = "" # set during execution
logging.log_level = "info"
thermal_msr.cores = [0, 2, 4, 6]
thermal_msr.package = false
# host
host.period = 1e-3
host.policy = "round_robin"
host.pinning = 0
host.priority = 90
host.log_header = true
host.start_immediately = false
# Configuration name
name = "Test_Thermal"
# Save path
save_path = "./data"
# Optional: Local backup path
backup_path = "./data/_backup"
# Optional: Action to perform if experiment directory exists
experiment_exists_action = "move" # overwrite, move, halt
# Optional: Backup configuration (disabled; uncomment and fill in to enable)
# [BACKUP]
# user = ""
# host = ""
# ssh_key = ""
# port = ""
# path = ""
# Experiment settings
[EXPERIMENT]
type = "PerformanceExperiment"
channel = "Thermal"
# Phase definitions; symbol_rates is a literal list here, but the commented
# examples show a string form — presumably evaluated by the consumer (TODO
# confirm against the experiment loader).
[EXPERIMENT.PHASES]
train = {bit_count = 60, symbol_rates = [1], repetitions = 1}
#train = {bit_count = 60, symbol_rates = "list(range(1, 2))", repetitions = 2}
#eval = {bit_count = 120, symbol_rates = "list(range(1, 2))", repetitions = 2}
# Experiment layers
[EXPERIMENT.LAYERS]
src = {name = "SourcePassthrough", params = {}}
lne = {name = "ManchesterLineCoding", params = {}} # uses signal from Thermal()
#lne = {name = "GenericLineCoding", params = {}} # uses signal from Thermal()
# saturate_mapping uses a bare-integer TOML key: symbol 1 maps to -1
rdp = {name = "CoreActivation", params = {saturating = true, saturate_mapping = {1 = -1}}}
io = {name = "TimeValue", params = {output_timing_multiplier = 1e9, input_timing_multiplier = 1e9}}
# Experiment general settings
[EXPERIMENT.GENERAL]
latency = 10
fan = true
governors = "userspace"
frequencies = "max"
sampling_period = 1e-3
# Apps and zones
[ENVIRONMENTS.Radagast.APPS]
snk = {executable = "meter_thermal_msr", zone = "host"}
src = {executable = "generator_utilisation_mt", zone = "host"}
# Platform settings -> source
[ENVIRONMENTS.Radagast.src]
# generator
generator.cores = [3]
generator.policy = "round_robin"
generator.priority = 99
generator.worker_priority = 98
# logging
logging.debug_log_filename = "" # set during execution
logging.app_log_filename = "" # set during execution
logging.log_level = "info"
# schedule reader
schedule_reader.input_file = "" # set during execution
schedule_reader.reading_from_file = true
# Platform settings -> sink
[ENVIRONMENTS.Radagast.snk]
# logging
logging.debug_log_filename = "" # set during execution
logging.app_log_filename = "" # set during execution
logging.log_level = "info"
# thermal MSR meter samples the listed cores; package sensor disabled
thermal_msr.cores = [0, 2, 4, 6]
thermal_msr.package = false
# host
host.period = 1e-3
host.policy = "round_robin"
host.pinning = 0
host.priority = 90
host.log_header = true
host.start_immediately = false
# Configuration name
name = "Example_Sweep_Thermal"
# Save path
save_path = "./data"
# Optional: Local backup path
backup_path = "./data/_backup"
# Optional: Action to perform if experiment directory exists
experiment_exists_action = "overwrite" # overwrite, move, halt
# Optional: Backup configuration (disabled; uncomment and fill in to enable)
# [BACKUP]
# user = ""
# host = ""
# ssh_key = ""
# port = ""
# path = ""
# Experiment settings
[EXPERIMENT]
type = "FrequencySweepExperiment"
channel = "Thermal"
# Sweep phase: frequencies lists the sweep points; repetitions per point
[EXPERIMENT.PHASES]
sweep = {length_seconds = 10, frequencies = [0, 1, 2], repetitions = 2}
# Experiment layers
[EXPERIMENT.LAYERS]
# saturate_mapping uses a bare-integer TOML key: symbol 1 maps to -1
rdp = {name = "CoreActivation", params = {saturating = true, saturate_mapping = {1 = -1}}}
io = {name = "TimeValue", params = {}} #params = {output_timing_multiplier = 1e9, input_timing_multiplier = 1e9}}
# Experiment general settings
[EXPERIMENT.GENERAL]
latency = 10
fan = true
governors = "userspace"
frequencies = "max"
sampling_period = 1e-3
delay_between_executions = 5.0
delay_after_bootstrap = 30.0
active_wait = false
# Apps and zones
[ENVIRONMENTS.Radagast.APPS]
snk = {executable = "meter_thermal_msr", zone = "host"}
src = {executable = "generator_utilisation_mt", zone = "host"}
# Platform settings -> source
[ENVIRONMENTS.Radagast.src]
# generator
generator.cores = [5]
generator.policy = "round_robin"
generator.priority = 99
generator.worker_priority = 98
# logging
logging.debug_log_filename = "" # set during execution
logging.app_log_filename = "" # set during execution
logging.log_level = "info"
# schedule reader
schedule_reader.input_file = "" # set during execution
schedule_reader.reading_from_file = true
# Platform settings -> sink
[ENVIRONMENTS.Radagast.snk]
# logging
logging.debug_log_filename = "" # set during execution
logging.app_log_filename = "" # set during execution
logging.log_level = "info"
# thermal MSR meter samples the listed cores; package sensor disabled
thermal_msr.cores = [0, 2, 4, 6]
thermal_msr.package = false
# host
host.period = 1e-3
host.policy = "round_robin"
host.pinning = 0
host.priority = 90
host.log_header = true
host.start_immediately = false
# Configuration name
name = "flush+flush-cc_interference"
# Save path
save_path = "./data"
# Optional: Local backup path
backup_path = "./data/_backup"
# Optional: Action to perform if experiment directory exists
experiment_exists_action = "move" # overwrite, move, halt, update
# Optional: Backup configuration (disabled; uncomment and fill in to enable)
# [BACKUP]
# user = ""
# host = ""
# ssh_key = ""
# port = ""
# path = ""
# Experiment settings
[EXPERIMENT]
type = "PerformanceExperiment"
channel = "Cache"
# Phases: symbol_rates values are strings containing Python-style list
# expressions — presumably evaluated by the consumer (TODO confirm).
[EXPERIMENT.PHASES]
# 64bps up to 384Kbps using 64 lines (1 symbol = 64 bit) | 1.0/225e-6 = 4444.44
trainShortARMv8 = {bit_count = 1500, symbol_rates = "[100] + (list(range(500, 6001, 500)))", repetitions = 2}
evalShortARMv8 = {bit_count = 5000, symbol_rates = "[100] + (list(range(500, 6001, 500)))", repetitions = 2}
trainARMv8 = {bit_count = 1500, symbol_rates = "[100] + (list(range(250, 6001, 250)))", repetitions = 5}
evalARMv8 = {bit_count = 5000, symbol_rates = "[100] + (list(range(250, 6001, 250)))", repetitions = 5}
# 64bps up to 5Mbps using 64 lines (1 symbol = 64 bit) | 1.0/20e-6 = 50000.0
trainShortHaswell = {bit_count = 1500, symbol_rates = "[100] + (list(range(10000, 78001, 10000)) + [78125])", repetitions = 2}
evalShortHaswell = {bit_count = 5000, symbol_rates = "[100] + (list(range(10000, 78001, 10000)) + [78125])", repetitions = 2}
trainHaswell = {bit_count = 1500, symbol_rates = "[100] + (list(range(1000, 78001, 1000)) + [78125])", repetitions = 5}
evalHaswell = {bit_count = 5000, symbol_rates = "[100] + (list(range(1000, 78001, 1000)) + [78125])", repetitions = 5}
# Experiment layers
[EXPERIMENT.LAYERS]
src = {name = "BitsetCoding", params = {bitset_length = 64}}
lne = {name = "MultiN", params = {N = 64}}
rdp = {name = "DirectActivation", params = {}}
io = {name = "TimeValue", params = {timebase = "ns"}}
# Experiment general settings
[EXPERIMENT.GENERAL]
latency = 10
fan = true
governors = "userspace"
frequencies = "max"
sampling_period = 20e-6
delay_after_spawn = 5.0
delay_after_auxiliary = 1.0
active_wait = false
# Platform-specific overrides for ARMv8.
# NOTE(review): fan is the string "255" here but boolean true above —
# presumably a fan-speed value on this platform; confirm with the consumer.
[EXPERIMENT.GENERAL.ARMv8]
fan = "255"
sampling_period = 225e-6
# --------------------------------------------------------------------------------------------------
# ENVIRONMENT: Haswell
# Apps and zones
[ENVIRONMENTS.Haswell.APPS]
snk = {executable = "meter_cache_ff", zone = "host"}
src = {executable = "generator_cache_read_st", zone = "host"}
# Platform settings -> source
[ENVIRONMENTS.Haswell.src]
# generator
generator.cpu_to_pin = 3
generator.cores = [0]
generator.worker_policy = "round_robin"
generator.self_policy = "round_robin"
generator.worker_priority = 98
generator.self_priority = 97
generator.use_busy_sleep = true
generator.busy_sleep_yield = false
generator.use_huge_pages = true
generator.shm_file = "/dev/hugepages/8"
generator.set_count = 64
generator.set_increment = 64
# logging
logging.append_governor_to_files = false
logging.async = true
logging.async_size = 4096
logging.log_level = "debug"
logging.provide_platform_identification = false
# schedule reader
schedule_reader.reading_from_file = true
# Platform settings -> sink
[ENVIRONMENTS.Haswell.snk]
# logging
logging.append_governor_to_files = false
logging.async = true
logging.async_size = 4096
logging.log_level = "debug"
logging.provide_platform_identification = true
logging.rotating_logs = false
logging.rotating_logs_count = 10
logging.rotating_logs_size = 104857600 # 100 MiB per rotated log
logging.timestamp_files = false
# host
host.policy = "round_robin"
host.pinning = 7
host.priority = 95
host.log_header = true
host.start_immediately = false
host.use_busy_sleep = true
host.busy_sleep_yield = false
# cache meter settings mirror the generator's shm/set configuration above
cache.use_huge_pages = true
cache.shm_file = "/dev/hugepages/8"
cache.set_count = 64
cache.set_increment = 64
# Interference workload: loop-transcode a video, discarding output (null muxer)
[ENVIRONMENTS.Haswell.APPS."ffmpeg"]
executable = "ffmpeg"
type = "standalone"
start_individually = true
zone = "host"
args = [
"-y",
"-loglevel", "error",
"-stream_loop", "-1",
"-i", "media/video.mp4",
"-c:v", "libx264",
"-b:v", "1000k",
"-f", "null", "/dev/null"
]
# --------------------------------------------------------------------------------------------------
# ENVIRONMENT: ARMv8
# Apps and zones
[ENVIRONMENTS.ARMv8.APPS]
snk = {executable = "meter_cache_ff", zone = "combined"}
src = {executable = "generator_cache_read_st", zone = "combined"}
# Platform settings -> source
[ENVIRONMENTS.ARMv8.src]
# generator (set_increment differs from Haswell: 16 instead of 64)
generator.cpu_to_pin = 2
generator.cores = [0]
generator.worker_policy = "round_robin"
generator.self_policy = "round_robin"
generator.worker_priority = 98
generator.self_priority = 97
generator.use_busy_sleep = true
generator.busy_sleep_yield = false
generator.use_huge_pages = true
generator.shm_file = "/dev/hugepages/8"
generator.set_count = 64
generator.set_increment = 16
# logging
logging.append_governor_to_files = false
logging.async = true
logging.async_size = 4096
logging.log_level = "debug"
logging.provide_platform_identification = false
# schedule reader
schedule_reader.reading_from_file = true
# Platform settings -> sink
[ENVIRONMENTS.ARMv8.snk]
# logging
logging.append_governor_to_files = false
logging.async = true
logging.async_size = 4096
logging.log_level = "debug"
logging.provide_platform_identification = true
logging.rotating_logs = false
logging.rotating_logs_count = 10
logging.rotating_logs_size = 104857600
logging.timestamp_files = false
host.policy = "round_robin"
host.pinning = 3
host.priority = 95
host.log_header = true