To receive notifications about scheduled maintenance, please subscribe to the mailing list gitlab-operations@sympa.ethz.ch at https://sympa.ethz.ch.

Commit 8037a317 authored by Philipp Miedl

Threat potential assessment of power management related data leaks

parents
# Git configuration fragment: convenience aliases and commit settings.
[alias]
# Short status.
st = status
# Compact history: one line per commit (hash, date, subject, refs, author) as a graph.
hist = log --pretty=format:\"%h %ad | %s%d [%an]\" --graph --date=short
[commit]
# Pre-fill commit messages from the repository's .gitmessage template.
template = .gitmessage
# junk: scratch directories, captured data and logs
junk/
_junk/
data/
_logs/
*/_logs/
# OS
.DS_Store
.DS_Store?
# IDE
*.sublime-*
.idea/
# build & distribution
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# pyinstaller
*.manifest
*.spec
# python
__pycache__/
*/__pycache__/
*.py[cod]
*$py.class
.mypy_cache/
# matches names starting with ".ipynb" (e.g. .ipynb_checkpoints)
.ipynb*
*.local*
# poetry & pyenv
.python-version
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# git: re-include git metadata even inside otherwise-ignored directories
!*/.git*
!.git*
# Work in progress files
*WiP*
# ADB temporary files
/.adb_key*
/adb-server.out*
# Backed up data
backup_*
# Subject line (aim for <= 50 characters)
# Multi-line description (wrap at ~72 characters)
# pre-commit configuration (https://pre-commit.com).
# NOTE: indentation restored — the flattened form is not valid YAML for this nesting.
default_language_version:
  python: python3.7

repos:
  # Generic housekeeping hooks.
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v2.1.0
    hooks:
      - id: trailing-whitespace
      - id: check-added-large-files
      - id: detect-private-key

  # Project-local hooks, run through the poetry-managed environment.
  - repo: local
    hooks:
      - id: version
        name: Check Python version
        entry: poetry run python --version
        language: system
        verbose: true
        pass_filenames: false
        always_run: true

      - id: black
        name: Format source code
        entry: poetry run python -m black
        args: ["--config", "pyproject.toml", "-q"]
        language: system
        require_serial: true
        types: [python]

      - id: isort
        name: Sort imports
        entry: poetry run python -m isort
        args: ["-sp", "setup.cfg", "-y"]
        language: system
        types: [python]
# Ignore machine-local configuration overrides...
*.local.toml
# ...but keep the shared template tracked.
!template.toml
# ------------------------------------------------------------------
# Experiment configuration: exploratory thermal experiment.
# ------------------------------------------------------------------
# Configuration name
name = "Example_Sweep_Thermal"
# Save path
save_path = "./data"
# Optional: Local backup path
backup_path = "./data/_backup"
# Optional: Action to perform if experiment directory exists
experiment_exists_action = "overwrite" # overwrite, move, halt
# Optional: Backup configuration (presumably a remote backup target; disabled here)
# [BACKUP]
# user = ""
# host = ""
# ssh_key = ""
# port = ""
# path = ""
# Experiment settings
[EXPERIMENT]
type = "ExploratoryExperiment"
channel = "Thermal"
[EXPERIMENT.PHASES]
# No phases defined here; example of the expected shape:
# test = {schedule = "path to file/actual schedule as DF", repetitions = 10} # could also be defined in the notebook

# Experiment layers
[EXPERIMENT.LAYERS]
# I/O layer; timebase "ns" presumably selects nanosecond timestamps — confirm
# against the TimeValue layer. Quoting/spacing normalized ('ns' -> "ns",
# timebase= -> timebase = ) to match the double-quoted style used everywhere
# else in these configurations (see the other TimeValue entries).
io = {name = "TimeValue", params = {timebase = "ns"}}
# Experiment general settings
[EXPERIMENT.GENERAL]
latency = 10
fan = true
governors = "userspace"
frequencies = "max"
# NOTE(review): time units below look like seconds — confirm against the runner.
sampling_period = 1e-3
delay_between_executions = 5.0
delay_after_bootstrap = 30.0
active_wait = false
# Apps and zones
[ENVIRONMENTS.Radagast.APPS]
snk = {executable = "meter_thermal_msr", zone = "host"}
src = {executable = "generator_utilisation_mt", zone = "host"}
# Platform settings -> source
[ENVIRONMENTS.Radagast.src]
# generator
generator.cores = [5]
generator.policy = "round_robin"
# Scheduling priorities under the round_robin policy: host above workers.
generator.priority = 99
generator.worker_priority = 98
# logging
logging.debug_log_filename = "" # set during execution
logging.app_log_filename = "" # set during execution
logging.log_level = "info"
# schedule reader
schedule_reader.input_file = "" # set during execution
schedule_reader.reading_from_file = true
# Platform settings -> sink
[ENVIRONMENTS.Radagast.snk]
# logging
logging.debug_log_filename = "" # set during execution
logging.app_log_filename = "" # set during execution
logging.log_level = "info"
# Cores sampled by the meter (executable name suggests thermal MSR readings).
thermal_msr.cores = [0, 2, 4, 6]
thermal_msr.package = false
# host
meter.period = 1e-3
meter.host_policy = "round_robin"
meter.host_pinning = 0
meter.host_priority = 90
meter.log_header = true
meter.start_immediately = false
# ------------------------------------------------------------------
# Experiment configuration: thermal covert-channel performance test.
# ------------------------------------------------------------------
# Configuration name
name = "Test_Thermal"
# Save path
save_path = "./data"
# Optional: Local backup path
backup_path = "./data/_backup"
# Optional: Action to perform if experiment directory exists
experiment_exists_action = "move" # overwrite, move, halt
# Optional: Backup configuration
# [BACKUP]
# user = ""
# host = ""
# ssh_key = ""
# port = ""
# path = ""
# Experiment settings
[EXPERIMENT]
type = "PerformanceExperiment"
channel = "ThermalCC"
[EXPERIMENT.PHASES]
# NOTE: symbol_rates may be a literal array or a string expression
# (evaluated by the consumer — see the commented alternatives below).
train = {bit_count = 60, symbol_rates = [1], repetitions = 1}
#train = {bit_count = 60, symbol_rates = "list(range(1, 2))", repetitions = 2}
#eval = {bit_count = 120, symbol_rates = "list(range(1, 2))", repetitions = 2}
# Experiment layers
[EXPERIMENT.LAYERS]
src = {name = "SourcePassthrough", params = {}}
lne = {name = "ManchesterLineCoding", params = {}} # uses signal from Thermal()
#lne = {name = "GenericLineCoding", params = {}} # uses signal from Thermal()
# saturate_mapping uses a bare digit key (valid TOML): key 1 maps to -1;
# presumably symbol value -> core count — confirm with the CoreActivation layer.
rdp = {name = "CoreActivation", params = {saturating = true, saturate_mapping = {1 = -1}}}
io = {name = "TimeValue", params = {output_timing_multiplier = 1e9, input_timing_multiplier = 1e9}}
# Experiment general settings
[EXPERIMENT.GENERAL]
latency = 10
fan = true
governors = "userspace"
frequencies = "max"
sampling_period = 1e-3
# Apps and zones
[ENVIRONMENTS.Radagast.APPS]
snk = {executable = "meter_thermal_msr", zone = "host"}
src = {executable = "generator_utilisation_mt", zone = "host"}
# Platform settings -> source
[ENVIRONMENTS.Radagast.src]
# generator
generator.cores = [3]
generator.policy = "round_robin"
generator.priority = 99
generator.worker_priority = 98
# logging
logging.debug_log_filename = "" # set during execution
logging.app_log_filename = "" # set during execution
logging.log_level = "info"
# schedule reader
schedule_reader.input_file = "" # set during execution
schedule_reader.reading_from_file = true
# Platform settings -> sink
[ENVIRONMENTS.Radagast.snk]
# logging
logging.debug_log_filename = "" # set during execution
logging.app_log_filename = "" # set during execution
logging.log_level = "info"
thermal_msr.cores = [0, 2, 4, 6]
thermal_msr.package = false
# host
meter.period = 1e-3
meter.host_policy = "round_robin"
meter.host_pinning = 0
meter.host_priority = 90
meter.log_header = true
meter.start_immediately = false
# ------------------------------------------------------------------
# Experiment configuration: thermal frequency-sweep experiment.
# ------------------------------------------------------------------
# Configuration name
name = "Example_Sweep_Thermal"
# Save path
save_path = "./data"
# Optional: Local backup path
backup_path = "./data/_backup"
# Optional: Action to perform if experiment directory exists
experiment_exists_action = "overwrite" # overwrite, move, halt
# Optional: Backup configuration
# [BACKUP]
# user = ""
# host = ""
# ssh_key = ""
# port = ""
# path = ""
# Experiment settings
[EXPERIMENT]
type = "FrequencySweepExperiment"
channel = "Thermal"
[EXPERIMENT.PHASES]
# One sweep phase: 10-second runs over the listed frequencies, repeated twice.
sweep = {length_seconds = 10, frequencies = [0, 1, 2], repetitions = 2}
# Experiment layers
[EXPERIMENT.LAYERS]
# saturate_mapping uses a bare digit key (valid TOML): key 1 maps to -1.
rdp = {name = "CoreActivation", params = {saturating = true, saturate_mapping = {1 = -1}}}
io = {name = "TimeValue", params = {}} #params = {output_timing_multiplier = 1e9, input_timing_multiplier = 1e9}}
# Experiment general settings
[EXPERIMENT.GENERAL]
latency = 10
fan = true
governors = "userspace"
frequencies = "max"
sampling_period = 1e-3
delay_between_executions = 5.0
delay_after_bootstrap = 30.0
active_wait = false
# Apps and zones
[ENVIRONMENTS.Radagast.APPS]
snk = {executable = "meter_thermal_msr", zone = "host"}
src = {executable = "generator_utilisation_mt", zone = "host"}
# Platform settings -> source
[ENVIRONMENTS.Radagast.src]
# generator
generator.cores = [5]
generator.policy = "round_robin"
generator.priority = 99
generator.worker_priority = 98
# logging
logging.debug_log_filename = "" # set during execution
logging.app_log_filename = "" # set during execution
logging.log_level = "info"
# schedule reader
schedule_reader.input_file = "" # set during execution
schedule_reader.reading_from_file = true
# Platform settings -> sink
[ENVIRONMENTS.Radagast.snk]
# logging
logging.debug_log_filename = "" # set during execution
logging.app_log_filename = "" # set during execution
logging.log_level = "info"
thermal_msr.cores = [0, 2, 4, 6]
thermal_msr.package = false
# host
meter.period = 1e-3
meter.host_policy = "round_robin"
meter.host_pinning = 0
meter.host_priority = 90
meter.log_header = true
meter.start_immediately = false
# ------------------------------------------------------------------
# Experiment configuration: Flush+Flush cache covert channel with
# interference load (Haswell and ARMv8 environments below).
# ------------------------------------------------------------------
# Configuration name
name = "flush+flush-cc_interference"
# Save path
save_path = "./data"
# Optional: Local backup path
backup_path = "./data/_backup"
# Optional: Action to perform if experiment directory exists
experiment_exists_action = "move" # overwrite, move, halt, update
# Optional: Backup configuration
# [BACKUP]
# user = ""
# host = ""
# ssh_key = ""
# port = ""
# path = ""
# Experiment settings
[EXPERIMENT]
type = "PerformanceExperiment"
channel = "FlushFlushCC"
[EXPERIMENT.PHASES]
# 64bps up to 384Kbps using 64 lines (1 symbol = 64 bit) | 1.0/225e-6 = 4444.44
trainShortARMv8 = {bit_count = 1500, symbol_rates = "[100] + (list(range(500, 6001, 500)))", repetitions = 2}
evalShortARMv8 = {bit_count = 5000, symbol_rates = "[100] + (list(range(500, 6001, 500)))", repetitions = 2}
trainARMv8 = {bit_count = 1500, symbol_rates = "[100] + (list(range(250, 6001, 250)))", repetitions = 5}
evalARMv8 = {bit_count = 5000, symbol_rates = "[100] + (list(range(250, 6001, 250)))", repetitions = 5}
# 64bps up to 5Mbps using 64 lines (1 symbol = 64 bit) | 1.0/20e-6 = 50000.0
trainShortHaswell = {bit_count = 1500, symbol_rates = "[100] + (list(range(10000, 78001, 10000)) + [78125])", repetitions = 2}
evalShortHaswell = {bit_count = 5000, symbol_rates = "[100] + (list(range(10000, 78001, 10000)) + [78125])", repetitions = 2}
trainHaswell = {bit_count = 1500, symbol_rates = "[100] + (list(range(1000, 78001, 1000)) + [78125])", repetitions = 5}
evalHaswell = {bit_count = 5000, symbol_rates = "[100] + (list(range(1000, 78001, 1000)) + [78125])", repetitions = 5}
# Experiment layers
[EXPERIMENT.LAYERS]
src = {name = "BitsetCoding", params = {bitset_length = 64}}
lne = {name = "MultiN", params = {N = 64}}
rdp = {name = "DirectActivation", params = {}}
io = {name = "TimeValue", params = {timebase = "ns"}}
# Experiment general settings
[EXPERIMENT.GENERAL]
latency = 10
fan = true
governors = "userspace"
frequencies = "max"
sampling_period = 20e-6
delay_after_spawn = 5.0
delay_after_auxiliary = 1.0
active_wait = false
# Per-environment overrides for ARMv8.
[EXPERIMENT.GENERAL.ARMv8]
# NOTE(review): fan is a string here ("255" — presumably a PWM duty value)
# but a boolean (true) in the base GENERAL table — confirm the consumer
# accepts both types.
fan = "255"
sampling_period = 225e-6
# ENVIRONMENT: Haswell
# Apps and zones
[ENVIRONMENTS.Haswell.APPS]
snk = {executable = "meter_cache_ff", zone = "host"}
src = {executable = "generator_cache_read_st", zone = "host"}
[ENVIRONMENTS.Haswell.src]
# generator
generator.host_pinning = 3
generator.should_pin_host = true
generator.cores = [0]
generator.should_pin_workers = true
generator.worker_policy = "round_robin"
generator.host_policy = "round_robin"
generator.worker_priority = 98
generator.host_priority = 97
generator.use_busy_sleep = true
generator.busy_sleep_yield = false
generator.use_huge_pages = true
generator.shm_file = "/dev/hugepages/8"
generator.set_count = 64
generator.set_increment = 64
# logging
logging.append_governor_to_files = false
logging.async = true
logging.async_size = 4096
logging.log_level = "debug"
logging.provide_platform_identification = false
# schedule reader
schedule_reader.reading_from_file = true
# Platform settings -> sink
[ENVIRONMENTS.Haswell.snk]
# logging
logging.append_governor_to_files = false
logging.async = true
logging.async_size = 4096
logging.log_level = "debug"
logging.provide_platform_identification = true
logging.rotating_logs = false
logging.rotating_logs_count = 10
logging.rotating_logs_size = 104857600
logging.timestamp_files = false
meter.host_policy = "round_robin"
meter.host_pinning = 7
meter.should_pin_host = true
meter.host_priority = 95
meter.log_header = true
meter.start_immediately = false
meter.use_busy_sleep = true
meter.busy_sleep_yield = false
cache.use_huge_pages = true
cache.shm_file = "/dev/hugepages/8"
cache.set_count = 64
cache.set_increment = 64
# Auxiliary interference workload: ffmpeg transcoding loop.
# Key quoting normalized: "ffmpeg" -> ffmpeg (the quotes were unnecessary —
# the name is a valid bare key and both forms denote the same table).
[ENVIRONMENTS.Haswell.APPS.ffmpeg]
executable = "ffmpeg"
type = "standalone"
start_individually = true
zone = "host"
# Loop the input forever (-stream_loop -1), transcode with libx264 at
# 1000 kb/s, and discard the output via the null muxer — CPU load only,
# nothing is written to disk.
args = [
    "-y",
    "-loglevel", "error",
    "-stream_loop", "-1",
    "-i", "media/video.mp4",
    "-c:v", "libx264",
    "-b:v", "1000k",
    "-f", "null", "/dev/null",
]
# --------------------------------------------------------------------------------------------------
# ENVIRONMENT: ARMv8
# Apps and zones
[ENVIRONMENTS.ARMv8.APPS]
snk = {executable = "meter_cache_ff", zone = "combined"}
src = {executable = "generator_cache_read_st", zone = "combined"}
[ENVIRONMENTS.ARMv8.src]
# generator
generator.host_pinning = 2
generator.should_pin_host = true
generator.cores = [0]
generator.should_pin_workers = true
generator.worker_policy = "round_robin"
generator.host_policy = "round_robin"
generator.worker_priority = 98
generator.host_priority = 97
generator.use_busy_sleep = true
generator.busy_sleep_yield = false
generator.use_huge_pages = true
generator.shm_file = "/dev/hugepages/8"
generator.set_count = 64
# Smaller set increment than the Haswell environment (16 vs 64) —
# presumably matches this platform's cache geometry; confirm.
generator.set_increment = 16
# logging
logging.append_governor_to_files = false
logging.async = true
logging.async_size = 4096
logging.log_level = "debug"
logging.provide_platform_identification = false
# schedule reader
schedule_reader.reading_from_file = true