diff options
Diffstat (limited to 'salis.py')
| -rwxr-xr-x | salis.py | 158 |
1 file changed, 139 insertions, 19 deletions
@@ -1,12 +1,17 @@ #!/usr/bin/env -S PYTHONDONTWRITEBYTECODE=1 python +import datetime +import json import os import random import shutil +import sqlite3 import subprocess import sys +import urllib.parse from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser, ArgumentTypeError, RawTextHelpFormatter +from http.server import HTTPServer, SimpleHTTPRequestHandler from tempfile import TemporaryDirectory # ------------------------------------------------------------------------------ @@ -29,6 +34,7 @@ formatter_class = lambda prog: ArgumentDefaultsHelpFormatter(prog, max_help_posi bench = parsers.add_parser("bench", formatter_class=formatter_class, help="run benchmark") load = parsers.add_parser("load", formatter_class=formatter_class, help="load saved simulation") new = parsers.add_parser("new", formatter_class=formatter_class, help="create new simulation") +serve = parsers.add_parser("serve", formatter_class=formatter_class, help="run data server") architectures = os.listdir("./arch") uis = os.listdir("./ui") @@ -48,7 +54,12 @@ def inat(i): if ival < 1: raise ArgumentTypeError("value must be greater than zero") return ival -option_keys = ["short", "long", "metavar", "description", "default", "required", "type", "parsers"] +def iport(i): + ival = int(i, 0) + if not 0 <= ival <= 65535: raise ArgumentTypeError("value must be valid port number") + return ival + +option_keys = ["short", "long", "metavar", "help", "default", "required", "type", "parsers"] option_list = [ ["A", "anc", "ANC", "ancestor file name without extension, to be compiled on all cores (ANC points to 'anc/{arch}/{ANC}.asm')", None, True, str, [bench, new]], ["a", "arch", architectures, "VM architecture", "dummy", False, str, [bench, new]], @@ -61,8 +72,9 @@ option_list = [ ["g", "compiler", "CC", "C compiler to use", "gcc", False, str, [bench, load, new]], ["M", "muta-pow", "POW", "mutator range exponent (range == 2^{POW})", 32, False, ipos, [bench, new]], ["m", "mvec-pow", "POW", "memory 
vector size exponent (size == 2^{POW})", 20, False, ipos, [bench, new]], - ["n", "name", "NAME", "name of new or loaded simulation", "def.sim", False, str, [load, new]], + ["n", "name", "NAME", "name of new or loaded simulation", "def.sim", False, str, [load, new, serve]], ["o", "optimized", None, "builds salis binary with optimizations", False, False, bool, [bench, load, new]], + ["P", "port", "PORT", "port number for data server", 8080, False, iport, [serve]], ["p", "pre-cmd", "CMD", "shell command to wrap call to executable (e.g. gdb, time, valgrind, etc.)", None, False, str, [bench, load, new]], ["s", "seed", "SEED", "seed value for new simulation; a value of 0 disables cosmic rays; a value of -1 creates a random seed", 0, False, seed, [bench, new]], ["T", "keep-temp-dir", None, "delete temporary directory on exit", False, False, bool, [bench, load, new]], @@ -83,7 +95,7 @@ for parser, option in parser_map: def push_diff(tgt_key, src_key): arg_kwargs[tgt_key] = option[src_key] def push_val(key, val): arg_kwargs[key] = val - push_diff("help", "description") + push_same("help") push_same("required") if option["metavar"] is None: @@ -102,44 +114,43 @@ args = main_parser.parse_args() # ------------------------------------------------------------------------------ # Logging # ------------------------------------------------------------------------------ +def now(): + return f"{datetime.datetime.now():%Y-%m-%d %H:%M:%S}" + def info(msg, val=""): - print(f"\033[1;34m[INFO]\033[0m {msg}", val) + print(f"\r{now()} ++ \033[1;34mINFO\033[0m {msg}", val) def warn(msg, val=""): - print(f"\033[1;33m[WARN]\033[0m {msg}", val) + print(f"\r{now()} ++ \033[1;33mWARN\033[0m {msg}", val) def error(msg, val=""): - print(f"\033[1;31m[ERROR]\033[0m {msg}", val) + print(f"\r{now()} ++ \033[1;31mERROR\033[0m {msg}", val) sys.exit(1) # ------------------------------------------------------------------------------ # Load configuration # 
------------------------------------------------------------------------------ info(description) -info(f"Called '{prog}' with the following options:") +info(f"Called '{prog} {args.command}' with the following options:", vars(args)) -for key, val in vars(args).items(): - print(f"{key} = {repr(val)}") - -if args.command in ["load", "new"]: +if args.command in ["load", "new", "serve"]: sim_dir = os.path.join(os.environ["HOME"], ".salis", args.name) sim_opts = os.path.join(sim_dir, "opts.py") sim_path = os.path.join(sim_dir, args.name) -if args.command in ["load"]: +if args.command in ["load", "serve"]: if not os.path.isdir(sim_dir): error("No simulation found named:", args.name) - info(f"Sourcing configuration from: '{sim_opts}':") sys.path.append(sim_dir) import opts - opt_vars = (opt for opt in dir(opts) if not opt.startswith("__")) + opt_vars = {opt: getattr(opts, opt) for opt in dir(opts) if not opt.startswith("__")} + + for key, val in opt_vars.items(): + setattr(args, key, val) - for opt_var in opt_vars: - opt_attr = getattr(opts, opt_var) - print(f"{opt_var} = {repr(opt_attr)}") - setattr(args, opt_var, opt_attr) + info(f"Sourced configuration from: '{sim_opts}':", opt_vars) if args.command in ["new"]: if args.data_push_pow and args.data_push_pow < args.sync_pow: @@ -188,6 +199,113 @@ if args.command in ["load", "new"]: ui_vars = UIVars(args) # ------------------------------------------------------------------------------ +# Launch data server +# ------------------------------------------------------------------------------ +if args.command in ["serve"]: + sim_db = os.path.join(sim_dir, f"{args.name}.sqlite3") + info("Connecting to SQLite database:", sim_db) + db_con = sqlite3.connect(sim_db) + db_con.row_factory = sqlite3.Row + db_cur = db_con.cursor() + + # Generate configuration so front-end knows how to render the plots. + # Each architecture may also provide its own set of plots, which will be merged with the + # default dictionary below. 
+ plots = { + "General": { + "cycl": { + "table": "general", + "type": "lines", + "cols": [f"cycl_{i}" for i in range(args.cores)], + }, + "mall": { + "table": "general", + "type": "lines", + "cols": [f"mall_{i}" for i in range(args.cores)], + }, + "pnum": { + "table": "general", + "type": "lines", + "cols": [f"pnum_{i}" for i in range(args.cores)], + }, + "ppop": { + "table": "general", + "type": "lines", + "cols": [f"{pref}_{i}" for i in range(args.cores) for pref in ["pfst", "plst"]], + }, + "ambs": { + "table": "general", + "type": "lines", + "cols": [f"{pref}_{i}" for pref in ["amb0", "amb1"] for i in range(args.cores)], + }, + "eevs": { + "table": "general", + "type": "lines", + "cols": [f"{pref}_{i}" for pref in ["emb0", "emb1", "eliv", "edea"] for i in range(args.cores)], + }, + }, + } + + for key in arch_vars.plots: + plots[key] = (plots[key] if key in plots else {}) | arch_vars.plots[key] + + tables = set(plot["table"] for _, section in plots.items() for _, plot in section.items()) + + info("Generated plot configuration:", plots) + info("Detected data tables:", tables) + + class Handler(SimpleHTTPRequestHandler): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs, directory="data") + + def log_message(_, format, *args): + info(format % args) + + def log_error(_, format, *args): + warn(format % args) + + def send_as_json(self, obj): + self.send_response(200) + self.send_header("Content-type", "application/json") + self.end_headers() + self.wfile.write(json.dumps(obj).encode("utf-8")) + + def do_GET(self): + bits = urllib.parse.urlparse(self.path) + + if bits.path == "/opts": return self.send_as_json(opt_vars | {"name": args.name}) + if bits.path == "/plots": return self.send_as_json(plots) + if bits.path == "/tables": return self.send_as_json(list(tables)) + + # NOTE: this server implementation is very minimal and has no built-in security! + # *DO NOT* deploy this to the internet as it is. 
Only run this data server within trusted, + # private networks. + if bits.path == "/data": + http_query = urllib.parse.parse_qs(bits.query) + table = http_query["table"][0] + entries = http_query["entries"][0] + x_axis = http_query["x_axis"][0] + x_low = http_query["x_low"][0] + x_high = http_query["x_high"][0] + sql_query = f"SELECT * FROM (SELECT rowid, * FROM {table} WHERE {x_axis} >= {x_low} AND {x_axis} <= {x_high} ORDER BY {x_axis} DESC LIMIT {entries}) ORDER BY {x_axis} ASC;" + sql_res = db_cur.execute(sql_query) + sql_list = [dict(row) for row in sql_res.fetchall()] + return self.send_as_json(sql_list) + + super().do_GET() + + info("Launching data server") + server = HTTPServer(("", args.port), Handler) + + try: + server.serve_forever() + except KeyboardInterrupt: + pass + + info("Shutting down data server") + sys.exit(0) + +# ------------------------------------------------------------------------------ # Compile ancestor organism # ------------------------------------------------------------------------------ if args.command in ["bench", "new"] and args.anc is not None: @@ -241,6 +359,7 @@ defines.add(f"-DSYNC_INTERVAL={2 ** args.sync_pow}ul") defines.add(f"-DTHREAD_GAP={args.thread_gap}") defines.add(f"-DCORE_FIELDS={" ".join(f"CORE_FIELD({", ".join(field)})" for field in arch_vars.core_fields)}") +defines.add(f"-DCORE_DATA_FIELDS={" ".join(f"CORE_DATA_FIELD({", ".join(field)})" for field in arch_vars.core_data_fields)}") defines.add(f"-DPROC_FIELDS={" ".join(f"PROC_FIELD({", ".join(field)})" for field in arch_vars.proc_fields)}") defines.add(f"-DINST_SET={" ".join(f"INST({index}, {"_".join(inst[0])}, \"{" ".join(inst[0])}\", L'{inst[1]}')" for index, inst in enumerate(arch_vars.inst_set))}") defines.add(f"-DCORE_FIELD_COUNT={len(arch_vars.core_fields)}") @@ -270,6 +389,7 @@ if args.command in ["bench", "new"]: if args.command in ["load", "new"]: flags.add(f"-Iui/{args.ui}") + flags.update(ui_vars.flags) includes.update(ui_vars.includes) 
defines.update(ui_vars.defines) defines.add(f"-DAUTOSAVE_INTERVAL={2 ** args.auto_save_pow}ul") @@ -313,7 +433,7 @@ info("Building salis binary at:", salis_bin) build_cmd = [args.compiler, "core.c", "-o", salis_bin] build_cmd.extend(flags) -build_cmd.extend(sum(map(lambda include: [f"-include", include], includes), [])) +build_cmd.extend(sum(map(lambda include: ["-include", include], includes), [])) build_cmd.extend(defines) build_cmd.extend(links) |
