Compare commits

3 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 095f0ef72f | |
| | adb66cfe6e | |
| | 13fdeb7c36 | |

README.md (26 lines changed)

@@ -1,3 +1,25 @@

# Firmware

# MINRES Firmware Repository

## Structure

This repository comes with several executables ready to be built, such as `hello-world`, or `coremark` and `dhrystone` in the `benchmarks` directory.

The easiest way to build an executable is to call `make` in the corresponding directory.

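For example, a minimal sketch of building one of them, assuming each executable lives in a directory of the same name:

```
cd hello-world
make
```
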
Using `make clean && bear -- make` will cause a correct compile_commands.json to be emitted. This allows using completion tools like clangd.

## Prerequisite

This repository requires `riscv64-unknown-elf-gcc` to be located in `$PATH`.

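A quick way to check that the toolchain is visible, as a sketch (the install location below is only an example, not part of this repository):

```
# example install location only -- adjust to your toolchain prefix
export PATH=/opt/riscv/bin:$PATH
riscv64-unknown-elf-gcc --version
```
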
## How to Use

When compiling executables, the target platform needs to be specified using the `BOARD` variable. When compiling for the TGC5C, for example, use `make BOARD=tgc_vp`; when compiling for RTL, use `make BOARD=rtl`. The default value of the `BOARD` variable is `iss`.

The architecture can be set with the `ISA` variable; the default value is `imc`.

When compiling for the TGC5A VP, for example, the call to create the correct binary is the following:
```
make BOARD=tgc_vp ISA=e
```

## Useful Information

Using `bear -- <build-command>` will cause a compile_commands.json to be emitted. This allows using completion tools like clangd.

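For instance, substituting one of the make invocations documented above for `<build-command>`:

```
bear -- make BOARD=tgc_vp
```
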
## Current Limitations

Currently, this repository only supports the creation of 32-bit executables (even when setting `RISCV_ARCH` and `RISCV_ABI` manually).

Compiling for the 'e' extension/ISA together with any other extension (`ISA=emc`, for example) requires setting `RISCV_ABI=ilp32e` explicitly.

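A sketch of such an invocation, assuming `RISCV_ABI` is simply passed on the `make` command line like the other variables (the board value here is only an example):

```
make BOARD=tgc_vp ISA=emc RISCV_ABI=ilp32e
```
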
When switching the ABI or ARCH, ensure that the object files in the corresponding 'env' directory of the 'bare-metal-bsp' submodule are removed (namely the 'init.o' file), so they get rebuilt with the appropriate flags.

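A minimal sketch of that cleanup; the exact layout of the 'env' directories inside the submodule is an assumption here, so adjust the path to your checkout:

```
# remove stale BSP objects so they are rebuilt with the new ABI/ARCH flags
find bare-metal-bsp -path '*/env/*' -name '*.o' -delete
```
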
benchmarks/dhrystone_run_multiple.py (new file, 108 lines)

@@ -0,0 +1,108 @@

import argparse
import os
import shutil
import subprocess
import time
from pathlib import Path


def run_command(command, cwd=None):
    """Run a shell command in the specified directory and return its output."""
    result = subprocess.run(
        command,
        shell=True,
        cwd=cwd,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=False,
    )
    return result.stdout.decode("utf-8"), result.stderr.decode("utf-8")


def build_test_cases(makefile_dir, iterations):
    """Run the Makefile with the specified iterations."""
    make_command = f"make clean && make ITERATIONS={iterations}"
    stdout, stderr = run_command(make_command, cwd=makefile_dir)
    if stderr:
        raise RuntimeError(f"Error during make: {stderr}")


def main(simulator_path, makefile_dir):
    # Directory for generated test cases
    generated_dir = makefile_dir.parent / "workspace"
    os.makedirs(generated_dir, exist_ok=True)

    # Iteration counts to benchmark: 23 values up to ~6,000,000, roughly evenly spaced on a log scale
    iterations_list = [
        1,
        2,
        4,
        8,
        17,
        34,
        69,
        141,
        287,
        582,
        1182,
        2401,
        4878,
        9910,
        20133,
        40914,
        83103,
        168830,
        343042,
        696712,
        1414641,
        2874878,
        5837995,
    ]

    for iteration in iterations_list:
        try:
            # Rebuild the benchmark with the current ITERATIONS value
            build_test_cases(makefile_dir, iteration)
        except RuntimeError as e:
            print(f"Error during compilation with ITERATIONS={iteration}: {e}")
            continue

        # Run the simulator with the generated test case
        exe = makefile_dir / "dhrystone.elf"
        if not exe.is_file():
            exit(f"{exe} does not exist")
        verbose_exe = generated_dir / "bin" / f"dhrystone_{iteration}.elf"
        os.makedirs(verbose_exe.parent, exist_ok=True)
        shutil.copy(exe, verbose_exe)
        backends = ["interp", "llvm", "tcc", "asmjit"]
        for backend in backends:
            log_file = os.path.join(generated_dir, f"{backend}_{iteration}.log")

            sim_command = f"{simulator_path} -f {exe} --backend {backend}"
            start_time = time.time()
            sim_stdout, sim_stderr = run_command(sim_command)
            end_time = time.time()
            elapsed_time = end_time - start_time

            # Save the output to the logfile
            with open(log_file, "w", encoding="utf8") as f:
                f.write(sim_stdout)
                if sim_stderr:
                    f.write(f"\nErrors:\n{sim_stderr}")

            print(
                f"Ran {backend} in {elapsed_time:.2f} s, Output saved to {backend}_{iteration}.log"
            )


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Run simulations with generated test cases."
    )
    parser.add_argument("simulator_path", help="Path to the simulator executable.")

    args = parser.parse_args()
    dhrystone_path = Path(__file__).parent / "dhrystone"
    main(args.simulator_path, dhrystone_path)

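A sketch of how this script might be invoked from the repository root (the simulator path is only a placeholder; the script builds `benchmarks/dhrystone` itself and writes per-backend logs to `benchmarks/workspace`):

```
python3 benchmarks/dhrystone_run_multiple.py /path/to/simulator
```
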
benchmarks/visualize_runtimes.py (new file, 86 lines)

@@ -0,0 +1,86 @@

import argparse
import os
import re
from pathlib import Path

import plotly.express as px
import yaml


def parse_logs(log_dir):
    results = []
    for filename in os.listdir(log_dir):
        if filename.endswith(".log"):
            filepath = os.path.join(log_dir, filename)
            with open(filepath, "r", encoding="utf8") as file:
                for line in file:
                    if (
                        "Executed" in line
                        and "instructions" in line
                        and "during" in line
                        and "resulting in" in line
                    ):
                        parts = line.split()
                        instructions = int(parts[3])
                        time_idx = parts.index("during") + 1
                        time = int(parts[time_idx].rstrip("ms"))
                        mips_idx = parts.index("resulting") + 2
                        mips = float(parts[mips_idx].rstrip("MIPS"))
                        backend, iterations, _ = re.split(r"[_.]", filename)

                        results.append(
                            {
                                "backend": backend,
                                "run_count": int(iterations),
                                "instructions": instructions,
                                "time": time,
                                "mips": mips,
                            }
                        )
    return results


def write_yaml(results, output_file):
    with open(output_file, "w", encoding="utf8") as file:
        yaml.dump(results, file)


def visualize_mips_over_instructions(yaml_file):
    # Read data from YAML file
    with open(yaml_file, "r", encoding="utf8") as file:
        data = yaml.safe_load(file)

    # Extract run counts, MIPS values, and backends
    run_count = [entry["run_count"] for entry in data]
    mips = [entry["mips"] for entry in data]
    backends = [entry["backend"] for entry in data]

    # Create line plot using Plotly Express
    fig = px.line(
        x=run_count,
        y=mips,
        color=backends,
        labels={"x": "Dhrystone Iterations", "y": "MIPS", "color": "Backend"},
        title="MIPS over Amount of Dhrystone Iterations",
        log_x=True,
    )
    fig.show()


def main():
    parser = argparse.ArgumentParser(
        description="""
        Parse log files and extract relevant information. Create a 'results.yaml' file and visualize it.
        Intended to be run after 'dhrystone_run_multiple.py'"""
    )
    parser.add_argument("log_dir", help="Path to the directory containing log files.")
    args = parser.parse_args()

    result_file = Path(__file__).parent / "results.yaml"
    if not result_file.is_file():
        results = parse_logs(args.log_dir)
        write_yaml(results, result_file)
    visualize_mips_over_instructions(result_file)


if __name__ == "__main__":
    main()

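And a sketch of the follow-up visualization step, assuming the logs from the previous script ended up in `benchmarks/workspace` (the `pip` line just names the third-party modules this script imports):

```
pip install plotly pyyaml
python3 benchmarks/visualize_runtimes.py benchmarks/workspace
```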