Compare commits
3 Commits
Author | SHA1 | Date |
---|---|---|
Eyck-Alexander Jentzsch | 095f0ef72f | |
Eyck-Alexander Jentzsch | adb66cfe6e | |
Eyck-Alexander Jentzsch | 13fdeb7c36 |
|
@ -151,6 +151,3 @@ compile_commands.json
|
|||
CTestTestfile.cmake
|
||||
*.dump
|
||||
|
||||
.vscode/c_cpp_properties.json
|
||||
semihosting_test/build/semihosting_test
|
||||
semihosting_test/build/Makefile
|
||||
|
|
|
@ -4,8 +4,16 @@ endif()
|
|||
if (NOT DEFINED ISA)
|
||||
set(ISA imc)
|
||||
endif()
|
||||
message(STATUS "Building firmware using ${BOARD} board configuration and isa ${ISA}")
|
||||
add_custom_target(fw-common ALL
|
||||
COMMAND make -C hello-world BOARD=${BOARD} ISA=${ISA} && make -C benchmarks/dhrystone BOARD=${BOARD} ISA=${ISA} && make -C benchmarks/coremark BOARD=${BOARD} ISA=${ISA}
|
||||
message(STATUS "Building firmware using ${BOARD} board configuration")
|
||||
add_custom_target(fw-hello-world ALL
|
||||
COMMAND make -C ${riscvfw_SOURCE_DIR}/hello-world BOARD=${BOARD} ISA=${ISA}
|
||||
USES_TERMINAL
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
|
||||
add_custom_target(fw-dhrystone ALL
|
||||
COMMAND make -C ${riscvfw_SOURCE_DIR}/benchmarks/dhrystone BOARD=${BOARD} ISA=${ISA}
|
||||
USES_TERMINAL
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
|
||||
add_custom_target(fw-coremark ALL
|
||||
COMMAND make -C ${riscvfw_SOURCE_DIR}/benchmarks/coremark BOARD=${BOARD} ISA=${ISA}
|
||||
USES_TERMINAL
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
|
||||
|
|
|
@ -15,8 +15,9 @@ else
|
|||
RISCV_ABI:=ilp32
|
||||
endif
|
||||
# '-lgcc -lm' are needed to add softfloat routines
|
||||
CFLAGS := -g -O3 -DITERATIONS=$(ITERATIONS) -DHZ=32768 -DTIME -DNO_INIT -fno-inline -fno-builtin-printf -fno-common -Wno-implicit \
|
||||
CFLAGS := -g -march=$(RISCV_ARCH)_zicsr_zifencei -mabi=$(RISCV_ABI) -mcmodel=medlow -O3 -DITERATIONS=$(ITERATIONS) -DHZ=32768 -DTIME -DNO_INIT -fno-inline -fno-builtin-printf -fno-common -Wno-implicit \
|
||||
-funroll-loops -fpeel-loops -fgcse-sm -fgcse-las
|
||||
LDFLAGS := -g -march=$(RISCV_ARCH)_zicsr_zifencei -mabi=$(RISCV_ABI) -mcmodel=medlow -Wl,--wrap=scanf -Wl,--wrap=printf -Wl,--wrap=exit -lgcc -lm
|
||||
|
||||
TOOL_DIR=$(dir $(compiler))
|
||||
|
||||
|
|
|
@ -0,0 +1,108 @@
|
|||
import argparse
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def run_command(command, cwd=None):
    """Execute *command* through the shell and return ``(stdout, stderr)``.

    The command runs in *cwd* (current directory when ``None``).  A
    non-zero exit status is deliberately NOT raised — callers inspect
    the returned streams instead.
    """
    completed = subprocess.run(
        command,
        shell=True,
        cwd=cwd,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=False,
    )
    out = completed.stdout.decode("utf-8")
    err = completed.stderr.decode("utf-8")
    return out, err
|
||||
|
||||
|
||||
def build_test_cases(makefile_dir, iterations):
    """Rebuild the benchmark in *makefile_dir* with the given iteration count.

    Runs ``make clean && make ITERATIONS=<iterations>``.

    Raises:
        RuntimeError: if make exits with a non-zero status; the captured
            stderr text is included in the message for diagnosis.
    """
    result = subprocess.run(
        f"make clean && make ITERATIONS={iterations}",
        shell=True,
        cwd=makefile_dir,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=False,
    )
    # Key off the exit status, not stderr content: make writes warnings to
    # stderr even on success, and some failures print nothing there at all.
    if result.returncode != 0:
        raise RuntimeError(f"Error during make: {result.stderr.decode('utf-8')}")
|
||||
|
||||
|
||||
# 23 iteration counts from 1 up to ~6,000,000, evenly spaced on a log scale.
_ITERATIONS = [
    1, 2, 4, 8, 17, 34, 69, 141, 287, 582, 1182, 2401, 4878, 9910,
    20133, 40914, 83103, 168830, 343042, 696712, 1414641, 2874878, 5837995,
]

# Simulator back-ends to benchmark for every build.
_BACKENDS = ["interp", "llvm", "tcc", "asmjit"]


def main(simulator_path, makefile_dir):
    """Build dhrystone for a range of iteration counts and time each backend.

    Args:
        simulator_path: path to the simulator executable.
        makefile_dir: ``Path`` of the dhrystone sources (contains the Makefile).

    For each iteration count the firmware is rebuilt, a copy is archived
    under ``<parent>/workspace/bin`` and the simulator is run once per
    backend; each run's output goes to ``<parent>/workspace/<backend>_<n>.log``.
    """
    # Directory for generated test cases and log files.
    generated_dir = makefile_dir.parent / "workspace"
    os.makedirs(generated_dir, exist_ok=True)

    for iteration in _ITERATIONS:
        try:
            # Rebuild the firmware with the current ITERATIONS value.
            build_test_cases(makefile_dir, iteration)
        except RuntimeError as e:
            print(f"Error during compilation with ITERATIONS={iteration}: {e}")
            continue

        exe = makefile_dir / "dhrystone.elf"
        if not exe.is_file():
            raise SystemExit(f"{exe} does not exist")
        # Archive each build so individual runs can be reproduced later.
        verbose_exe = generated_dir / "bin" / f"dhrystone_{iteration}.elf"
        os.makedirs(verbose_exe.parent, exist_ok=True)
        shutil.copy(exe, verbose_exe)

        for backend in _BACKENDS:
            log_file = os.path.join(generated_dir, f"{backend}_{iteration}.log")

            sim_command = f"{simulator_path} -f {exe} --backend {backend}"
            start_time = time.time()
            sim_stdout, sim_stderr = run_command(sim_command)
            elapsed_time = time.time() - start_time

            # Save the simulator output (and any errors) to the logfile.
            with open(log_file, "w", encoding="utf8") as f:
                f.write(sim_stdout)
                if sim_stderr:
                    f.write(f"\nErrors:\n{sim_stderr}")

            print(
                f"Ran {backend} in {elapsed_time:.2f} s, "
                f"Output saved to {backend}_{iteration}.log"
            )
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Run simulations with generated test cases."
|
||||
)
|
||||
parser.add_argument("simulator_path", help="Path to the simulator executable.")
|
||||
|
||||
args = parser.parse_args()
|
||||
dhrystone_path = Path(__file__).parent / "dhrystone"
|
||||
main(args.simulator_path, dhrystone_path)
|
|
@ -0,0 +1,86 @@
|
|||
import argparse
|
||||
import os
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
import plotly.express as px
|
||||
import yaml
|
||||
|
||||
|
||||
def parse_logs(log_dir):
    """Collect the simulator summary lines from every ``*.log`` in *log_dir*.

    A summary line is any line containing "Executed", "instructions",
    "during" and "resulting in"; the backend name and run count are
    recovered from the file name (``<backend>_<iterations>.log``).

    Returns:
        list of dicts with keys ``backend``, ``run_count``,
        ``instructions``, ``time`` (ms) and ``mips``.
    """
    markers = ("Executed", "instructions", "during", "resulting in")
    results = []
    for filename in os.listdir(log_dir):
        if not filename.endswith(".log"):
            continue
        filepath = os.path.join(log_dir, filename)
        with open(filepath, "r", encoding="utf8") as file:
            for line in file:
                if not all(marker in line for marker in markers):
                    continue
                parts = line.split()
                # NOTE(review): instruction count assumed at token index 3
                # of the summary line — matches the simulator's log format.
                instructions = int(parts[3])
                time = int(parts[parts.index("during") + 1].rstrip("ms"))
                mips = float(parts[parts.index("resulting") + 2].rstrip("MIPS"))
                backend, iterations, _ = re.split(r"[_.]", filename)

                results.append(
                    {
                        "backend": backend,
                        "run_count": int(iterations),
                        "instructions": instructions,
                        "time": time,
                        "mips": mips,
                    }
                )
    return results
|
||||
|
||||
|
||||
def write_yaml(results, output_file):
    """Serialize the parsed *results* records to *output_file* as YAML."""
    with open(output_file, "w", encoding="utf8") as fp:
        yaml.dump(results, fp)
|
||||
|
||||
|
||||
def visualize_mips_over_instructions(yaml_file):
    """Plot MIPS against dhrystone iteration count, one line per backend.

    Reads the result records from *yaml_file* and opens an interactive
    Plotly line chart with a logarithmic x axis.
    """
    with open(yaml_file, "r", encoding="utf8") as file:
        data = yaml.safe_load(file)

    # Split the records into parallel x / y / label series for Plotly.
    run_count, mips, backends = [], [], []
    for entry in data:
        run_count.append(entry["run_count"])
        mips.append(entry["mips"])
        backends.append(entry["backend"])

    fig = px.line(
        x=run_count,
        y=mips,
        color=backends,
        labels={"x": "Dhrystone Iterations", "y": "MIPS", "color": "Backend"},
        title="MIPS over Amount of Dhrystone Iterations",
        log_x=True,
    )
    fig.show()
|
||||
|
||||
|
||||
def main():
    """Parse simulator logs into ``results.yaml`` and visualize them.

    NOTE(review): an existing ``results.yaml`` next to this script is
    reused as-is and ``log_dir`` is ignored — presumably a caching
    shortcut; delete the file to force re-parsing.
    """
    parser = argparse.ArgumentParser(
        description="""
    Parse log files and extract relevant information. Create a 'results.yaml' file and visualize it.
    Intended to be run after 'dhrystone_run_multiple.py'"""
    )
    parser.add_argument("log_dir", help="Path to the directory containing log files.")

    args = parser.parse_args()
    result_file = Path(__file__).parent / "results.yaml"
    if not result_file.is_file():
        write_yaml(parse_logs(args.log_dir), result_file)
    visualize_mips_over_instructions(result_file)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
Loading…
Reference in New Issue