v0.0.9
- added output parameters to mamul1 and hibench: in addition to the average duration, they now output the min, max, median and standard deviation of the run durations. Work related to [https://bugzilla.ipr.univ-rennes.fr/show_bug.cgi?id=3958]
parent ea28ea0293
commit 026c5f6100
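For context (not part of this commit): the new output parameters are plain summary statistics over the per-run durations reported by starbench. A minimal sketch of what they represent, using Python's statistics module and a hypothetical list of per-run durations:

import statistics

durations = [1.92, 1.98, 2.05, 2.11]  # hypothetical per-run durations, in seconds

duration_avg = statistics.mean(durations)      # average duration of one run
duration_med = statistics.median(durations)    # median duration
duration_stddev = statistics.stdev(durations)  # sample standard deviation
duration_min = min(durations)                  # fastest run
duration_max = max(durations)                  # slowest run

Whether starbench uses the sample or the population standard deviation is not visible from this diff; the stdev line above is only illustrative.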
@@ -1,11 +1,11 @@
from typing import List
import pandas as pd
from pathlib import Path
import subprocess
import shutil
from ..core import IBenchmark, BenchParam, BenchmarkConfig, BenchmarkMeasurements, ITargetHost
from ..cmakeutils import get_bla_vendor
from ..util import get_proxy_env_vars
from starbench.core import StarbenchResults


class HiBench(IBenchmark):
@@ -22,7 +22,11 @@ class HiBench(IBenchmark):
bench_params.append(BenchParam('cmake_path', BenchParam.Type.PARAM_TYPE_STRING, 'the location of the cmake executable to use (eg "/opt/cmake/cmake-3.23.0/bin/cmake", or simply "cmake" for the one in the path)'))

out_params = []
out_params.append(BenchParam('duration', BenchParam.Type.PARAM_TYPE_FLOAT, 'the average duration of one test, in seconds'))
out_params.append(BenchParam('duration_avg', BenchParam.Type.PARAM_TYPE_FLOAT, 'the average duration of one test, in seconds'))
out_params.append(BenchParam('duration_med', BenchParam.Type.PARAM_TYPE_FLOAT, 'the median duration of one test, in seconds'))
out_params.append(BenchParam('duration_stddev', BenchParam.Type.PARAM_TYPE_FLOAT, 'the standard deviation of the duration of one test, in seconds'))
out_params.append(BenchParam('duration_min', BenchParam.Type.PARAM_TYPE_FLOAT, 'the min duration of one test, in seconds'))
out_params.append(BenchParam('duration_max', BenchParam.Type.PARAM_TYPE_FLOAT, 'the max duration of one test, in seconds'))
out_params.append(BenchParam('num_threads_per_run', BenchParam.Type.PARAM_TYPE_INT, 'the number of cores to use by each concurrent run of the app (must be a divisor of num_cores)'))

super().__init__(bench_id='hibench', bench_params=bench_params, out_params=out_params, common_params=common_params)
@@ -83,14 +87,14 @@ class HiBench(IBenchmark):
shell_command += f'{get_proxy_env_vars()} starbench --source-tree-provider \'{source_tree_provider}\' --num-cores {num_cores} --output-dir={output_dir} --cmake-path={cmake_path} {" ".join([f"--cmake-option={option}" for option in cmake_options])} --benchmark-command=\'{benchmark_command}\' --output-measurements={output_measurements_file_path}'
subprocess.run(shell_command, shell=True, check=True, executable='/bin/bash')
measurements: BenchmarkMeasurements = {}
df = pd.read_csv(output_measurements_file_path, sep='\t')
selected_rows = df[df['worker_id'] == '<average>']
assert len(selected_rows) == 1
row = selected_rows.loc[0]
duration = row["duration"]
starbench_results = StarbenchResults(output_measurements_file_path)

num_threads_per_run = 1  # at the moment starbench always allocates 1 core per process, but in the future, starbench will support multiple cores per process (useful to measure how the app scales with increasing parallelism)
measurements['num_threads_per_run'] = num_threads_per_run
measurements['duration'] = duration
measurements['duration_avg'] = starbench_results.get_average_duration()
measurements['duration_med'] = starbench_results.get_median_duration()
measurements['duration_stddev'] = starbench_results.get_duration_stddev()
(measurements['duration_min'], measurements['duration_max']) = starbench_results.get_duration_range()
return measurements

# def get_measurements(self, benchmark_output_dir: Path) -> BenchmarkMeasurements:
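For context (not part of the diff): the parsing above assumes that the file written by --output-measurements is tab-separated, has 'worker_id' and 'duration' columns, and contains a single summary row whose worker_id is '<average>'; the exact column set is an assumption here. A self-contained sketch of that selection, with invented file content:

import io

import pandas as pd

tsv = io.StringIO(
    'worker_id\tduration\n'  # hypothetical layout; real files may contain more columns
    '0\t2.05\n'
    '1\t1.98\n'
    '<average>\t2.015\n'
)
df = pd.read_csv(tsv, sep='\t')
selected_rows = df[df['worker_id'] == '<average>']
assert len(selected_rows) == 1
print(selected_rows.iloc[0]['duration'])  # 2.015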
@@ -1,11 +1,11 @@
from typing import List
from pathlib import Path
import pandas as pd
import subprocess
import logging
from ..core import IBenchmark, BenchParam, BenchmarkConfig, BenchmarkMeasurements, ITargetHost
from ..cmakeutils import get_bla_vendor
from iprbench.util import extract_resource_dir
from starbench.core import StarbenchResults


class MaMul1(IBenchmark):
@@ -21,7 +21,11 @@ class MaMul1(IBenchmark):
# bench_params.append(BenchParam('source_dir', BenchParam.Type.PARAM_TYPE_STRING, 'the path to the directory containing mamul1 test source files'))

out_params = []
out_params.append(BenchParam('duration', BenchParam.Type.PARAM_TYPE_FLOAT, 'the average duration of one matrix multiplication, in seconds'))
out_params.append(BenchParam('duration_avg', BenchParam.Type.PARAM_TYPE_FLOAT, 'the average duration of one matrix multiplication, in seconds'))
out_params.append(BenchParam('duration_med', BenchParam.Type.PARAM_TYPE_FLOAT, 'the median duration of one matrix multiplication, in seconds'))
out_params.append(BenchParam('duration_stddev', BenchParam.Type.PARAM_TYPE_FLOAT, 'the standard deviation of the duration of one matrix multiplication, in seconds'))
out_params.append(BenchParam('duration_min', BenchParam.Type.PARAM_TYPE_FLOAT, 'the min duration of one matrix multiplication, in seconds'))
out_params.append(BenchParam('duration_max', BenchParam.Type.PARAM_TYPE_FLOAT, 'the max duration of one matrix multiplication, in seconds'))

super().__init__(bench_id='mamul1', bench_params=bench_params, out_params=out_params, common_params=common_params)
@@ -74,12 +78,11 @@ class MaMul1(IBenchmark):
logging.debug('shell_command = "%s"', shell_command)
subprocess.run(shell_command, shell=True, check=True, executable='/bin/bash')
measurements: BenchmarkMeasurements = {}
df = pd.read_csv(output_measurements_file_path, sep='\t')
selected_rows = df[df['worker_id'] == '<average>']
assert len(selected_rows) == 1
row = selected_rows.loc[0]
duration = row["duration"]
measurements['duration'] = duration
starbench_results = StarbenchResults(output_measurements_file_path)
measurements['duration_avg'] = starbench_results.get_average_duration()
measurements['duration_med'] = starbench_results.get_median_duration()
measurements['duration_stddev'] = starbench_results.get_duration_stddev()
(measurements['duration_min'], measurements['duration_max']) = starbench_results.get_duration_range()
return measurements

# def get_measurements(self, benchmark_output_dir: Path) -> BenchmarkMeasurements:
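Not part of the commit: both benchmarks follow the same implicit contract between the declared out_params and the dict they return; every declared output parameter name is expected to appear as a key of the returned measurements (BenchmarkMeasurements is assumed here to behave like a plain dict keyed by parameter name). A hypothetical check:

declared = ['duration', 'duration_avg', 'duration_med', 'duration_stddev', 'duration_min', 'duration_max']
measurements = {'duration': 2.015, 'duration_avg': 2.015, 'duration_med': 2.015,
                'duration_stddev': 0.08, 'duration_min': 1.92, 'duration_max': 2.11}  # hypothetical values
missing = [name for name in declared if name not in measurements]
assert not missing, f'measurements lack declared output parameters: {missing}'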
@@ -1 +1 @@
-__version__ = '0.0.8'
+__version__ = '0.0.9'
@@ -15,8 +15,8 @@ dependencies = [
"sqlalchemy",
# "cocluto >= 1.7",
"cocluto@git+https://git.ipr.univ-rennes.fr/cellinfo/cocluto@v1.7.0",
# "starbench >= 1.0.3"
"starbench@git+https://github.com/g-raffy/starbench@v1.0.3",
# "starbench >= 1.0.4"
"starbench@git+https://github.com/g-raffy/starbench@v1.0.4",
]
requires-python = ">= 3.8"
authors = [
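Aside (not part of the commit): these pinned entries are PEP 508 direct references, so pip fetches the dependencies straight from the given git tags when the package is installed; for instance, the starbench pin can also be installed on its own with pip install "starbench@git+https://github.com/g-raffy/starbench@v1.0.4".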