Align report with python version of BAPP. Added validation.
nikita-kud committed Feb 27, 2025
1 parent d20d9a8 commit 45da73f
Showing 2 changed files with 41 additions and 1 deletion.
5 changes: 5 additions & 0 deletions tests/samples_tests/smoke_tests/test_benchmark_app.py
@@ -61,6 +61,11 @@ def verify(sample_language, device, api=None, nireq=None, shape=None, data_shape
'-d', device
)
assert 'FPS' in output

# No Windows support due to the lack of the 'psutil' module in the CI infrastructure
if os.name == "posix":
assert 'Compile model ram used' in output

if tmp_path:
assert (tmp_path / 'exec_graph.xml').exists()
with (tmp_path / 'conf.json').open(encoding='utf-8') as file:
37 changes: 36 additions & 1 deletion tools/benchmark_tool/openvino/tools/benchmark/main.py
@@ -22,6 +22,27 @@
from openvino.tools.benchmark.utils.statistics_report import StatisticsReport, JsonStatisticsReport, CsvStatisticsReport, \
averageCntReport, detailedCntReport

def get_peak_memory_usage():
    if os.name == "posix":
        with open("/proc/self/status", "r") as f:
            for line in f:
                if line.startswith("VmPeak:"):
                    return int(line.split()[1])  # The value in KB
        raise RuntimeError("VmPeak attribute not found. Unable to determine peak memory usage.")

    # No Windows support due to the lack of the 'psutil' module in the CI infrastructure
    return None
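
A minimal sketch (not part of this commit) of how the Windows gap noted above might be closed if psutil were available in the CI images; the helper name is invented here, and it assumes psutil's Windows memory_info() exposes a peak_wset counter in bytes:

def get_peak_memory_usage_windows():
    try:
        import psutil  # optional dependency, absent from the CI infrastructure
    except ImportError:
        return None
    # peak_wset is the process peak working set in bytes on Windows; convert to KB
    # so the value is comparable with the VmPeak figure returned on POSIX systems
    return psutil.Process().memory_info().peak_wset // 1024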

def log_memory_usage(logger, start_mem_usage, end_mem_usage, action_name):
    if start_mem_usage is None or end_mem_usage is None:
        return

    capitalized_action_name = action_name.capitalize()
    action_name = "compilation" if action_name == "compile" else action_name
    logger.info(f"Start of {action_name} memory usage: Peak {start_mem_usage} KB")
    logger.info(f"End of {action_name} memory usage: Peak {end_mem_usage} KB")
    logger.info(f"{capitalized_action_name} model ram used {end_mem_usage - start_mem_usage} KB")

def parse_and_check_command_line():
def arg_not_empty(arg_value,empty_value):
return not arg_value is None and not arg_value == empty_value
@@ -349,10 +370,15 @@ def set_nthreads_pin(property_name, property_value):
# --------------------- 7. Loading the model to the device -------------------------------------------------
next_step()

start_mem_usage = get_peak_memory_usage()
start_time = datetime.utcnow()

compiled_model = benchmark.core.compile_model(args.path_to_model, benchmark.device, device_config)

duration_ms = f"{(datetime.utcnow() - start_time).total_seconds() * 1000:.2f}"
end_mem_usage = get_peak_memory_usage()
logger.info(f"Compile model took {duration_ms} ms")
log_memory_usage(logger, start_mem_usage, end_mem_usage, "compile")
if statistics:
statistics.add_parameters(StatisticsReport.Category.EXECUTION_RESULTS,
[
@@ -411,11 +437,15 @@ def set_nthreads_pin(property_name, property_value):

# --------------------- 7. Loading the model to the device -------------------------------------------------
next_step()
start_mem_usage = get_peak_memory_usage()
start_time = datetime.utcnow()

compiled_model = benchmark.core.compile_model(model, benchmark.device, device_config)

duration_ms = f"{(datetime.utcnow() - start_time).total_seconds() * 1000:.2f}"
end_mem_usage = get_peak_memory_usage()
logger.info(f"Compile model took {duration_ms} ms")
log_memory_usage(logger, start_mem_usage, end_mem_usage, "compile")
if statistics:
statistics.add_parameters(StatisticsReport.Category.EXECUTION_RESULTS,
[
@@ -435,10 +465,15 @@ def set_nthreads_pin(property_name, property_value):
# --------------------- 7. Loading the model to the device -------------------------------------------------
next_step()

start_mem_usage = get_peak_memory_usage()
start_time = datetime.utcnow()

compiled_model = benchmark.core.import_model(args.path_to_model, benchmark.device, device_config)

duration_ms = f"{(datetime.utcnow() - start_time).total_seconds() * 1000:.2f}"
end_mem_usage = get_peak_memory_usage()
logger.info(f"Import model took {duration_ms} ms")
log_memory_usage(logger, start_mem_usage, end_mem_usage, "import")
if statistics:
statistics.add_parameters(StatisticsReport.Category.EXECUTION_RESULTS,
[
