
Support min_duration
arjunsuresh committed Feb 26, 2025
1 parent 2d6fc4b commit bb3991b
Showing 2 changed files with 6 additions and 5 deletions.
script/generate-mlperf-inference-user-conf/customize.py (5 additions, 5 deletions)
@@ -356,13 +356,13 @@ def preprocess(i):
                 max_duration_ranging_s *
                 1000)  # in milliseconds
         if scenario == "MultiStream" or scenario == "SingleStream":
-            if env.get('MLC_MLPERF_USE_MAX_DURATION', 'yes').lower() not in ["no", "false", "0"] and env.get(
-                    'MLC_MLPERF_MODEL_EQUAL_ISSUE_MODE', 'no').lower() not in ["yes", "1", "true"]:
-                user_conf += ml_model_name + "." + scenario + \
-                    f".max_duration = {max_duration_valid}" + "\n"
-            elif env.get('MLC_MLPERF_INFERENCE_MIN_DURATION', '') != '':
+            if env.get('MLC_MLPERF_INFERENCE_MIN_DURATION', '') != '':
                 user_conf += ml_model_name + "." + scenario + ".min_duration = " + \
                     env['MLC_MLPERF_INFERENCE_MIN_DURATION'] + " \n"
+            elif not is_false(env.get('MLC_MLPERF_USE_MAX_DURATION', 'yes')) and not is_true(env.get(
+                    'MLC_MLPERF_MODEL_EQUAL_ISSUE_MODE', 'no')):
+                user_conf += ml_model_name + "." + scenario + \
+                    f".max_duration = {max_duration_valid}" + "\n"
         if scenario == "MultiStream":
             user_conf += ml_model_name + "." + scenario + ".min_query_count = " + \
                 env.get(
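The reordered block gives an explicitly requested min_duration precedence over the auto-generated max_duration, and swaps the inline .lower() membership tests for the is_true/is_false helpers. Below is a minimal, runnable sketch of the resulting precedence logic; the helper definitions and the duration_lines wrapper are stand-ins for illustration (assumptions, not the repository's actual API), but the branch conditions mirror the diff above.

    def is_true(value):
        # Stand-in for the MLC helper: treat these strings as "true".
        return str(value).lower() in ["yes", "1", "true"]

    def is_false(value):
        # Stand-in for the MLC helper: treat these strings as "false".
        return str(value).lower() in ["no", "0", "false"]

    def duration_lines(env, ml_model_name, scenario, max_duration_valid):
        """Return the duration lines emitted into user.conf for
        SingleStream/MultiStream, following the new precedence."""
        conf = ""
        if env.get('MLC_MLPERF_INFERENCE_MIN_DURATION', '') != '':
            # An explicit min_duration wins over the auto max_duration.
            conf += (f"{ml_model_name}.{scenario}.min_duration = "
                     f"{env['MLC_MLPERF_INFERENCE_MIN_DURATION']}\n")
        elif not is_false(env.get('MLC_MLPERF_USE_MAX_DURATION', 'yes')) and \
                not is_true(env.get('MLC_MLPERF_MODEL_EQUAL_ISSUE_MODE', 'no')):
            conf += (f"{ml_model_name}.{scenario}"
                     f".max_duration = {max_duration_valid}\n")
        return conf

    # min_duration set: only the min_duration line is emitted.
    print(duration_lines({'MLC_MLPERF_INFERENCE_MIN_DURATION': '30000'},
                         'resnet50', 'SingleStream', 660000), end='')
    # -> resnet50.SingleStream.min_duration = 30000

    # min_duration unset: the previous max_duration behaviour still applies,
    # unless max_duration is disabled or equal-issue mode is on.
    print(duration_lines({}, 'resnet50', 'SingleStream', 660000), end='')
    # -> resnet50.SingleStream.max_duration = 660000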
script/run-mlperf-inference-app/meta.yaml (1 addition, 0 deletions)
@@ -67,6 +67,7 @@ input_mapping:
   imagenet_path: IMAGENET_PATH
   implementation: MLC_MLPERF_IMPLEMENTATION
   lang: MLC_MLPERF_IMPLEMENTATION
+  min_duration: MLC_MLPERF_INFERENCE_MIN_DURATION
   min_query_count: MLC_MLPERF_INFERENCE_MIN_QUERY_COUNT
   max_query_count: MLC_MLPERF_INFERENCE_MAX_QUERY_COUNT
   mode: MLC_MLPERF_LOADGEN_MODE
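The added entry exposes the new environment variable as a min_duration input to the run-mlperf-inference-app wrapper. A hypothetical sketch of what an input_mapping entry does (apply_input_mapping is illustrative only; the actual resolution happens inside the MLC automation framework):

    # The mapping entries below are taken from the meta.yaml diff above.
    input_mapping = {
        'min_duration': 'MLC_MLPERF_INFERENCE_MIN_DURATION',
        'min_query_count': 'MLC_MLPERF_INFERENCE_MIN_QUERY_COUNT',
        'max_query_count': 'MLC_MLPERF_INFERENCE_MAX_QUERY_COUNT',
    }

    def apply_input_mapping(cli_inputs, mapping):
        """Copy recognized command-line inputs into the env dict that
        customize.py reads. Illustrative stand-in, not the real API."""
        env = {}
        for key, value in cli_inputs.items():
            if key in mapping:
                env[mapping[key]] = str(value)
        return env

    env = apply_input_mapping({'min_duration': 600000}, input_mapping)
    assert env['MLC_MLPERF_INFERENCE_MIN_DURATION'] == '600000'

So a command-line input like min_duration=600000 (milliseconds, matching LoadGen's other duration settings) ends up as MLC_MLPERF_INFERENCE_MIN_DURATION in the environment, which the customize.py change above writes into user.conf.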
