Skip to content

Commit

Permalink
Fixes for mlperf submission (#249)
Browse files Browse the repository at this point in the history
* Fix python version for bert deepsparse
  • Loading branch information
arjunsuresh authored Feb 20, 2025
1 parent af51c74 commit 86f7e11
Show file tree
Hide file tree
Showing 3 changed files with 15 additions and 15 deletions.
2 changes: 1 addition & 1 deletion script/get-onnxruntime-prebuilt/meta.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ automation_uid: 5b4e0237da074764
cache: true
category: AI/ML frameworks
clean_files: []
default_version: 1.16.3
default_version: 1.20.1
deps:
- tags: detect,os
new_env_keys:
Expand Down
24 changes: 12 additions & 12 deletions script/run-all-mlperf-models/run-cpp-implementation.sh
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,9 @@ division="closed"
# run "$MLC_RUN_CMD"

POWER=" --power=yes --adr.mlperf-power-client.power_server=192.168.0.15 --adr.mlperf-power-client.port=4950 "
POWER=""
POWER=" --env.MLC_GET_PLATFORM_DETAILS=no"

run "mlcr set,system,performance,mode"
#run "mlcr set,system,performance,mode"

#cpp
run "mlcr generate-run-cmds,inference,_find-performance \
Expand All @@ -51,7 +51,7 @@ run "mlcr generate-run-cmds,inference,_submission \
--execution_mode=valid \
--skip_submission_generation=yes \
${POWER} \
--results_dir=$HOME/results_dir"
"

run "mlcr generate-run-cmds,inference,_submission \
--model=retinanet --implementation=cpp --device=cpu --backend=onnxruntime \
Expand All @@ -61,7 +61,7 @@ run "mlcr generate-run-cmds,inference,_submission \
--execution_mode=valid \
--skip_submission_generation=yes \
${POWER} \
--results_dir=$HOME/results_dir"
"

run "mlcr generate-run-cmds,inference,_submission \
--model=resnet50 --implementation=cpp --device=cpu --backend=onnxruntime \
Expand All @@ -71,7 +71,7 @@ run "mlcr generate-run-cmds,inference,_submission \
--execution_mode=valid \
--skip_submission_generation=yes \
${POWER} \
--results_dir=$HOME/results_dir"
"

run "mlcr generate-run-cmds,inference,_submission \
--model=retinanet --implementation=cpp --device=cpu --backend=onnxruntime \
Expand All @@ -81,7 +81,7 @@ run "mlcr generate-run-cmds,inference,_submission \
--execution_mode=valid \
--skip_submission_generation=yes \
${POWER} \
--results_dir=$HOME/results_dir"
"

# GPU

Expand All @@ -106,7 +106,7 @@ run "mlcr generate-run-cmds,inference,_submission \
--execution_mode=valid \
--skip_submission_generation=yes \
${POWER} \
--results_dir=$HOME/results_dir"
"

run "mlcr generate-run-cmds,inference,_submission \
--model=retinanet --implementation=cpp --device=cuda --backend=onnxruntime \
Expand All @@ -116,7 +116,7 @@ run "mlcr generate-run-cmds,inference,_submission \
--execution_mode=valid \
--skip_submission_generation=yes \
${POWER} \
--results_dir=$HOME/results_dir"
"


run "mlcr generate-run-cmds,inference,_submission \
Expand All @@ -128,7 +128,7 @@ run "mlcr generate-run-cmds,inference,_submission \
--execution_mode=valid \
--skip_submission_generation=yes \
${POWER} \
--results_dir=$HOME/results_dir"
"

run "mlcr generate-run-cmds,inference,_submission \
--model=retinanet --implementation=cpp --device=cuda --backend=onnxruntime \
Expand All @@ -138,7 +138,7 @@ run "mlcr generate-run-cmds,inference,_submission \
--execution_mode=valid \
--skip_submission_generation=yes \
${POWER} \
--results_dir=$HOME/results_dir"
"

#multistream
run "mlcr generate-run-cmds,inference,_submission \
Expand All @@ -150,7 +150,7 @@ run "mlcr generate-run-cmds,inference,_submission \
--execution_mode=valid \
--skip_submission_generation=yes \
${POWER} \
--results_dir=$HOME/results_dir"
"

run "mlcr generate-run-cmds,inference,_submission \
--model=retinanet --implementation=cpp --device=cuda --backend=onnxruntime \
Expand All @@ -160,4 +160,4 @@ run "mlcr generate-run-cmds,inference,_submission \
--execution_mode=valid \
--skip_submission_generation=yes \
${POWER} \
--results_dir=$HOME/results_dir"
"
4 changes: 2 additions & 2 deletions script/run-all-mlperf-models/run-pruned-bert.sh
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ scenario="Offline"
if [[ $scenario == "Offline" ]]; then
for stub in ${zoo_stub_list[@]}; do
cmd="mlcr run,mlperf,inference,generate-run-cmds,_find-performance \
--adr.python.version_min=3.8 \
--adr.python.version=3.9.12 \
--implementation=reference \
--model=bert-99 \
--precision=int8 \
Expand All @@ -65,7 +65,7 @@ fi

for stub in ${zoo_stub_list[@]}; do
cmd="mlcr run,mlperf,inference,generate-run-cmds \
--adr.python.version_min=3.8 \
--adr.python.version=3.9.12 \
--adr.compiler.tags=gcc \
--implementation=reference \
--model=bert-99 \
Expand Down

0 comments on commit 86f7e11

Please sign in to comment.