diff --git a/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/README.md b/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/README.md
index 6143b69b..e22fdfe7 100644
--- a/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/README.md
+++ b/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/README.md
@@ -1,3 +1,3 @@
 | Model | Scenario | Accuracy | Throughput | Latency (in ms) |
 |---------------|------------|------------------------------------|--------------|-------------------|
-| llama2-70b-99 | offline | (61.7021, 37.9679, 39.3617, 610.0) | 0.362 | - |
\ No newline at end of file
+| llama2-70b-99 | offline | (61.7021, 37.9679, 39.3617, 610.0) | 0.43 | - |
\ No newline at end of file
diff --git a/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/README.md b/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/README.md
index c1364265..2d5f7cea 100644
--- a/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/README.md
+++ b/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/README.md
@@ -16,7 +16,7 @@ pip install -U cmind

 cm rm cache -f

-cm pull repo gateoverflow@mlperf-automations --checkout=ee7be856e5def9e46bc4535b6128ec342f6931db
+cm pull repo gateoverflow@mlperf-automations --checkout=0b0073c83f137dabd0f2abb0866cf1a7193c8100

 cm run script \
    --tags=app,mlperf,inference,generic,_reference,_llama2-70b-99,_pytorch,_cpu,_test,_r5.0-dev_default,_bfloat16,_offline \
@@ -107,4 +107,4 @@ Model Precision: fp32
 `TOKENS_PER_SAMPLE`: `610.0`, Required accuracy for closed division `>= 265.005` and `<= 323.895`

 ### Performance Results
-`Samples per second`: `0.362211`
+`Samples per second`: `0.43025`
diff --git a/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/accuracy_console.out b/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/accuracy_console.out
index 8572b60b..e118aa16 100644
--- a/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/accuracy_console.out
+++ b/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/accuracy_console.out
@@ -1,7 +1,7 @@
 INFO:datasets:PyTorch version 2.5.1+cpu available.
 Loading dataset...
 Finished loading dataset.
- Loading checkpoint shards: 0%| | 0/15 [00:00
 get,git,repo,_branch.master,_repo.https://github.com/mlcommons/inference
 get-mlperf-inference-utils,e341e5f86d8342e5 --> get,mlperf,inference,src
 app-mlperf-inference,d775cac873ee4231_(_reference,_llama2-70b-99,_pytorch,_cpu,_test,_r5.0-dev_default,_bfloat16,_offline_) --> get,mlperf,inference,utils
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> detect,os
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> detect,os
 detect-cpu,586c8a43320142f7 --> detect,os
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> detect,cpu
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,sys-utils-cm
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,python
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> detect,cpu
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,sys-utils-cm
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,python
 get-generic-python-lib,94b62a682bc44791_(_torch_) --> get,python3
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,generic-python-lib,_torch
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,generic-python-lib,_torch
 get-generic-python-lib,94b62a682bc44791_(_torchvision_) --> get,python3
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,generic-python-lib,_torchvision
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,generic-python-lib,_torchvision
 get-ml-model-huggingface-zoo,53cf8252a443446a_(_clone-repo,_model-stub.meta-llama/Llama-2-70b-chat-hf_) --> get,python3
 get-generic-python-lib,94b62a682bc44791_(_huggingface_hub_) --> detect,os
 detect-cpu,586c8a43320142f7 --> detect,os
@@ -32,7 +32,7 @@ graph TD
 get-ml-model-huggingface-zoo,53cf8252a443446a_(_clone-repo,_model-stub.meta-llama/Llama-2-70b-chat-hf_) --> get,generic-python-lib,_huggingface_hub
 get-ml-model-huggingface-zoo,53cf8252a443446a_(_clone-repo,_model-stub.meta-llama/Llama-2-70b-chat-hf_) --> get,git,repo,_lfs,_repo.https://huggingface.co/meta-llama/Llama-2-70b-chat-hf
 get-ml-model-llama2,5db97be9f61244c6_(_pytorch_) --> get,ml-model,huggingface,zoo,_clone-repo,_model-stub.meta-llama/Llama-2-70b-chat-hf
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,ml-model,llama2,raw,_pytorch
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,ml-model,llama2,raw,_pytorch
 get-preprocessed-dataset-openorca,5614c39cb1564d72_(_validation,_mlcommons_) --> get,sys-utils-cm
 get-preprocessed-dataset-openorca,5614c39cb1564d72_(_validation,_mlcommons_) --> get,python3
 get-generic-python-lib,94b62a682bc44791_(_package.pyarrow_) --> get,python3
@@ -42,7 +42,7 @@ graph TD
 get-generic-python-lib,94b62a682bc44791_(_package.transformers_) --> get,python3
 get-preprocessed-dataset-openorca,5614c39cb1564d72_(_validation,_mlcommons_) --> get,generic-python-lib,_package.transformers
 get-preprocessed-dataset-openorca,5614c39cb1564d72_(_validation,_mlcommons_) --> download-and-extract,_rclone,_url.mlc-inference:mlcommons-inference-wg-public/open_orca
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,preprocessed,dataset,openorca,_validation,_mlcommons
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,preprocessed,dataset,openorca,_validation,_mlcommons
 generate-mlperf-inference-user-conf,3af4475745964b93 --> detect,os
 detect-cpu,586c8a43320142f7 --> detect,os
 generate-mlperf-inference-user-conf,3af4475745964b93 --> detect,cpu
@@ -53,40 +53,40 @@ graph TD
 generate-mlperf-inference-user-conf,3af4475745964b93 --> get,mlcommons,inference,src
 get-mlperf-inference-sut-configs,c2fbf72009e2445b --> get,cache,dir,_name.mlperf-inference-sut-configs
 generate-mlperf-inference-user-conf,3af4475745964b93 --> get,sut,configs
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> generate,user-conf,mlperf,inference
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,loadgen
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> generate,user-conf,mlperf,inference
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,loadgen
 get-mlperf-inference-src,4b57186581024797 --> detect,os
 get-mlperf-inference-src,4b57186581024797 --> get,python3
 get-mlperf-inference-src,4b57186581024797 --> get,git,repo,_branch.master,_repo.https://github.com/mlcommons/inference
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,mlcommons,inference,src
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,mlcommons,inference,src
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,mlcommons,inference,src
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,mlcommons,inference,src
 get-generic-python-lib,94b62a682bc44791_(_package.psutil_) --> get,python3
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,generic-python-lib,_package.psutil
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,generic-python-lib,_package.psutil
 get-generic-python-lib,94b62a682bc44791_(_package.transformers_) --> get,python3
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,generic-python-lib,_package.transformers
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,generic-python-lib,_package.transformers
 get-generic-python-lib,94b62a682bc44791_(_package.datasets_) --> get,python3
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,generic-python-lib,_package.datasets
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,generic-python-lib,_package.datasets
 get-generic-python-lib,94b62a682bc44791_(_package.sentencepiece_) --> get,python3
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,generic-python-lib,_package.sentencepiece
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,generic-python-lib,_package.sentencepiece
 get-generic-python-lib,94b62a682bc44791_(_package.protobuf_) --> get,python3
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,generic-python-lib,_package.protobuf
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,generic-python-lib,_package.protobuf
 get-generic-python-lib,94b62a682bc44791_(_package.accelerate_) --> get,python3
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,generic-python-lib,_package.accelerate
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,generic-python-lib,_package.accelerate
 get-generic-python-lib,94b62a682bc44791_(_package.absl-py_) --> get,python3
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,generic-python-lib,_package.absl-py
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,generic-python-lib,_package.absl-py
 get-generic-python-lib,94b62a682bc44791_(_package.evaluate_) --> get,python3
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,generic-python-lib,_package.evaluate
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,generic-python-lib,_package.evaluate
 get-generic-python-lib,94b62a682bc44791_(_package.nltk_) --> get,python3
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,generic-python-lib,_package.nltk
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,generic-python-lib,_package.nltk
 get-generic-python-lib,94b62a682bc44791_(_package.numpy_) --> get,python3
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,generic-python-lib,_package.numpy
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,generic-python-lib,_package.numpy
 get-generic-python-lib,94b62a682bc44791_(_package.rouge-score_) --> get,python3
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,generic-python-lib,_package.rouge-score
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,generic-python-lib,_package.rouge-score
 get-generic-python-lib,94b62a682bc44791_(_package.more-itertools_) --> get,python3
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,generic-python-lib,_package.more-itertools
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,generic-python-lib,_package.more-itertools
 get-generic-python-lib,94b62a682bc44791_(_package.compressed_tensors_) --> get,python3
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> get,generic-python-lib,_package.compressed_tensors
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> get,generic-python-lib,_package.compressed_tensors
 detect-cpu,586c8a43320142f7 --> detect,os
 benchmark-program,19f369ef47084895 --> detect,cpu
 benchmark-program-mlperf,cfff0132a8aa4018 --> benchmark-program,program
- app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16_) --> benchmark-mlperf
\ No newline at end of file
+ app-mlperf-inference-mlcommons-python,ff149e9781fc4b65_(_pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16_) --> benchmark-mlperf
\ No newline at end of file
diff --git a/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/cm-deps.png b/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/cm-deps.png
index 8bac215f..44afb13b 100644
Binary files a/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/cm-deps.png and b/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/cm-deps.png differ
diff --git a/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/cm-version-info.json b/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/cm-version-info.json
index 283c5320..d3492886 100644
--- a/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/cm-version-info.json
+++ b/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/cm-version-info.json
@@ -147,7 +147,7 @@
 "script_tags": "detect-os,detect,os,info",
 "script_variations": "",
 "version": "",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -167,7 +167,7 @@
 "script_tags": "detect,cpu,detect-cpu,info",
 "script_variations": "",
 "version": "",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -177,7 +177,7 @@
 "script_tags": "get,sys-utils-cm",
 "script_variations": "",
 "version": "",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -187,7 +187,7 @@
 "script_tags": "get,python,python3,get-python,get-python3",
 "script_variations": "",
 "version": "3.10.12",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -207,7 +207,7 @@
 "script_tags": "get,install,generic,pip-package,generic-python-lib",
 "script_variations": "torch",
 "version": "2.5.1",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -227,7 +227,7 @@
 "script_tags": "get,install,generic,pip-package,generic-python-lib",
 "script_variations": "torchvision",
 "version": "0.20.1",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -337,7 +337,7 @@
 "script_tags": "get,raw,ml-model,language-processing,llama2,llama2-70b,text-summarization",
 "script_variations": "pytorch",
 "version": "",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -437,7 +437,7 @@
 "script_tags": "get,dataset,openorca,language-processing,preprocessed",
 "script_variations": "validation,mlcommons",
 "version": "",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -547,7 +547,7 @@
 "script_tags": "generate,mlperf,inference,user-conf,inference-user-conf",
 "script_variations": "",
 "version": "",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -557,7 +557,7 @@
 "script_tags": "get,loadgen,inference,inference-loadgen,mlperf,mlcommons",
 "script_variations": "",
 "version": "master",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -597,7 +597,7 @@
 "script_tags": "get,src,source,inference,inference-src,inference-source,mlperf,mlcommons",
 "script_variations": "",
 "version": "r5.0-git-b9f22d6c37fa9024616538516e1d3bb59dd7c019",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -607,7 +607,7 @@
 "script_tags": "get,src,source,inference,inference-src,inference-source,mlperf,mlcommons",
 "script_variations": "",
 "version": "master-git-b9f22d6c37fa9024616538516e1d3bb59dd7c019",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -627,7 +627,7 @@
 "script_tags": "get,install,generic,pip-package,generic-python-lib",
 "script_variations": "package.psutil",
 "version": "6.1.1",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -647,7 +647,7 @@
 "script_tags": "get,install,generic,pip-package,generic-python-lib",
 "script_variations": "package.transformers",
 "version": "4.47.1",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -667,7 +667,7 @@
 "script_tags": "get,install,generic,pip-package,generic-python-lib",
 "script_variations": "package.datasets",
 "version": "3.2.0",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -687,7 +687,7 @@
 "script_tags": "get,install,generic,pip-package,generic-python-lib",
 "script_variations": "package.sentencepiece",
 "version": "0.2.0",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -707,7 +707,7 @@
 "script_tags": "get,install,generic,pip-package,generic-python-lib",
 "script_variations": "package.protobuf",
 "version": "5.29.2",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -727,7 +727,7 @@
 "script_tags": "get,install,generic,pip-package,generic-python-lib",
 "script_variations": "package.accelerate",
 "version": "1.2.1",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -747,7 +747,7 @@
 "script_tags": "get,install,generic,pip-package,generic-python-lib",
 "script_variations": "package.absl-py",
 "version": "2.1.0",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -767,7 +767,7 @@
 "script_tags": "get,install,generic,pip-package,generic-python-lib",
 "script_variations": "package.evaluate",
 "version": "0.4.3",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -787,7 +787,7 @@
 "script_tags": "get,install,generic,pip-package,generic-python-lib",
 "script_variations": "package.nltk",
 "version": "3.8.1",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -807,7 +807,7 @@
 "script_tags": "get,install,generic,pip-package,generic-python-lib",
 "script_variations": "package.numpy",
 "version": "1.26.4",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -827,7 +827,7 @@
 "script_tags": "get,install,generic,pip-package,generic-python-lib",
 "script_variations": "package.rouge-score",
 "version": "0.1.2",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -847,7 +847,7 @@
 "script_tags": "get,install,generic,pip-package,generic-python-lib",
 "script_variations": "package.more-itertools",
 "version": "10.5.0",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -867,7 +867,7 @@
 "script_tags": "get,install,generic,pip-package,generic-python-lib",
 "script_variations": "package.compressed_tensors",
 "version": "0.8.1",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 },
 {
@@ -907,7 +907,7 @@
 "script_tags": "mlperf,benchmark-mlperf",
 "script_variations": "",
 "version": "",
- "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_llama2-70b-99,_cpu,_bfloat16 )"
+ "parent": "app-mlperf-inference-mlcommons-python,ff149e9781fc4b65 ( pytorch,_offline,_cpu,_llama2-70b-99,_bfloat16 )"
 }
 }
 ]
diff --git a/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/os_info.json b/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/os_info.json
index 4afcbca6..0b94d33a 100644
--- a/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/os_info.json
+++ b/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/os_info.json
@@ -26,5 +26,5 @@
 ],
 "CM_HOST_PLATFORM_FLAVOR": "x86_64",
 "CM_HOST_PYTHON_BITS": "64",
- "CM_HOST_SYSTEM_NAME": "c9e0515cf4fd"
+ "CM_HOST_SYSTEM_NAME": "62ad37f3b3fb"
 }
\ No newline at end of file
diff --git a/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/performance_console.out b/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/performance_console.out
index d70bc59b..1f9c6de5 100644
--- a/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/performance_console.out
+++ b/open/MLCommons/measurements/gh_action-reference-cpu-pytorch_v2.5.1-default_config/llama2-70b-99/offline/performance_console.out
@@ -1,7 +1,7 @@
 INFO:datasets:PyTorch version 2.5.1+cpu available.
 Loading dataset...
 Finished loading dataset.
- Loading checkpoint shards: 0%| | 0/15 [00:00