This repository has been archived by the owner on Oct 11, 2024. It is now read-only.

Commit

remove long and short descs
Varun Sundar Rabindranath committed Mar 14, 2024
1 parent 82db7dc commit aa011d9
Showing 5 changed files with 34 additions and 62 deletions.
10 changes: 5 additions & 5 deletions neuralmagic/benchmarks/common.py
@@ -26,7 +26,7 @@ def max_model_length_from_model_id(model: str,
return _get_and_verify_max_len(config, max_model_len=None)


def script_args_to_cla(config: NamedTuple) -> Iterable[list[str]]:
def script_args_to_cla(config: NamedTuple) -> Iterable[dict]:
#config is a NamedTuple constructed from some JSON in neuralmagic/benchmarks/configs

kv = vars(config.script_args)
@@ -41,17 +41,17 @@ def script_args_to_cla(config: NamedTuple) -> Iterable[list[str]]:
if len(v) == 0:
key_args.append(k)

key_args_cla = list(map(lambda k: f"--{k}", key_args))
key_args_cla = {f"{k}": "" for k in key_args}

# Remove empty lists from arg_lists and remove key args from keys
arg_lists = list(filter(lambda arg_list: len(arg_list) != 0, arg_lists))
keys = list(filter(lambda k: k not in key_args, keys))
assert len(keys) == len(arg_lists)

for args in itertools.product(*arg_lists):
cla = key_args_cla
for name, value in zip(keys, args):
cla.extend([f"--{name}", f"{value}"])
args_dict = dict(zip(keys, args))
cla = key_args_cla.copy()
cla.update(args_dict)
yield cla


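For context, a minimal standalone sketch of the dict-yielding behavior that script_args_to_cla now has. It is simplified to take a plain dict rather than the config NamedTuple, and the sample argument names are made up for illustration:

```python
import itertools
from typing import Dict, Iterable


def script_args_to_cla(script_args: Dict[str, list]) -> Iterable[dict]:
    # Flag-only arguments (empty value lists) map to an empty string value.
    flag_args = {k: "" for k, v in script_args.items() if len(v) == 0}
    # Valued arguments are swept; each yielded dict picks one value per key.
    valued = {k: v for k, v in script_args.items() if len(v) != 0}
    for combo in itertools.product(*valued.values()):
        cla = flag_args.copy()
        cla.update(zip(valued.keys(), combo))
        yield cla


# Two swept args and one flag-only arg yield 2 x 2 = 4 dicts.
for cla in script_args_to_cla({"num-prompts": [100, 200],
                               "max-num-seqs": [64, 128],
                               "disable-log-stats": []}):
    print(cla)
```

Callers are now responsible for turning each dict back into command-line flags, as the loops added in the runner scripts below show.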
13 changes: 9 additions & 4 deletions neuralmagic/benchmarks/run_benchmark_serving.py
@@ -2,6 +2,7 @@
import subprocess
import requests
import time
import json
import itertools

from typing import NamedTuple, Optional
@@ -125,19 +126,23 @@ def run_bench(server_cmd: str, bench_cmd: list[str], model: str) -> None:

description = (f"{config.description}\n" +
f"model - {model}\n" +
f"max-model-len - {max_model_len}\n" +
f"sparsity - {sparsity}\n" +
f"max_model_len - {max_model_len}\n" +
f"{config.script_name} " +
" ".join(script_args) + "\n"+
f"server-cmd : {server_cmd}" )
f"{json.dumps(script_args, indent=2)}")

bench_cmd = (["python3", "-m"
f"{script_path}"] + script_args +
f"{script_path}"] +
["--description", f"{description}"] +
["--model", f"{model}"] +
["--tokenizer", f"{model}"] +
["--port", f"{BENCH_SERVER_PORT}"] +
["--host", f"{BENCH_SERVER_HOST}"])
# Add script args
for k, v in script_args.items():
bench_cmd.append(f"--{k}")
if v != "":
bench_cmd.append(f"{v}")

if output_directory:
bench_cmd += (["--save-directory", f"{output_directory}"] +
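Both runner scripts now rebuild the command line from the yielded dict with the same small loop; here is a sketch of that pattern (the helper name and sample arguments are illustrative, not from the repo). The identical loop appears again in run_benchmark_throughput.py below.

```python
from typing import Dict, List


def append_script_args(bench_cmd: List[str],
                       script_args: Dict[str, object]) -> List[str]:
    # Every key becomes a "--<name>" flag; flag-only args (empty string
    # values) contribute no value token, all others append their value.
    for k, v in script_args.items():
        bench_cmd.append(f"--{k}")
        if v != "":
            bench_cmd.append(f"{v}")
    return bench_cmd


cmd = append_script_args(["python3", "-m", "some.benchmark.script"],
                         {"num-prompts": 100, "disable-log-stats": ""})
# ['python3', '-m', 'some.benchmark.script',
#  '--num-prompts', '100', '--disable-log-stats']
```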
9 changes: 7 additions & 2 deletions neuralmagic/benchmarks/run_benchmark_throughput.py
@@ -1,4 +1,5 @@
import argparse
import json
from pathlib import Path
from typing import NamedTuple, Optional

@@ -34,14 +35,18 @@ def run_benchmark_throughput_script(config: NamedTuple,
f"model - {model}\n" +
f"max_model_len - {max_model_len}\n" +
f"{config.script_name} " +
" ".join(script_args))
f"{json.dumps(script_args, indent=2)}")

bench_cmd = (["python3", "-m", f"{script_path}"] +
script_args +
["--description", f"{description}"] +
["--model", f"{model}"] +
["--tokenizer", f"{model}"] +
["--max-model-len", f"{max_model_len}"])
# Add script args
for k, v in script_args.items():
bench_cmd.append(f"--{k}")
if v != "":
bench_cmd.append(f"{v}")

if output_directory:
bench_cmd = bench_cmd + [
26 changes: 0 additions & 26 deletions neuralmagic/benchmarks/scripts/logging/benchmark_result.py
@@ -178,29 +178,3 @@ def describe_gpu(result_json: dict) -> str:
assert all(map(lambda x: x == gpu_name, gpu_names[:num_gpus_used]))

return f"{gpu_name} x {num_gpus_used}"


def short_description(result_json: dict) -> str:
"""
Given a result_json, that is the JSON version for some
BenchmarkResult object, return a string that captures a few key high
level information like the user given benchmark description, GPU name etc.
"""
nl = '\n'
return (
f"Description: {result_json.get(BenchmarkResult.DESCRIPTION_KEY_)}{nl}"
f"GPU : {describe_gpu(result_json)}{nl}"
f"Context : {result_json.get(BenchmarkResult.BENCHMARKING_CONTEXT_KEY_)}")


def long_description(result_json: dict) -> str:
"""
Given a result_json, that is the JSON version for some
BenchmarkResult object, return a string that is fully-descriptive of this benchmark run.
"""
short_desc = short_description(result_json)
nl = '\n'
return (
f"Short Description: {short_desc} {nl}"
f"script name : {result_json.get(BenchmarkResult.SCRIPT_NAME_KEY_)}{nl}"
f"script args : {result_json.get(BenchmarkResult.SCRIPT_ARGS_KEY_)}")
38 changes: 13 additions & 25 deletions neuralmagic/benchmarks/scripts/logging/gha_benchmark_logging.py
@@ -10,28 +10,22 @@
from dataclasses import dataclass
from typing import List, Iterable, NamedTuple

from .benchmark_result import GHABenchmarkToolName, BenchmarkResult, MetricTemplate, short_description, long_description
from .benchmark_result import GHABenchmarkToolName, BenchmarkResult, MetricTemplate, describe_gpu


@dataclass
class GHARecord:
"""
GHARecord is what actually goes into the output JSON.
- name : Chart title. Unique names map to a unique chart. This should
most information that long_description has.
- name : Chart title. Unique names map to a unique chart.
- unit : Y-axis label.
- value : Value to plot.
- extra : This information shows up when you hover
over a data-point in the chart.
- short_description : For UI to display succinct a chart title.
- long_description : For UI to display a long chart title.
- extra : Any extra information that is passed as a JSON string.
"""
name: str
unit: str
value: float
extra: str
short_description: str
long_description: str

@staticmethod
def extra_from_benchmark_result(br: BenchmarkResult) -> str:
@@ -44,22 +38,17 @@ def extra_from_benchmark_result(br: BenchmarkResult) -> str:
br[BenchmarkResult.SCRIPT_ARGS_KEY_],
}

return f"{extra_as_dict}"
return f"{json.dumps(extra_as_dict, indent=2)}"

@staticmethod
def from_metric_template(metric_template: MetricTemplate,
extra: str = "",
short_description: str = "",
long_description: str = ""):
description: str = ""):
nl = '\n'
return GHARecord(
name=
f"{metric_template.key}{nl}{short_description}",
unit=metric_template.unit,
value=metric_template.value,
extra=extra,
short_description=short_description,
long_description=long_description)
return GHARecord(name=f"{metric_template.key}{nl}{description}",
unit=metric_template.unit,
value=metric_template.value,
extra=extra)


class Tool_Record_T(NamedTuple):
@@ -78,6 +67,8 @@ def process(json_file_path: Path) -> Iterable[Tool_Record_T]:

print(f"processing file : {json_file_path}")

description = describe_gpu(json_data) + "\n" + json_data.get(
BenchmarkResult.DESCRIPTION_KEY_)
hover_data = GHARecord.extra_from_benchmark_result(json_data)
metrics: Iterable[dict] = json_data.get(BenchmarkResult.METRICS_KEY_)
metrics: Iterable[MetricTemplate] = map(
@@ -87,10 +78,7 @@ def process(json_file_path: Path) -> Iterable[Tool_Record_T]:
lambda metric: Tool_Record_T(
metric.tool,
GHARecord.from_metric_template(
metric,
extra=hover_data,
short_description=short_description(json_data),
long_description=long_description(json_data))), metrics)
metric, extra=hover_data, description=description)), metrics)


def main(input_directory: Path, bigger_is_better_output_json_file_name: Path,
@@ -99,7 +87,7 @@ def main(input_directory: Path, bigger_is_better_output_json_file_name: Path,
def dump_to_json(gha_records: List[GHARecord], output_path: Path):
# Make output directory if it doesn't exist
output_path.parent.mkdir(parents=True, exist_ok=True)

# Make data JSON serializable
gha_record_dicts = list(map(lambda x: x.__dict__, gha_records))
with open(output_path, 'w+') as f:
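To round out the logging changes, a self-contained sketch of how the slimmed-down GHARecord ends up in the output JSON. The field values and output file name are made up, and the json.dump call is an assumption about how dump_to_json finishes, since the diff is truncated right after the open call:

```python
import json
from dataclasses import dataclass
from pathlib import Path
from typing import List


@dataclass
class GHARecord:
    name: str     # chart title: metric key + "\n" + description
    unit: str     # y-axis label
    value: float  # value to plot
    extra: str    # JSON string shown when hovering over a data point


def dump_to_json(gha_records: List[GHARecord], output_path: Path) -> None:
    # Make output directory if it doesn't exist
    output_path.parent.mkdir(parents=True, exist_ok=True)
    # Dataclass instances are not directly JSON serializable;
    # __dict__ converts each record into a plain dict first.
    gha_record_dicts = list(map(lambda x: x.__dict__, gha_records))
    with open(output_path, 'w+') as f:
        json.dump(gha_record_dicts, f, indent=4)  # assumed serialization call


dump_to_json(
    [GHARecord(name="request_throughput\nA100 x 1\nserving benchmark",
               unit="prompts/s", value=12.3, extra="{}")],
    Path("bigger_is_better.json"))
```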
