Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

23: Fix runtime and deprecation issues #24

Closed
wants to merge 4 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM lifflander1/vt:amd64-ubuntu-22.04-clang-11-cpp
FROM lifflander1/vt:amd64-ubuntu-22.04-clang-14-cpp

RUN apt-get update -y -q && \
apt-get install -y -q --no-install-recommends \
Expand Down
29 changes: 0 additions & 29 deletions build_vt.sh
100644 → 100755
Original file line number Diff line number Diff line change
Expand Up @@ -5,36 +5,15 @@ set -ex
source_dir=${1}
build_dir=${2}
extra_flags=${3}
build_tests=${4}

# Dependency versions, when fetched via git.
detector_rev=master
checkpoint_rev=develop

mkdir -p "${build_dir}"
pushd "${build_dir}"

export DETECTOR_BUILD=${build_dir}/detector
export CHECKPOINT_BUILD=${build_dir}/checkpoint

if test -d "${build_dir}/detector"
then
{ echo "Detector already in lib... not downloading, building, and installing"; } 2>/dev/null
else
git clone -b "${detector_rev}" --depth 1 https://github.com/DARMA-tasking/detector.git
export DETECTOR=$PWD/detector

mkdir -p "$DETECTOR_BUILD"
cd "$DETECTOR_BUILD"
mkdir build
cd build
cmake -G "${CMAKE_GENERATOR:-Ninja}" \
-DCMAKE_INSTALL_PREFIX="$DETECTOR_BUILD/install" \
"$DETECTOR"
cmake --build . --target install
fi


if test -d "${build_dir}/checkpoint"
then
{ echo "Checkpoint already in lib... not downloading, building, and installing"; } 2>/dev/null
Expand Down Expand Up @@ -89,7 +68,6 @@ cmake -G "${CMAKE_GENERATOR:-Ninja}" \
-DCMAKE_CXX_COMPILER="${CXX:-c++}" \
-DCMAKE_C_COMPILER="${CC:-cc}" \
-DCMAKE_EXE_LINKER_FLAGS="${CMAKE_EXE_LINKER_FLAGS:-}" \
-Ddetector_DIR="$DETECTOR_BUILD/install" \
-Dcheckpoint_DIR="$CHECKPOINT_BUILD/install" \
-DCMAKE_PREFIX_PATH="${CMAKE_PREFIX_PATH:-}" \
-DCMAKE_INSTALL_PREFIX="$VT_BUILD/install" \
Expand All @@ -98,10 +76,3 @@ cmake -G "${CMAKE_GENERATOR:-Ninja}" \
"$VT"

{ time cmake --build . --target "${4}" ; } 2> >(tee build_time.txt)


if test "$use_ccache"
then
{ echo -e "===\n=== ccache statistics after build\n==="; } 2>/dev/null
ccache -s
fi
27 changes: 15 additions & 12 deletions generate_build_graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,19 +53,22 @@ def prepare_data():

data_frame = pd.read_csv(previous_builds_filename)
last_builds = data_frame.tail(int(os.getenv("INPUT_NUM_LAST_BUILD")) - 1)
updated = last_builds.append(
pd.DataFrame(
[
updated = pd.concat(
[
last_builds,
pd.DataFrame(
[
vt_total_time_seconds,
tests_total_time_seconds,
new_run_num,
new_date,
commit_id,
]
],
columns=["vt", "tests", "run_num", "date", "commit"],
)
[
vt_total_time_seconds,
tests_total_time_seconds,
new_run_num,
new_date,
commit_id,
]
],
columns=["vt", "tests", "run_num", "date", "commit"],
),
]
)

# Data to be plotted
Expand Down
54 changes: 35 additions & 19 deletions generate_perf_graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,15 +38,22 @@ def prepare_data():
time_df = pd.read_csv(time_test_file)
memory_df = pd.read_csv(memory_test_file)

num_nodes = memory_df["node"].max() + 1
num_nodes = time_df["node"].max() + 1

memory_data = list()
time_data = list()
memory_data = []
time_data = []
for node in range(num_nodes):
memory_data.append(memory_df.loc[memory_df["node"] == node])
time_data.append(time_df.tail(-num_nodes).loc[time_df["node"] == node])
node_memory_data = memory_df.loc[memory_df["node"] == node]
node_time_data = time_df.tail(-num_nodes).loc[time_df["node"] == node]

if not node_memory_data.empty:
memory_data.append(node_memory_data)

if not node_time_data.empty:
time_data.append(node_time_data)

print(f"Memory: {memory_data}")
print(f"Time: {time_data}")

new_run_num = parser.parse_args().run_num
new_date = date.today().strftime("%d %B %Y")
Expand All @@ -56,23 +63,25 @@ def prepare_data():
out_dir = f"{path_to_wiki}/perf_tests"
file_name = f"{out_dir}/{test_name}_times.csv"

current = time_df.head(num_nodes).copy()

if os.path.isfile(file_name):
total_df = pd.read_csv(file_name)
total_df = total_df.tail(NUM_LAST_BUILDS * num_nodes)
current = time_df.head(num_nodes)

if new_run_num == 0:
new_run_num = total_df["run_num"].iloc[-1] + 1

current["run_num"] = [new_run_num for node in range(num_nodes)]
current["date"] = [new_date for node in range(num_nodes)]
current["commit"] = [commit_id for node in range(num_nodes)]
current = total_df.append(current)
for node in range(num_nodes):
current.loc[current["node"] == node, "run_num"] = new_run_num
current.loc[current["node"] == node, "date"] = new_date
current.loc[current["node"] == node, "commit"] = commit_id
current = pd.concat([total_df, current])
else:
current = time_df.head(num_nodes)
current["run_num"] = [new_run_num for node in range(num_nodes)]
current["date"] = [new_date for node in range(num_nodes)]
current["commit"] = [commit_id for node in range(num_nodes)]
for node in range(num_nodes):
current.loc[current["node"] == node, "run_num"] = new_run_num
current.loc[current["node"] == node, "date"] = new_date
current.loc[current["node"] == node, "commit"] = commit_id

if not os.path.exists(out_dir):
os.mkdir(out_dir)
Expand Down Expand Up @@ -191,12 +200,15 @@ def generate_historic_graph(test_name, num_nodes, dataframe):
plt.xlabel("Run number")

run_nums = pd.unique(dataframe["run_num"]).tolist()
times = list()
errors = list()
times = []
errors = []
for node in range(num_nodes):
times.append(dataframe["mean"].loc[dataframe["node"] == node].tolist())
errors.append(dataframe["stdev"].loc[dataframe["node"] == node].tolist())

print(f"generate_historic_graph::times: {times}")
print(f"generate_historic_graph::errors: {errors}")

bar_width = 1.0 / (2 * num_nodes)

bar_positions = [
Expand All @@ -210,7 +222,7 @@ def generate_historic_graph(test_name, num_nodes, dataframe):
ax1.bar(
bar_positions[node],
times[node],
yerr=errors,
yerr=errors[node],
label=f"node {node}",
width=bar_width,
align="center",
Expand All @@ -232,5 +244,9 @@ def generate_historic_graph(test_name, num_nodes, dataframe):
if __name__ == "__main__":
set_graph_properties()
test_name_in, time_data_in, memory_data_in = prepare_data()
generate_memory_graph(test_name_in, memory_data_in)
generate_time_graph(test_name_in, time_data_in)

if len(memory_data_in) > 0:
generate_memory_graph(test_name_in, memory_data_in)

if len(time_data_in) > 0:
generate_time_graph(test_name_in, time_data_in)
13 changes: 7 additions & 6 deletions generate_wiki_pages.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def get_name_times_avg(lines):
avg_ms_threshold = 20

total_times = []
name_times_avg = dict()
name_times_avg = {}

index = 0

Expand Down Expand Up @@ -80,7 +80,7 @@ def get_headers(lines):
"""

header_times = []
name_included_avg = dict()
name_included_avg = {}

index = 0

Expand Down Expand Up @@ -124,15 +124,15 @@ def generate_name_times_avg_table(templates_text):
def prepare_data():
# Expensive template instantiations
templates_total_times = []
templates = dict()
templates = {}

# Expensive template sets
template_sets_times = []
template_sets = dict()
template_sets = {}

# Expensive headers
headers_times = []
headers = dict()
headers = {}

with open(CLANG_BUILD_REPORT) as file:
lines = file.read().splitlines()
Expand Down Expand Up @@ -304,8 +304,9 @@ def create_image_hyperlink(image_link):
def get_runner_info():
return (
"**NOTE. The following builds were run on GitHub Action runners"
"that use [2-core CPU and 7 GB RAM]"
" that use [2-core CPU and 7 GB RAM]"
"(https://docs.github.com/en/actions/using-github-hosted-runners/"
"about-github-hosted-runners/"
"about-github-hosted-runners#supported-runners-and-hardware-resources)** <br><br> \n"
"Configuration:\n"
"- Compiler: **Clang-10**\n"
Expand Down
130 changes: 130 additions & 0 deletions script/local.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
#!/bin/bash
#
# Local reproduction of the build-stats CI workflow: clones dependencies,
# builds the vt library and its tests with -ftime-trace, runs the perf
# tests under heaptrack, and regenerates the wiki graphs/flamegraphs.
#
# Usage: local.sh <workspace-dir> <run-number> <build-stats-dir>

set -euo pipefail

# Environment expected by the generate_*.py scripts (mirrors the GitHub
# Action inputs).
export WIKI_DIR=~/Work/vt.wiki
export ROOT_DIR=~/Work/vt.wiki/build_stats
export GITHUB_REPOSITORY="DARMA-tasking/vt"
export INPUT_BUILD_STATS_OUTPUT=$ROOT_DIR
export INPUT_BUILD_RESULT_FILENAME="build_result.txt"
export INPUT_BUILD_TIMES_FILENAME="build_times.csv"
export INPUT_GRAPH_FILENAME="graph.png"
# NOTE(review): INPUT_BADGE_FILENAME was previously exported twice
# ("build_status_badge.svg" then "badge_file"); the .svg name looks like
# the intended one, so the duplicate was dropped — confirm against the
# action's expectations.
export INPUT_BADGE_FILENAME="build_status_badge.svg"
export INPUT_BADGE_TITLE="vt build times"
export INPUT_NUM_LAST_BUILD=25
export INPUT_X_LABEL="Run number"
export INPUT_Y_LABEL="Build time (min)"
export INPUT_GRAPH_WIDTH=20
export INPUT_GRAPH_HEIGHT=20
export INPUT_BADGE_LOGO="logo"
export CXX=clang++-15
export CC=clang-15

WORKSPACE=$1
RUN_NUMBER=$2
BUILD_STATS_DIR=$3

cd "$WORKSPACE"

VT_BUILD_FOLDER="$WORKSPACE/build/vt"

########################
## CLONE DEPENDENCIES ##
########################

[ ! -d 'FlameGraph' ] && git clone https://github.com/brendangregg/FlameGraph.git
[ ! -d 'ClangBuildAnalyzer' ] && git clone https://github.com/aras-p/ClangBuildAnalyzer

cd ClangBuildAnalyzer
[ ! -d 'build' ] && mkdir build
cd build

cmake .. && make
chmod +x ClangBuildAnalyzer
ClangBuildTool="$WORKSPACE/ClangBuildAnalyzer/build/ClangBuildAnalyzer"

##################
## BUILD VT LIB ##
##################

cd "$WORKSPACE"
export VT_TESTS_ARGUMENTS="--vt_perf_gen_file"

# Build VT lib ("eval" removed: the arguments are already separate words,
# so a direct invocation is safer and equivalent).
[ ! -d 'vt' ] && git clone https://github.com/$GITHUB_REPOSITORY.git
"$BUILD_STATS_DIR/build_vt.sh" "$WORKSPACE/vt" "$WORKSPACE/build" "-ftime-trace" vt
# Accept either ',' or '.' as the decimal separator — `time` output is
# locale-dependent.
vt_build_time=$(grep -oP 'real\s+\K\d+m\d+[.,]\d+s' "$VT_BUILD_FOLDER/build_time.txt")

# Build tests and examples
"$BUILD_STATS_DIR/build_vt.sh" "$WORKSPACE/vt" "$WORKSPACE/build" "-ftime-trace" all
tests_and_examples_build=$(grep -oP 'real\s+\K\d+m\d+[.,]\d+s' "$VT_BUILD_FOLDER/build_time.txt")

cp "$BUILD_STATS_DIR/ClangBuildAnalyzer.ini" .
$ClangBuildTool --all "$VT_BUILD_FOLDER" vt-build
$ClangBuildTool --analyze vt-build > build_result.txt

#######################
## PERFORMANCE TESTS ##
#######################

cd "$VT_BUILD_FOLDER"
ctest --output-on-failure --verbose -L perf_test
cd -

##########################
## GENERATE FLAMEGRAPHS ##
##########################

# Running 'mpirun -n x heaptrack' will generate x number of separate files, one for each node/rank
mpirun -n 2 heaptrack "$WORKSPACE/build/vt/examples/collection/jacobi2d_vt" 10 10 200
jacobi_output_list=$(ls -- *heaptrack.jacobi2d_vt.*.zst)

node_num=0
for file in ${jacobi_output_list}
do
    file_name="flame$node_num"

    # number of allocations
    heaptrack_print -f "$file" -F "alloc_count_$file_name"
    "$WORKSPACE/FlameGraph/flamegraph.pl" --title="jacobi2d_vt node:$node_num number of allocations"\
    --width=1920 --colors mem --countname allocations < "alloc_count_$file_name" > "flame_heaptrack_jacobi_alloc_count_$node_num.svg"

    # leaked
    heaptrack_print -f "$file" -F "leaked_$file_name" --flamegraph-cost-type leaked
    "$WORKSPACE/FlameGraph/flamegraph.pl" --title="jacobi2d_vt node:$node_num number of bytes leaked"\
    --width=1920 --colors mem --countname bytes < "leaked_$file_name" > "flame_heaptrack_jacobi_leaked_$node_num.svg"

    ((node_num=node_num+1))
done

#####################
## GENERATE GRAPHS ##
#####################

cd "$WIKI_DIR" || exit 1

# Generate graph
python3 "$BUILD_STATS_DIR/generate_build_graph.py" -vt "$vt_build_time" -te "$tests_and_examples_build" -r "$RUN_NUMBER"
perf_test_files=$(find "$VT_BUILD_FOLDER/tests/" -name "*_mem.csv" | sed 's!.*/!!' | sed -e 's/_mem.csv$//')

cd perf_tests

for file in $perf_test_files
do
    # Each test generates both time/mem files
    time_file="${file}_time.csv"
    memory_file="${file}_mem.csv"
    echo "Test files $VT_BUILD_FOLDER/tests/$time_file $VT_BUILD_FOLDER/tests/$memory_file for test: $file"
    python3 "$BUILD_STATS_DIR/generate_perf_graph.py" -time "$VT_BUILD_FOLDER/tests/$time_file"\
    -mem "$VT_BUILD_FOLDER/tests/$memory_file" -r "$RUN_NUMBER" -wiki "$WIKI_DIR"
done

cd -

cp "$WORKSPACE/build_result.txt" "$INPUT_BUILD_STATS_OUTPUT"
# Glob must be outside the quotes to expand; no eval needed.
cp "$WORKSPACE"/flame_heaptrack* "./perf_tests/"

python3 "$BUILD_STATS_DIR/generate_wiki_pages.py" -t "$perf_test_files"

exit 0
Loading