Skip to content

Commit

Permalink
Merge pull request #67 from cameron-git/benchmark_docker
Browse files Browse the repository at this point in the history
Benchmarks: Docker + Memory bench
  • Loading branch information
biphasic authored Nov 9, 2023
2 parents b44386d + f786030 commit 7fdefe8
Show file tree
Hide file tree
Showing 30 changed files with 853 additions and 721 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -28,3 +28,13 @@ conda install lava-dl -c conda-forge

# Rules of adding benchmarks
1. The implementation you're benchmarking must be in the library's docs.

# Docker

The following commands build the Docker image, generate the figures, and copy them into this folder.
`./bench.sh` takes the batch size as its first argument.

```sh
./build.sh
./bench.sh 32
```
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
#!/bin/bash
# Run the snn-bench container and copy the generated data/figures into
# this directory.
#
# Usage: ./bench.sh <batch_size>
set -euo pipefail

batch_size=${1:?usage: $0 <batch_size>}

# Remove a container left over from a previous run; ignore "no such
# container" on the first run.
docker rm snn-bench 2>/dev/null || true

# --gpus all requires the NVIDIA container toolkit on the host.
docker run -it --gpus all --name snn-bench snn-bench ./run_benchmarks.sh "$batch_size"

# Copy the results out of the (now stopped) container.
docker cp snn-bench:/app/data.csv .
docker cp snn-bench:/app/fig/ .
cp ./fig/* .
rm -rf -- fig
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
#!/bin/bash
# Rebuild the snn-bench Docker image from the ./docker context.
set -euo pipefail

# Remove the stale container and image; ignore errors when they do not
# exist yet (e.g. on the very first build).
docker rm snn-bench 2>/dev/null || true
docker rmi snn-bench 2>/dev/null || true

docker build ./docker --progress=plain -t snn-bench
Original file line number Diff line number Diff line change
@@ -1,61 +1,34 @@
,framework,neurons,pass,time [s]
0,Spyx (fp16)<br> v0.1.9,512,forward,0.0008169346899341788
1,Spyx (fp16)<br> v0.1.9,4096,forward,0.010695806209077226
2,Spyx (fp16)<br> v0.1.9,8192,forward,0.02386324826408835
3,Spyx (fp32)<br> v0.1.9,512,forward,0.001162012002747787
4,Spyx (fp32)<br> v0.1.9,4096,forward,0.018527232178854287
5,Spyx (fp32)<br> v0.1.9,8192,forward,0.034034236272176104
6,SpikingJelly CuPy<br>v0.0.0.0.15,512,forward,0.00208638109678521
7,SpikingJelly CuPy<br>v0.0.0.0.15,4096,forward,0.02017000546822181
8,SpikingJelly CuPy<br>v0.0.0.0.15,8192,forward,0.02700501052956832
9,Rockpool<br>v2.7,512,forward,0.450988245010376
10,Rockpool<br>v2.7,4096,forward,0.5514374375343323
11,Rockpool<br>v2.7,8192,forward,0.8179528713226318
12,Rockpool EXODUS<br>v2.7,512,forward,0.0063792095062839
13,Rockpool EXODUS<br>v2.7,4096,forward,0.17546647787094116
14,Rockpool EXODUS<br>v2.7,8192,forward,0.5170656442642212
15,Sinabs<br>v1.2.9,512,forward,0.08140590667724609
16,Sinabs<br>v1.2.9,4096,forward,0.15627563916719878
17,Sinabs<br>v1.2.9,8192,forward,0.45073709487915037
18,Sinabs EXODUS<br>v1.1.2,512,forward,0.005014166796117797
19,Sinabs EXODUS<br>v1.1.2,4096,forward,0.1618201365837684
20,Sinabs EXODUS<br>v1.1.2,8192,forward,0.5298179984092712
21,snnTorch<br>v0.7.0,512,forward,0.15709185600280762
22,snnTorch<br>v0.7.0,4096,forward,0.2415475845336914
23,snnTorch<br>v0.7.0,8192,forward,0.6590602397918701
24,Norse<br>v1.0.0,512,forward,0.17225990692774454
25,Norse<br>v1.0.0,4096,forward,0.22791510158114964
26,Norse<br>v1.0.0,8192,forward,0.5284585356712341
27,Spyx (fp16)<br> v0.1.9,512,backward,0.0023113923668313776
28,Spyx (fp16)<br> v0.1.9,4096,backward,0.016376035348743413
29,Spyx (fp16)<br> v0.1.9,8192,backward,0.03882672663997201
30,Spyx (fp32)<br> v0.1.9,512,backward,0.00331751147039019
31,Spyx (fp32)<br> v0.1.9,4096,backward,0.02607223870604933
32,Spyx (fp32)<br> v0.1.9,8192,backward,0.061416154196768095
33,SpikingJelly CuPy<br>v0.0.0.0.15,512,backward,0.0021877023040271196
34,SpikingJelly CuPy<br>v0.0.0.0.15,4096,backward,0.10078186988830566
35,SpikingJelly CuPy<br>v0.0.0.0.15,8192,backward,0.025554166899787054
36,Rockpool<br>v2.7,512,backward,0.4077626705169678
37,Rockpool<br>v2.7,4096,backward,2.7636924584706626
38,Rockpool<br>v2.7,8192,backward,5.59786057472229
39,Rockpool EXODUS<br>v2.7,512,backward,0.008734763449456494
40,Rockpool EXODUS<br>v2.7,4096,backward,0.07632951900876801
41,Rockpool EXODUS<br>v2.7,8192,backward,0.09672652120175569
42,Sinabs<br>v1.2.9,512,backward,0.3001319680895124
43,Sinabs<br>v1.2.9,4096,backward,2.1311139265696206
44,Sinabs<br>v1.2.9,8192,backward,4.366256554921468
45,Sinabs EXODUS<br>v1.1.2,512,backward,0.012992295911235194
46,Sinabs EXODUS<br>v1.1.2,4096,backward,0.06799119313557943
47,Sinabs EXODUS<br>v1.1.2,8192,backward,0.09323820114135742
48,snnTorch<br>v0.7.0,512,backward,0.10173825025558472
49,snnTorch<br>v0.7.0,4096,backward,0.2020041227340698
50,snnTorch<br>v0.7.0,8192,backward,0.723294734954834
51,Norse<br>v1.0.0,512,backward,0.17903570334116617
52,Norse<br>v1.0.0,4096,backward,1.2727914651234944
53,Norse<br>v1.0.0,8192,backward,2.673928419748942
54,Lava DL<br>v0.4.0.dev0,512,forward,0.0037799142441659606
55,Lava DL<br>v0.4.0.dev0,4096,forward,0.08041971206665038
56,Lava DL<br>v0.4.0.dev0,8192,forward,0.21762444972991943
57,Lava DL<br>v0.4.0.dev0,512,backward,0.005953287084897359
58,Lava DL<br>v0.4.0.dev0,4096,backward,0.07756125926971436
59,Lava DL<br>v0.4.0.dev0,8192,backward,0.24242258071899414
framework,neurons,forward,backward,memory
Rockpool<br>v2.7,4096,0.6109498143196106,0.8800194263458252,1851982848
Rockpool<br>v2.7,8192,0.6990636984507242,1.6014392375946045,3827696640
Rockpool<br>v2.7,16384,1.0251133441925049,3.0923101902008057,8175223808
Rockpool EXODUS<br>v2.7,4096,0.03694915337996049,0.029833895318648395,2192186368
Rockpool EXODUS<br>v2.7,8192,0.11676208178202312,0.061521693638392855,4485028864
Rockpool EXODUS<br>v2.7,16384,0.33924134572347003,0.08307087898254395,9489882112
Sinabs<br>v1.2.9,4096,0.11245171229044597,0.6348053812980652,1392647680
Sinabs<br>v1.2.9,8192,0.15771047885601336,1.195282777150472,2908511744
Sinabs<br>v1.2.9,16384,0.3442482550938924,2.3881641228993735,6452607488
Sinabs EXODUS<br>v1.1.2,4096,0.031717587262392044,0.026867510477701823,1605931008
Sinabs EXODUS<br>v1.1.2,8192,0.09183779629794034,0.05146604631005264,3440185344
Sinabs EXODUS<br>v1.1.2,16384,0.29870578220912386,0.07561575955358045,7937069056
Norse<br>v1.0.0,4096,0.21016781330108641,0.3762962420781453,1522155520
Norse<br>v1.0.0,8192,0.23719366391499838,0.7118913332621256,3170140160
Norse<br>v1.0.0,16384,0.5078175067901611,1.4573170344034831,6860111872
snnTorch<br>v0.7.0,4096,0.2204042434692383,0.18412581357088956,867047424
snnTorch<br>v0.7.0,8192,0.3605805238087972,0.6607763171195984,2148367360
snnTorch<br>v0.7.0,16384,0.8902632395426432,2.619587024052938,6964052992
SpikingJelly PyTorch<br>v0.0.0.0.15,4096,0.10764691704197933,0.32283919198172434,1521369088
SpikingJelly PyTorch<br>v0.0.0.0.15,8192,0.12546713650226593,0.6649779081344604,3168567296
SpikingJelly PyTorch<br>v0.0.0.0.15,16384,0.3645840088526408,1.4009304841359456,6856966144
SpikingJelly CuPy<br>v0.0.0.0.15,4096,0.009032712389000863,0.008165712745822206,1261060096
SpikingJelly CuPy<br>v0.0.0.0.15,8192,0.016318264007568358,0.0625191256403923,2646376448
SpikingJelly CuPy<br>v0.0.0.0.15,16384,0.022625601809957752,0.23301858372158474,5930157568
Spyx full-precision v0.1.10,4096,0.008013666152954102,0.012141303539276125,nan
Spyx full-precision v0.1.10,8192,0.021323987057334497,0.031084310799314268,nan
Spyx full-precision v0.1.10,16384,0.04252536098162333,0.07192833887206185,nan
Spyx half-precision v0.1.10,4096,0.004259347408375841,0.006319355217099591,nan
Spyx half-precision v0.1.10,8192,0.013356369852230248,0.01792999448304995,nan
Spyx half-precision v0.1.10,16384,0.03154472204355093,0.04650832139528715,nan
Lava DL<br>v0.4.0.dev0,4096,0.0309566277724046,0.019224132810320173,1256727552
Lava DL<br>v0.4.0.dev0,8192,0.07851036695333627,0.08087961196899414,2909822464
Lava DL<br>v0.4.0.dev0,16384,0.23537524541219076,0.26369449496269226,7967153664
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
FROM nvidia/cuda:11.8.0-cudnn8-devel-ubuntu22.04

WORKDIR /app

# Build/runtime system packages for the benchmark frameworks.
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        git wget python3 python3-pip python3-dev cmake build-essential \
    && rm -rf /var/lib/apt/lists/*

# Copy only the requirements first so the slow dependency layers below
# are cached across code-only changes.
COPY requirements.txt .

# CUDA 11 JAX wheels come from Google's dedicated wheel index.
RUN pip3 install --upgrade "jax[cuda11_pip]" -f https://storage.googleapis.com/jax-releases/jax_cuda_releases.html

RUN pip3 install torch torchvision torchaudio

RUN pip3 install -r ./requirements.txt

# Benchmark scripts and supporting files.
COPY . .

RUN ["chmod","+x","./run_benchmarks.sh"]

# Default batch size; bench.sh overrides this with its own argument.
CMD [ "./run_benchmarks.sh", "16" ]
Original file line number Diff line number Diff line change
@@ -0,0 +1,159 @@
import pandas as pd
import os
import argparse

# Benchmark results written by run_benchmarks.sh: one row per
# framework/neuron-count with forward, backward and memory columns.
df = pd.read_csv("../data.csv")

os.makedirs("./fig", exist_ok=True)

##############

# Total latency per row = forward pass + backward pass.
df["total"] = df["forward"] + df["backward"]
df["memory"] = df["memory"] * 1e-9  # convert bytes to GB
# Flatten the HTML line breaks embedded in the CSV labels, then
# re-insert <br> where the display names should wrap onto two lines.
df["framework"] = df["framework"].str.replace("<br>", " ")
df["framework"] = df["framework"].str.replace(
    "SpikingJelly CuPy v0.0.0.0.15", "SpikingJelly CuPy<br>v0.0.15"
)
df["framework"] = df["framework"].str.replace(
    "SpikingJelly PyTorch v0.0.0.0.15", "SpikingJelly PyTorch<br>v0.0.15"
)
df["framework"] = df["framework"].str.replace(
    "Spyx half-precision v0.1.10", "Spyx (float16) v0.1.10"
)
df["framework"] = df["framework"].str.replace(
    "Spyx full-precision v0.1.10", "Spyx (float32) v0.1.10"
)
df["framework"] = df["framework"].str.replace("Rockpool EXODUS", "Rockpool EXODUS<br>")
df["framework"] = df["framework"].str.replace("Sinabs EXODUS", "Sinabs EXODUS<br>")

###############

import plotly.express as px
import plotly.graph_objects as go

# NOTE(review): only referenced by the commented-out annotation code
# further down; kept for when that code is re-enabled.
frameworks = df["framework"].unique()


def get_runtime_figure(df, rounding=2, title=""):
    """Build a horizontal, log-scale bar chart of total runtime per framework.

    Expects a ``total`` column (forward + backward time in seconds) and a
    ``framework`` column; bars are sorted by total time and labelled with
    the value rounded to ``rounding`` decimals.
    """
    runtime_fig = px.bar(
        df,
        y="framework",
        x="total",
        log_x=True,
        text_auto=f".{rounding}f",
        orientation="h",
        range_x=(0.01, df["total"].max() * 1.2),
    )

    runtime_fig.update_layout(
        title=title,
        yaxis={"categoryorder": "total descending"},
        legend=dict(orientation="h", yanchor="bottom", y=1.01, xanchor="right", x=1),
        margin=dict(l=0, r=20, t=70, b=10),
        yaxis_title="",
        xaxis_title="Time (s)",
    )
    # Enlarge facet/annotation text for readability.
    runtime_fig.update_annotations(font_size=16)
    return runtime_fig


def get_memory_figure(df, rounding=2, title=""):
    """Build a horizontal bar chart of peak GPU memory usage per framework.

    Spyx rows are excluded because the benchmark data reports no memory
    figure for them (the ``memory`` column is NaN for Spyx).
    """
    # Idiomatic boolean negation instead of `== False`.
    df = df[~df["framework"].str.contains("Spyx")]
    fig = px.bar(
        df,
        y="framework",
        x="memory",
        text_auto=f".{rounding}f",
        orientation="h",
        range_x=(0.01, df["memory"].max() * 1.2),
    )

    fig.update_layout(
        title=title,
        yaxis={"categoryorder": "total descending"},
        margin=dict(l=0, r=20, t=70, b=10),
        yaxis_title="",
        xaxis_title="Max memory usage (GB)",
    )
    # Enlarge facet/annotation text for readability.
    fig.update_annotations(font_size=16)
    return fig


if __name__ == "__main__":
    # Optional batch size is used only to annotate the figure titles;
    # it does not affect the plotted data.
    parser = argparse.ArgumentParser()
    parser.add_argument("--batch_size", action="store", default=None, required=False)
    args = parser.parse_args()
    batch_str = (
        f", batch size of {args.batch_size}" if args.batch_size is not None else ""
    )

    ###################

    # Only the largest benchmark (16k neurons) is plotted.
    df16k = df[df["neurons"] == 16384]

    fig = get_runtime_figure(
        df16k, title=f"Forward + backward pass latency on GPU for 16k neurons, lower is better{batch_str}"
    )

    # JSON for interactive embedding, PNG for static display.
    fig.write_json("./fig/framework-benchmarking-16k.json")
    fig.write_image("./fig/framework-benchmarking-16k.png", width=1024)  # scale=2)
    fig.show()

    fig = get_memory_figure(
        df16k, title=f"Maximum GPU memory usage during latency benchmark for 16k neurons, lower is better{batch_str}"
    )

    fig.write_json("./fig/framework-benchmarking-mem-16k.json")
    fig.write_image("./fig/framework-benchmarking-mem-16k.png", width=1024)
    fig.show()



# ####################

# df8k = df[df["neurons"] == 8192]

# fig = get_runtime_figure(
# df8k, title=f"Latency for 8k neurons, lower is better{batch_str}"
# )

# fig.write_json("./fig/framework-benchmarking-8k.json")
# fig.write_image("./fig/framework-benchmarking-8k.png", width=1024)
# fig.show()

# fig = get_memory_figure(df8k, title="Memory use for 8k neurons, lower is better")

# fig.write_json("./fig/framework-benchmarking-mem-8k.json")
# fig.write_image("./fig/framework-benchmarking-mem-8k.png", width=1024)
# fig.show()

# ###################

# df4k = df[df["neurons"] == 4096]

# fig = get_runtime_figure(
# df4k, title=f"Latency for 4k neurons, lower is better{batch_str}"
# )

# fig.write_json("./fig/framework-benchmarking-4k.json")
# fig.write_image("./fig/framework-benchmarking-4k.png", width=1024) # scale=2)
# fig.show()

# fig = get_memory_figure(df4k, title="Memory use for 4k neurons, lower is better")

# fig.write_json("./fig/framework-benchmarking-mem-4k.json")
# fig.write_image("./fig/framework-benchmarking-mem-4k.png", width=1024)
# fig.show()
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
torch>=2.0.0
sinabs
# sinabs-exodus
norse
# spikingjelly
# lava-dl
spyx
tonic
snntorch
rockpool
kaleido
ipykernel
plotly
nbformat
cupy-cuda11x
ninja
Loading

0 comments on commit 7fdefe8

Please sign in to comment.