name: unifrac-binaries CI
on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  build-and-test:
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, linux-gpu-cuda]
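        # ubuntu-latest and macos-latest are GitHub-hosted runners;
        # linux-gpu-cuda is presumably a self-hosted runner label with a CUDA-capable GPU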
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v3
      - uses: conda-incubator/setup-miniconda@v2
        with:
          miniconda-version: "latest"
          auto-update-conda: true
      - name: Install
        shell: bash -l {0}
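        # a login shell (bash -l) is used so the conda initialization set up by
        # setup-miniconda is loaded and 'conda activate' works inside the run block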
        run: |
          df -h .
          if [[ "$(uname -s)" == "Linux" ]];
          then
            conda create -q --yes --strict-channel-priority -n unifrac -c conda-forge -c bioconda gxx_linux-64 gfortran_linux-64 hdf5 mkl-include lz4 zlib hdf5-static libcblas liblapacke make curl
          else
            conda create -q --yes --strict-channel-priority -n unifrac -c conda-forge -c bioconda clangxx_osx-64 hdf5 mkl-include lz4 hdf5-static libcblas liblapacke make curl
          fi
          conda clean --yes -t
          df -h .
          conda activate unifrac
          echo "$(uname -s)"
          if [[ "$(uname -s)" == "Linux" ]];
          then
            which x86_64-conda-linux-gnu-gcc
            x86_64-conda-linux-gnu-gcc -v
            x86_64-conda-linux-gnu-g++ -v
          else
            which clang
            clang -v
          fi
          which h5c++
          if [[ "$(uname -s)" == "Linux" ]];
          then
            # install PGI but do not source it
            # the makefile will do it automatically
            ./scripts/install_hpc_sdk.sh </dev/null
          fi
          df -h .
          export PERFORMING_CONDA_BUILD=True
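          # PERFORMING_CONDA_BUILD is presumably consumed by the Makefile to select the
          # conda-provided toolchain and install locations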
echo "======= begin env ====="
env
echo "======= end env ====="
# all == build (shlib,bins,tests) and install
make all
df -h .
pushd src
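          # on Linux, additionally exercise the R API bindings (rapi_test), which need R and Rcpp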
if [[ "$(uname -s)" == "Linux" ]];
then
rm -f ~/.R/Makevars
conda install -q --yes --strict-channel-priority -c conda-forge r-base
unset CXXFLAGS
unset CFLAGS
unset DEBUG_CXXFLAGS
unset DEBUG_CFLAGS
# the r-base package has a broken dependency
ln -s $CONDA_PREFIX/lib/libreadline.so $CONDA_PREFIX/lib/libreadline.so.6
R -e 'install.packages("Rcpp", repos="http://lib.stat.cmu.edu/R/CRAN/")'
make rapi_test
fi
popd
      - name: Tests
        shell: bash -l {0}
        run: |
          conda activate unifrac
          # keep the thread count low for runs in containers,
          # and use an odd number to help surface potential threading bugs
          export OMP_NUM_THREADS=3
          # diagnostic messages for debugging, if needed
          export UNIFRAC_GPU_INFO=Y
          pushd src
          ./test_su
          ./test_api
          ./test_ska
          popd
          pushd test
          ./capi_test 1
          # explicitly check that the binary does not dynamically link against hdf5
          # (the awk script exits non-zero if any hdf5 entry appears in the ldd/otool output)
          if [[ "$(uname -s)" == "Linux" ]];
          then
            ldd ./capi_test |awk 'BEGIN{a=0}/hdf/{a=a+1;print $0}END{if (a==0) {print "No dynamic hdf5 found"} else {exit 2}}'
          else
            otool -L ./capi_test|awk 'BEGIN{a=0}/hdf/{a=a+1;print $0}END{if (a==0) {print "No dynamic hdf5 found"} else {exit 2}}'
          fi
          popd
          pushd src/testdata
          conda install --yes -c conda-forge h5py
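          # each block below runs ssu on the 500-sample test dataset and checks the resulting
          # distance matrix and/or PCoA against reference HDF5 files; the trailing numeric
          # arguments are presumably the comparison tolerances (and, for PCoA, the number of
          # axes compared)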
          # weighted_unnormalized
          time ssu -m weighted_unnormalized_fp32 -i test500.biom -t test500.tre --pcoa 4 -r hdf5_fp32 -o t1.h5
          ./compare_unifrac_matrix.py test500.weighted_unnormalized_fp32.h5 t1.h5 1.e-5
          ./compare_unifrac_pcoa.py test500.weighted_unnormalized_fp32.h5 t1.h5 3 0.1
          rm -f t1.h5
          # retry with default precision handling
          time ssu -m weighted_unnormalized -i test500.biom -t test500.tre --pcoa 4 -r hdf5 -o t1.h5
          ./compare_unifrac_matrix.py test500.weighted_unnormalized_fp32.h5 t1.h5 1.e-5
          ./compare_unifrac_pcoa.py test500.weighted_unnormalized_fp32.h5 t1.h5 3 0.1
          rm -f t1.h5
          time ssu -f -m weighted_unnormalized_fp32 -i test500.biom -t test500.tre --pcoa 4 -r hdf5_fp32 -o t1.h5
          # the matrix will be different, but the PCoA should be similar
          ./compare_unifrac_pcoa.py test500.weighted_unnormalized_fp32.h5 t1.h5 3 0.1
          rm -f t1.h5
          time ssu -m weighted_unnormalized_fp64 -i test500.biom -t test500.tre --pcoa 4 -r hdf5_fp64 -o t1.h5
          # minimal precision loss between fp32 and fp64
          ./compare_unifrac_matrix.py test500.weighted_unnormalized_fp32.h5 t1.h5 1.e-5
          ./compare_unifrac_pcoa.py test500.weighted_unnormalized_fp32.h5 t1.h5 3 0.1
          rm -f t1.h5
          # weighted_normalized
          time ssu -f -m weighted_normalized -i test500.biom -t test500.tre --pcoa 4 -r hdf5_fp32 -o t1.h5
          ./compare_unifrac_matrix.py test500.weighted_normalized_fp32.f.h5 t1.h5 1.e-5
          ./compare_unifrac_pcoa.py test500.weighted_normalized_fp32.f.h5 t1.h5 3 0.1
          rm -f t1.h5
          time ssu -f -m weighted_normalized_fp64 -i test500.biom -t test500.tre --pcoa 4 -r hdf5_fp64 -o t1.h5
          ./compare_unifrac_matrix.py test500.weighted_normalized_fp32.f.h5 t1.h5 1.e-5
          ./compare_unifrac_pcoa.py test500.weighted_normalized_fp32.f.h5 t1.h5 3 0.1
          rm -f t1.h5
          # unweighted
          time ssu -f -m unweighted -i test500.biom -t test500.tre --pcoa 4 -r hdf5_fp32 -o t1.h5
          ./compare_unifrac_matrix.py test500.unweighted_fp32.f.h5 t1.h5 1.e-5
          ./compare_unifrac_pcoa.py test500.unweighted_fp32.f.h5 t1.h5 3 0.1
          rm -f t1.h5
          time ssu -f -m unweighted_fp64 -i test500.biom -t test500.tre --pcoa 4 -r hdf5_fp64 -o t1.h5
          ./compare_unifrac_matrix.py test500.unweighted_fp32.f.h5 t1.h5 1.e-5
          ./compare_unifrac_pcoa.py test500.unweighted_fp32.f.h5 t1.h5 3 0.1
          ls -l t1.h5
          rm -f t1.h5
          # hdf5 without distance matrix, just PCoA
          echo "hdf5_nodist"
          time ssu -f -m unweighted_fp64 -i test500.biom -t test500.tre --pcoa 4 -r hdf5_nodist -o t1.h5
          ./compare_unifrac_pcoa.py test500.unweighted_fp32.f.h5 t1.h5 3 0.1
          ls -l t1.h5
          rm -f t1.h5
          time ssu -f -m unweighted_fp32 -i test500.biom -t test500.tre --pcoa 4 -r hdf5_nodist -o t1.h5
          ./compare_unifrac_pcoa.py test500.unweighted_fp32.f.h5 t1.h5 3 0.1
          ls -l t1.h5
          rm -f t1.h5
          # permanova
          echo "permanova"
          time ssu -m unweighted -i test500.biom -t test500.tre --pcoa 4 -r hdf5 --permanova 999 -g test500.tsv -c empo_2 -o t1.h5
          # compare to values given by skbio.stats.distance.permanova
          python compare_unifrac_stats.py t1.h5 5 999 1.001112 0.456 0.001 0.1
          ls -l t1.h5
          rm -f t1.h5
          time ssu -m unweighted -i test500.biom -t test500.tre --pcoa 4 -r hdf5 --permanova 99 -g test500.tsv -c empo_2 -o t1.h5
          python compare_unifrac_stats.py t1.h5 5 99 1.001112 0.456 0.001 0.2
          ls -l t1.h5
          rm -f t1.h5
          time ssu -m weighted_unnormalized_fp32 -i test500.biom -t test500.tre --pcoa 4 -r hdf5_nodist -g test500.tsv -c empo_3 -o t1.h5
          # compare to values given by skbio.stats.distance.permanova
          python compare_unifrac_stats.py t1.h5 17 999 0.890697 0.865 0.001 0.1
          ls -l t1.h5
          rm -f t1.h5
          # partials
          echo "partials"
          ssu -f -m unweighted_fp32 -i test500.biom -t test500.tre --mode partial-report --n-partials 2
          time ssu -f -m unweighted_fp32 -i test500.biom -t test500.tre --mode partial --start 0 --stop 125 -o t1.partial.1
          time ssu -f -m unweighted_fp32 -i test500.biom -t test500.tre --mode partial --start 125 --stop 250 -o t1.partial.2
          ls -l t1.partial*
          ssu -f -m unweighted_fp32 -i test500.biom -t test500.tre --mode check-partial --partial-pattern 't1.partial.*'
          time ssu -f -m unweighted_fp64 -i test500.biom -t test500.tre --pcoa 4 --mode merge-partial --partial-pattern 't1.partial.*' -r hdf5_fp64 -o t1.h5
          ./compare_unifrac_matrix.py test500.unweighted_fp32.f.h5 t1.h5 1.e-5
          ./compare_unifrac_pcoa.py test500.unweighted_fp32.f.h5 t1.h5 3 0.1
          ls -l t1.h5
          rm -f t1.h5
          time ssu -f -m unweighted_fp32 -i test500.biom -t test500.tre --pcoa 4 --mode merge-partial --partial-pattern 't1.partial.*' -r hdf5_fp32 -o t1.h5
          ./compare_unifrac_matrix.py test500.unweighted_fp32.f.h5 t1.h5 1.e-5
          ./compare_unifrac_pcoa.py test500.unweighted_fp32.f.h5 t1.h5 3 0.1
          ls -l t1.h5
          rm -f t1.h5
          time ssu -f -m unweighted_fp32 -i test500.biom -t test500.tre --pcoa 4 --mode merge-partial --partial-pattern 't1.partial.*' -r hdf5_nodist -o t1.h5
          ./compare_unifrac_pcoa.py test500.unweighted_fp32.f.h5 t1.h5 3 0.1
          ls -l t1.h5
          rm -f t1.h5
          rm -f t1.partial.*
          # subsample
          echo "subsample"
          time ssu -f -m unweighted -i test500.biom -t test500.tre --pcoa 4 -r hdf5_fp32 --subsample-depth 100 -o t1.h5
          ./compare_unifrac_pcoa.py test500.unweighted_fp32.f.h5 t1.h5 3 0.3
          rm -f t1.h5
          time ssu -f -m unweighted -i test500.biom -t test500.tre --pcoa 4 -r hdf5_fp32 --subsample-depth 100 --subsample-replacement with -o t1.h5
          ./compare_unifrac_pcoa.py test500.unweighted_fp32.f.h5 t1.h5 3 0.3
          rm -f t1.h5
          time ssu -f -m unweighted -i test500.biom -t test500.tre --pcoa 4 -r hdf5_fp32 --subsample-depth 100 --subsample-replacement without -o t1.h5
          ./compare_unifrac_pcoa.py test500.unweighted_fp32.f.h5 t1.h5 3 0.3
          rm -f t1.h5
          time ssu -m unweighted -i test500.biom -t test500.tre --pcoa 4 -r hdf5_nodist --permanova 99 -g test500.tsv -c empo_2 --subsample-depth 100 -o t1.h5
          python compare_unifrac_stats.py t1.h5 5 99 1.001112 0.456 0.05 0.6
          ls -l t1.h5
          rm -f t1.h5
          # multi
          echo "multi"
          time ssu -f -m unweighted -i test500.biom -t test500.tre --pcoa 4 --mode multi --subsample-depth 100 --n-subsamples 10 -o t1.h5
          ./compare_unifrac_pcoa_multi.py test500.unweighted_fp32.f.h5 t1.h5 10 3 0.3
          ls -l t1.h5
          rm -f t1.h5
          time ssu -m unweighted -i test500.biom -t test500.tre --pcoa 4 --mode multi --n-subsamples 10 --permanova 99 -g test500.tsv -c empo_2 --subsample-depth 100 -o t1.h5
          python compare_unifrac_stats_multi.py t1.h5 10 5 99 1.001112 0.456 0.08 0.6
          ls -l t1.h5
          rm -f t1.h5
          time ssu -m unweighted -i test500.biom -t test500.tre --pcoa 4 --mode multi --n-subsamples 10 --permanova 99 -g test500.tsv -c empo_2 --subsample-depth 100 --subsample-replacement without -o t1.h5
          python compare_unifrac_stats_multi.py t1.h5 10 5 99 1.001112 0.456 0.08 0.6
          ls -l t1.h5
          rm -f t1.h5
          popd
      - name: Sanity checks
        shell: bash -l {0}
        run: |
          conda activate unifrac
          pushd ci
          ./crawford_test.sh
          popd