-
Notifications
You must be signed in to change notification settings - Fork 2
299 lines (296 loc) · 15.3 KB
/
generate_updates_json.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
---
# Generates the Warzone 2100 update-info JSON files (wz2100.json,
# wz2100_compat.json, wzlobby.json), digitally signs them, publishes any
# changes to the gh-pages branch, waits for the GitHub Pages deployment,
# purges the Cloudflare cache for the changed paths, and informs the
# lobby server.
name: 'Generate Updates JSON'

# NOTE: generic YAML 1.1 parsers read a bare `on` key as boolean `true`;
# GitHub Actions' own loader handles it as intended.
on:
  push:
    branches:
      - master
  repository_dispatch:
    types: [github_release_update, development_build_update, scheduled_update]

# Never run two instances of this workflow concurrently (they both push to gh-pages).
concurrency: generate_updates

jobs:
  generate_updates:
    name: 'Generate Updates JSON'
    runs-on: ubuntu-latest
    steps:
      - name: Install Prereqs
        run: |
          # -y: never prompt for confirmation on a non-interactive runner
          sudo apt-get install -y libsodium-dev
      # The master branch of this repo holds the generator scripts (ci/) and
      # the signjson tool sources.
      - name: Checkout master branch
        uses: actions/checkout@v3
        with:
          ref: master
          path: master
          submodules: recursive
          persist-credentials: false
      - name: Create working directories
        id: preparefolders
        run: |
          mkdir -p "${GITHUB_WORKSPACE}/data/master_branch"
          mkdir -p "${GITHUB_WORKSPACE}/data/github_releases"
          mkdir -p "${GITHUB_WORKSPACE}/data/generated"
          mkdir -p "${GITHUB_WORKSPACE}/data/tmp"
          mkdir -p "${GITHUB_WORKSPACE}/data/signjson/build"
          mkdir -p "${GITHUB_WORKSPACE}/data/signjson/bin"
          CHANGELOG_DIR="${GITHUB_WORKSPACE}/temp/changes"
          mkdir -p "${CHANGELOG_DIR}"
          echo "CHANGELOG_DIR=${CHANGELOG_DIR}" >> $GITHUB_OUTPUT
      - name: Fetch latest GitHub Release info
        uses: past-due/fetch-release-info@master
        with:
          github_repo: 'Warzone2100/warzone2100'
          github_token: '${{ secrets.GITHUB_TOKEN }}'
          output_directory: '${{ github.workspace }}/data/github_releases'
          cache_directory: '${{ github.workspace }}/_tmp_cache_data/github_releases'
      - name: Fetch first page of GitHub releases list
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          curl -H "Authorization: token ${GITHUB_TOKEN}" -s "https://api.github.com/repos/Warzone2100/warzone2100/releases" > "${GITHUB_WORKSPACE}/data/github_releases/index.json"
      - name: Fetch latest successful master commit info
        working-directory: "${{ github.workspace }}/data/master_branch"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          GITHUB_REPOSITORY="Warzone2100/warzone2100" BRANCH="master" "${GITHUB_WORKSPACE}/master/ci/process_latest_successful_commit.sh"
      - name: Generate updates.json
        working-directory: "${{ github.workspace }}/data/generated"
        run: |
          python3 "${GITHUB_WORKSPACE}/master/ci/generate_updates_json.py" -r "${GITHUB_WORKSPACE}/data/github_releases/latest.json" -i "${GITHUB_WORKSPACE}/data/github_releases/index.json" -d "${GITHUB_WORKSPACE}/data/master_branch/latest_successful_commit.json"
          cat "updates.json"
      - name: Minify updates.json
        working-directory: "${{ github.workspace }}/data/generated"
        run: |
          cat "updates.json" | jq -c . > "updates.min.json"
          rm "updates.json"
          mv "updates.min.json" "wz2100.json"
      - name: Generate compat.json
        working-directory: "${{ github.workspace }}/data/generated"
        run: |
          python3 "${GITHUB_WORKSPACE}/master/ci/generate_compat_json.py" -r "${GITHUB_WORKSPACE}/data/github_releases/latest.json" -i "${GITHUB_WORKSPACE}/data/github_releases/index.json" -d "${GITHUB_WORKSPACE}/data/master_branch/latest_successful_commit.json"
          cat "compat.json"
      - name: Minify compat.json
        working-directory: "${{ github.workspace }}/data/generated"
        run: |
          cat "compat.json" | jq -c . > "compat.min.json"
          rm "compat.json"
          mv "compat.min.json" "wz2100_compat.json"
      - name: Compile signjson tool
        working-directory: "${{ github.workspace }}/data/signjson/build"
        run: |
          cmake -DCMAKE_BUILD_TYPE=RelWithDebInfo "-DCMAKE_INSTALL_PREFIX:PATH=${GITHUB_WORKSPACE}/data/signjson" "${GITHUB_WORKSPACE}/master/signjson"
          cmake --build . --target install
      - name: Digitally sign WZ .json
        working-directory: "${{ github.workspace }}/data/generated"
        env:
          # Pass the secret through the environment instead of interpolating
          # `${{ }}` directly into the script body (hardening best practice).
          SIGNJSON_B64_SECRETKEY: ${{ secrets.SIGNJSON_B64_SECRETKEY }}
        run: |
          echo "Signing wz2100.json"
          "${GITHUB_WORKSPACE}/data/signjson/bin/signjson" -k "${SIGNJSON_B64_SECRETKEY}" "wz2100.json"
          echo "Signing wz2100_compat.json"
          "${GITHUB_WORKSPACE}/data/signjson/bin/signjson" -k "${SIGNJSON_B64_SECRETKEY}" "wz2100_compat.json"
      # gh-pages is the published data branch that the generated files are copied into.
      - name: Checkout gh-pages branch
        uses: actions/checkout@v3
        with:
          ref: gh-pages
          path: gh-pages
          persist-credentials: false
      - name: Copy WZ JSON to gh-pages branch - if data has changed
        id: copy_updates
        env:
          # Hardening: expand workflow expressions via env, not inline in the script.
          EVENT_ACTION: ${{ github.event.action }}
        run: |
          COPIED_ANY_FILES=0
          for NEW_FILE in ${GITHUB_WORKSPACE}/data/generated/*.json; do
            [ -e "$NEW_FILE" ] || continue
            EXISTING_FILE="${GITHUB_WORKSPACE}/gh-pages/$(basename "$NEW_FILE")"
            # scheduled_update dispatches always republish (to refresh signatures);
            # otherwise compare new vs existing, ignoring keys that change every
            # run (signature + expiry).
            if [[ "${EVENT_ACTION}" != "scheduled_update" ]] && [ -f "${EXISTING_FILE}" ]; then
              FILTERED_KEYS='["SIGNATURE","validThru"]'
              NEW_FILTERED="${GITHUB_WORKSPACE}/data/tmp/new_filtered.json"
              EXISTING_FILTERED="${GITHUB_WORKSPACE}/data/tmp/old_filtered.json"
              jq --argjson filtered_keys "${FILTERED_KEYS}" -cS '. | with_entries( select( .key as $key | $filtered_keys | index($key) == null ) )' "${NEW_FILE}" > "${NEW_FILTERED}"
              jq --argjson filtered_keys "${FILTERED_KEYS}" -cS '. | with_entries( select( .key as $key | $filtered_keys | index($key) == null ) )' "${EXISTING_FILE}" > "${EXISTING_FILTERED}"
              CMP_STATUS=$(cmp -s "${NEW_FILTERED}" "${EXISTING_FILTERED}" && echo "0" || echo "1")
              if [[ $CMP_STATUS = 0 ]]; then
                # Files are effectively the same
                echo "Skipping copy of newly-generated file (content is equal): $(basename "$NEW_FILE")"
                continue
              fi
            fi
            echo "Copying newly-generated file: $(basename "$NEW_FILE")"
            cp "${NEW_FILE}" "${EXISTING_FILE}"
            COPIED_ANY_FILES=1
          done
          if [[ $COPIED_ANY_FILES = 0 ]]; then
            # Files are all effectively the same
            echo "skip_publish=true" >> $GITHUB_OUTPUT
            exit 0
          fi
      # Note: The following step must be run with a working directory of the gh-pages branch, as it stores additional _data information
      - name: Generate wzlobby.json
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        working-directory: "${{ github.workspace }}/gh-pages"
        run: |
          NEW_FILE="${GITHUB_WORKSPACE}/data/generated/lobby.json"
          python3 "${GITHUB_WORKSPACE}/master/ci/generate_lobby_json.py" \
            -r "${GITHUB_WORKSPACE}/data/github_releases/latest.json" \
            -i "${GITHUB_WORKSPACE}/data/github_releases/index.json" \
            -d "${GITHUB_WORKSPACE}/data/master_branch/latest_successful_commit.json" \
            -o "${NEW_FILE}"
          cat "${NEW_FILE}"
      - name: Copy wzlobby.json to gh-pages branch - if data has changed
        id: copy_wzlobby
        run: |
          NEW_FILE="${GITHUB_WORKSPACE}/data/generated/lobby.json"
          EXISTING_FILE="${GITHUB_WORKSPACE}/gh-pages/wzlobby.json"
          if [ -f "${EXISTING_FILE}" ]; then
            # Same filtered comparison as for the updates JSON above.
            FILTERED_KEYS='["SIGNATURE","validThru"]'
            NEW_FILTERED="${GITHUB_WORKSPACE}/data/tmp/newlobby_filtered.json"
            EXISTING_FILTERED="${GITHUB_WORKSPACE}/data/tmp/oldlobby_filtered.json"
            jq --argjson filtered_keys "${FILTERED_KEYS}" -cS '. | with_entries( select( .key as $key | $filtered_keys | index($key) == null ) )' "${NEW_FILE}" > "${NEW_FILTERED}"
            jq --argjson filtered_keys "${FILTERED_KEYS}" -cS '. | with_entries( select( .key as $key | $filtered_keys | index($key) == null ) )' "${EXISTING_FILE}" > "${EXISTING_FILTERED}"
            CMP_STATUS=$(cmp -s "${NEW_FILTERED}" "${EXISTING_FILTERED}" && echo "0" || echo "1")
            if [[ $CMP_STATUS = 0 ]]; then
              # Files are effectively the same
              echo "Skipping copy of newly-generated file (content is equal)"
              echo "skip_publish=true" >> $GITHUB_OUTPUT
              exit 0
            fi
          fi
          echo "Copying newly-generated file"
          cp "${NEW_FILE}" "${EXISTING_FILE}"
      - name: Publish any changes to data files
        id: publishpages
        if: success() && ((steps.copy_updates.outputs.skip_publish != 'true') || (steps.copy_wzlobby.outputs.skip_publish != 'true'))
        working-directory: "./gh-pages"
        env:
          PUSH_PAT: ${{ secrets.WZ2100_UPDATES_PUSH_TOKEN }} # use a PAT
          CHANGELOG_DIR: ${{ steps.preparefolders.outputs.CHANGELOG_DIR }}
        run: |
          git config user.name "wzdev-ci"
          git config user.email "[email protected]"
          git add -A
          timestamp=$(date -u)
          # If there is nothing to commit, skip deployment processing (not an error).
          git commit -m "Generate wz2100 json: ${timestamp}" || { echo "PROCESS_DEPLOYMENT=false" >> $GITHUB_OUTPUT && exit 0; }
          #git pull --rebase
          # Get the new commit's SHA
          NEW_COMMIT_SHA=$(git rev-parse --verify HEAD)
          echo "NEW_COMMIT_SHA=${NEW_COMMIT_SHA}"
          # Push the new commit to the gh-pages branch
          git push "https://${PUSH_PAT}@github.com/Warzone2100/update-data.git" gh-pages:gh-pages
          echo "PROCESS_DEPLOYMENT=true" >> $GITHUB_OUTPUT
          echo "GH_PAGES_BRANCH_COMMIT_SHA=${NEW_COMMIT_SHA}" >> $GITHUB_OUTPUT
          echo "Done."
          # Get the list of files / paths changed in the latest commit
          CHANGED_FILES_LIST="${CHANGELOG_DIR}/changedpaths.txt"
          git diff-tree --no-commit-id --name-only -r -z HEAD | tr '\0' '\n' > "${CHANGED_FILES_LIST}"
          echo "CHANGED_FILES_LIST=${CHANGED_FILES_LIST}" >> $GITHUB_OUTPUT
          exit 0
      - name: 'Wait for Deployment'
        id: deployments
        if: success() && (steps.publishpages.outputs.PROCESS_DEPLOYMENT == 'true')
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GH_PAGES_BRANCH_COMMIT_SHA: ${{ steps.publishpages.outputs.GH_PAGES_BRANCH_COMMIT_SHA }}
        # Explicit shell without `-e`: non-zero exits from the polling loop are expected.
        shell: bash --noprofile --norc {0}
        run: |
          echo "Searching for deployment matching commit: ${GH_PAGES_BRANCH_COMMIT_SHA} ..."
          # Poll until we find a deployment with a sha == the push's commit's SHA
          status=1
          POLL_ATTEMPTS=0
          while [ $POLL_ATTEMPTS -le 15 ]
          do
            # Quadratic backoff: 0, 1, 4, 9, ... seconds between attempts
            sleep_interval=$(( POLL_ATTEMPTS * POLL_ATTEMPTS ))
            if [ $sleep_interval -ne 0 ]; then
              echo "Sleeping ${sleep_interval} seconds..."
              sleep ${sleep_interval}
              echo "Finished sleep"
            fi
            curl -H "Authorization: token ${GITHUB_TOKEN}" -s "https://api.github.com/repos/${GITHUB_REPOSITORY}/deployments" | jq --exit-status --arg desired_sha "${GH_PAGES_BRANCH_COMMIT_SHA}" '.[] | select(.sha == $desired_sha and .environment == "github-pages")' > "deployment.json"
            status=$?
            if [ $status -eq 0 ]; then
              break
            fi
            echo "Not found yet ..."
            (( POLL_ATTEMPTS++ ))
          done
          if [ $status -ne 0 ]; then
            # Did not find matching deployment
            echo "::error ::Failed to find matching deployment for: ${GH_PAGES_BRANCH_COMMIT_SHA}"
            exit 1
          fi
          DEPLOYMENT_ID=$(cat "deployment.json" | jq --raw-output '.id')
          if [ -z "$DEPLOYMENT_ID" ]; then
            echo "::error ::Missing expected '.id' field"
            exit 1
          fi
          echo "Found deployment ID: ${DEPLOYMENT_ID}"
          echo "DEPLOYMENT_ID=${DEPLOYMENT_ID}" >> $GITHUB_OUTPUT
      - name: 'Wait for Deployment Success'
        if: success() && (steps.publishpages.outputs.PROCESS_DEPLOYMENT == 'true')
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          DEPLOYMENT_ID: ${{ steps.deployments.outputs.DEPLOYMENT_ID }}
        shell: bash --noprofile --norc {0}
        run: |
          echo "Waiting for deployment ${DEPLOYMENT_ID} to finish ..."
          # Poll deployment statuses until we find a status with:
          # "state": "success"
          # "environment": "github-pages"
          DEPLOYMENT_STATE=""
          POLL_ATTEMPTS=0
          while [ $POLL_ATTEMPTS -le 12 ]
          do
            # Quadratic backoff, as in the previous step
            sleep_interval=$(( POLL_ATTEMPTS * POLL_ATTEMPTS ))
            if [ $sleep_interval -ne 0 ]; then
              echo "Sleeping ${sleep_interval} seconds..."
              sleep ${sleep_interval}
              echo "Finished sleep"
            fi
            DEPLOYMENT_STATE=$(curl -H "Authorization: token ${GITHUB_TOKEN}" -s "https://api.github.com/repos/${GITHUB_REPOSITORY}/deployments/${DEPLOYMENT_ID}/statuses" | jq --raw-output --exit-status --argjson end_states '["success","error","failure"]' '.[] | select( (.state as $state | $end_states | index($state) != null ) and (.environment == "github-pages") ) | .state')
            status=$?
            (( POLL_ATTEMPTS++ ))
            if [ $status -eq 0 ]; then
              break
            fi
          done
          if [ $status -ne 0 ]; then
            # Did not find matching deployment
            echo "::error ::Deployment did not finish before timeout"
            exit 1
          fi
          echo "Found deployment state: ${DEPLOYMENT_STATE}"
          if [ "$DEPLOYMENT_STATE" != "success" ]; then
            echo "::error ::Deployment did not appear to succeed? (state: ${DEPLOYMENT_STATE})"
            exit 1
          fi
          # Sleep for 10 seconds
          sleep 10
          echo "Done."
      - name: 'Generate Cloudflare Cache Purge URLs List'
        id: purgeurls
        if: success() && (steps.publishpages.outputs.PROCESS_DEPLOYMENT == 'true')
        env:
          CHANGELOG_DIR: ${{ steps.preparefolders.outputs.CHANGELOG_DIR }}
          CHANGED_FILES_LIST: ${{ steps.publishpages.outputs.CHANGED_FILES_LIST }}
        run: |
          PURGE_URLS_DATA_FILES_DIR="${CHANGELOG_DIR}/output"
          python3 "${GITHUB_WORKSPACE}/master/ci/gen_purge_url_batches.py" "data.wz2100.net" "${CHANGED_FILES_LIST}" "${PURGE_URLS_DATA_FILES_DIR}"
          echo "PURGE_URLS_DATA_FILES_DIR=${PURGE_URLS_DATA_FILES_DIR}" >> $GITHUB_OUTPUT
      - name: 'Purge Cloudflare Cache'
        if: success() && (steps.publishpages.outputs.PROCESS_DEPLOYMENT == 'true')
        env:
          CLOUDFLARE_ZONE: ${{ secrets.CLOUDFLARE_WZ2100_ZONE }}
          CLOUDFLARE_CACHEPURGE_TOKEN: ${{ secrets.CLOUDFLARE_WZ2100_CACHEPURGE_TOKEN }}
          PURGE_URLS_DATA_FILES_DIR: ${{ steps.purgeurls.outputs.PURGE_URLS_DATA_FILES_DIR }}
        run: |
          # Needs to handle multiple data files, since each purge command can only send a max of 30 URLs
          for file in "${PURGE_URLS_DATA_FILES_DIR}"/*
          do
            echo "File: $file"
            curl -X POST "https://api.cloudflare.com/client/v4/zones/${CLOUDFLARE_ZONE}/purge_cache" \
              -H "Authorization: Bearer ${CLOUDFLARE_CACHEPURGE_TOKEN}" \
              -H "Content-Type: application/json" \
              --data-binary "@$file"
          done; # file
          echo "Done."
      - name: "Inform lobby server"
        if: success() && (steps.publishpages.outputs.PROCESS_DEPLOYMENT == 'true') && (steps.copy_wzlobby.outputs.skip_publish != 'true')
        env:
          LOBBY_SERVER_INFORM_COMMAND: ${{ secrets.LOBBY_SERVER_INFORM_COMMAND }}
          LOBBY_SERVER_INFORM_PASS: ${{ secrets.LOBBY_SERVER_INFORM_PASS }}
        run: |
          sleep 10
          # Inform the lobby server
          python3 "${GITHUB_WORKSPACE}/master/ci/inform_lobby.py" "lobby.wz2100.net" 9990 "${LOBBY_SERVER_INFORM_COMMAND}" -d "${LOBBY_SERVER_INFORM_PASS}"
          echo "Done."