This repository has been archived by the owner on Feb 14, 2023. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 45
/
build_repo.py
337 lines (310 loc) · 13.5 KB
/
build_repo.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
#!/usr/bin/env python3
# coding=utf-8
'''
Parse extensions/*.yaml files & build a directory with following structure:
public/
|-my-extension-1/
| |-1.0.0/ <- version (to avoid static file caching issues)
| | |-index.json <- extension info
| | |-index.html <- extension entrance (component)
| | |-dist <- extension resources
| | |-... <- other files
|-index.json <- repo info, contain all extensions' info
'''
from subprocess import run, PIPE
import sys
import os
import json
import shutil
from zipfile import ZipFile
from socket import gethostname as getlocalhostname
import requests
import yaml
LOCAL_HOSTNAME = getlocalhostname()
def get_environment(base_dir):
    """
    Parse the environment variables from .env, filling in built-in defaults
    for any key the user left out or left empty.

    :param base_dir: directory that may contain the ``.env`` YAML file
    :return: dict with keys ``github`` (nested ``username``/``token``),
             ``public_dir``, ``extensions_dir``, ``domain`` and
             ``stdnotes_extensions_list``
    """
    # Built-in defaults (plain dict instead of parsing a YAML literal --
    # same structure the original YAML string produced).
    default_env = {
        'github': {'username': None, 'token': None},
        'public_dir': 'public',
        'extensions_dir': 'extensions',
        'domain': 'https://domain.com/extensions',
        'stdnotes_extensions_list': 'standardnotes-extensions-list.txt',
    }
    env_var = {}
    env_path = os.path.join(base_dir, ".env")
    if os.path.isfile(env_path):
        with open(env_path) as env_file:
            # yaml.load returns None for an empty file; normalize to {} so
            # the default-merge below cannot raise TypeError.
            env_var = yaml.load(env_file, Loader=yaml.FullLoader) or {}
    # If user hasn't updated the env, copy defaults into the dictionary.
    for key, default in default_env.items():
        if not env_var.get(key):
            env_var[key] = default
    return env_var
def process_zipball(repo_dir, release_version):
    """
    Extract the release zipball into ``repo_dir/release_version`` while
    stripping the archive's single top-level directory, then delete the
    archive zip.

    :param repo_dir: extension's directory inside the public tree
    :param release_version: version string; the archive is expected at
                            ``<repo_dir>/<release_version>.zip``
    """
    archive_path = os.path.join(repo_dir, release_version) + ".zip"
    target_root = os.path.join(repo_dir, release_version)
    with ZipFile(archive_path, 'r') as zipball:
        for member in zipball.namelist():
            # Parse the member path excluding the top/parent/root directory.
            filename = '/'.join(member.split('/')[1:])
            # Skip the root entry itself and dot files.
            if not filename or filename.startswith('.'):
                continue
            target_path = os.path.join(target_root, filename)
            # Directory entries end with '/': just ensure they exist.
            if member.endswith('/'):
                os.makedirs(target_path, exist_ok=True)
                continue
            # Bug fix: the original opened the target twice (leaking a file
            # handle) and silently dropped files whose parent directory had
            # no explicit zip entry. Create the parent up front instead.
            os.makedirs(os.path.dirname(target_path), exist_ok=True)
            with zipball.open(member) as source, \
                    open(target_path, "wb") as target:
                shutil.copyfileobj(source, target)
    # Delete the archive zip.
    os.remove(archive_path)
def git_clone_method(ext_yaml, public_path, ext_has_update):
    """
    Clone the extension's Github repository, resolve its most recent tagged
    version and move the working tree into place when that version is not
    already published.

    :param ext_yaml: parsed extension YAML (must contain a 'github' slug)
    :param public_path: root of the public output directory (also the cwd)
    :param ext_has_update: current update flag; returned True when a new
                           version was fetched
    :return: ``(version, updated_flag)``; ``('0.0', False)`` on any failure
    """
    repo_name = ext_yaml['github'].split('/')[-1]
    tmp_name = f'{repo_name}_tmp'
    tmp_dir = os.path.join(public_path, tmp_name)
    tmp_git_dir = os.path.join(tmp_dir, '.git')
    try:
        run(['git', 'clone', f"https://github.com/{ext_yaml['github']}.git",
             '--quiet', tmp_name],
            check=True)
        # Newest commit that carries a tag.
        ext_last_commit = run(
            ['git', '--git-dir=' + tmp_git_dir,
             'rev-list', '--tags', '--max-count=1'],
            stdout=PIPE, check=True).stdout.decode('utf-8').replace("\n", "")
        # Human-readable tag name for that commit.
        ext_version = run(
            ['git', '--git-dir', tmp_git_dir,
             'describe', '--tags', ext_last_commit],
            stdout=PIPE, check=True).stdout.decode('utf-8').replace("\n", "")
        version_dir = os.path.join(public_path, repo_name, ext_version)
        if os.path.exists(version_dir):
            # Latest version already published: discard the fresh clone.
            shutil.rmtree(tmp_dir)
        else:
            ext_has_update = True
            shutil.move(tmp_dir, version_dir)
            # Strip the .git metadata from the published tree.
            shutil.rmtree(os.path.join(version_dir, '.git'))
        return ext_version, ext_has_update
    except Exception:
        print('Skipping: {:38s}\t(github repository not found)'.format(repo_name))
        return '0.0', False
def parse_extensions(base_dir, extensions_dir, public_dir, base_url, stdnotes_ext_list_path, ghub_headers):
    """
    Build Standard Notes extensions repository using Github meta-data.

    Walks every ``extensions/*.yaml`` spec, fetches (via the Github API) or
    clones (fallback) the latest release of each extension into
    ``public_dir/<repo>/<version>/``, writes a per-extension ``index.json``
    and finally the repository-level ``index.json``.

    :param base_dir: repository root (currently unused inside this function)
    :param extensions_dir: directory holding the *.yaml extension specs
    :param public_dir: output directory that will be served statically
    :param base_url: public base URL used to build the extension endpoint URLs
    :param stdnotes_ext_list_path: path to the Standard Notes extensions list
    :param ghub_headers: Github auth headers dict, or False to use the
                         slower git-clone fallback
    """
    extension_path = extensions_dir
    public_path = public_dir
    # NOTE(review): chdir so git_clone_method's relative clone target lands
    # inside the public directory; undone by os.chdir('..') at the end.
    os.chdir(public_path)
    extensions = []
    std_ext_list = []
    std_ext_list = parse_stdnotes_extensions(stdnotes_ext_list_path)
    # Get all extensions, sorted alphabetically and grouped by type:
    # plain extensions first, then files ending in "theme.yaml".
    extfiles = [x for x in sorted(os.listdir(extension_path)) if not x.endswith('theme.yaml') and x.endswith('.yaml')]
    themefiles = [y for y in sorted(os.listdir(extension_path)) if y.endswith('theme.yaml')]
    extfiles.extend(themefiles)
    for extfile in extfiles:
        with open(os.path.join(extension_path, extfile)) as extyaml:
            ext_yaml = yaml.load(extyaml, Loader=yaml.FullLoader)
        ext_has_update = False
        repo_name = ext_yaml['github'].split('/')[-1]
        repo_dir = os.path.join(public_path, repo_name)
        # If we have a valid github personal access token
        if ghub_headers:
            # Get extension's github release meta-data
            ext_git_info = json.loads(
                requests.get(
                    'https://api.github.com/repos/{github}/releases/latest'.
                    format(**ext_yaml), headers=ghub_headers).text)
            try:
                ext_version = ext_git_info['tag_name']
            except KeyError:
                # No github releases found
                print('Skipping: {:38s}\t(github repository not found)'.format(
                    ext_yaml['name']))
                continue
            # Check if extension directory already exists
            if not os.path.exists(repo_dir):
                os.makedirs(repo_dir)
            # Check if extension with current release already exists
            if not os.path.exists(os.path.join(repo_dir, ext_version)):
                ext_has_update = True
                os.makedirs(os.path.join(repo_dir, ext_version))
                # Grab the release zipball and stream it to disk
                with requests.get(ext_git_info['zipball_url'], headers=ghub_headers,
                                  stream=True) as zipball_stream:
                    with open(
                            os.path.join(repo_dir, ext_version) + ".zip",
                            'wb') as zipball_file:
                        shutil.copyfileobj(zipball_stream.raw, zipball_file)
                # unpack the zipball
                process_zipball(repo_dir, ext_version)
        else:
            # No token: fall back to cloning the whole repository
            ext_version, ext_has_update = git_clone_method(
                ext_yaml, public_path, ext_has_update)
        if extfile in std_ext_list:
            # Re-namespace the package id with the local hostname, see
            # https://github.com/standardnotes/desktop/issues/789
            ext_id = ext_yaml['id'].rsplit('.', 1)[1]
            ext_yaml['id'] = '%s.%s' % (LOCAL_HOSTNAME, ext_id)
        # Build extension info (stateless)
        # https://domain.com/sub-domain/my-extension/index.json
        extension = dict(
            identifier=ext_yaml['id'],
            name=ext_yaml['name'],
            content_type=ext_yaml['content_type'],
            area=ext_yaml.get('area', None),
            version=ext_version,
            description=ext_yaml.get('description', None),
            marketing_url=ext_yaml.get('marketing_url', None),
            thumbnail_url=ext_yaml.get('thumbnail_url', None),
            valid_until='2030-05-16T18:35:33.000Z',
            url='/'.join([base_url, repo_name, ext_version, ext_yaml['main']]),
            download_url='https://github.com/{}/archive/{}.zip'.format(
                ext_yaml['github'], ext_version),
            latest_url='/'.join([base_url, repo_name, 'index.json']),
            flags=ext_yaml.get('flags', []),
            dock_icon=ext_yaml.get('dock_icon', {}),
            layerable=ext_yaml.get('layerable', None),
            statusBar=ext_yaml.get('statusBar', None),
        )
        # Strip empty values
        extension = {k: v for k, v in extension.items() if v}
        # Check if extension is already up-to-date
        if ext_has_update:
            # Generate JSON file for each extension
            with open(os.path.join(public_path, repo_name, 'index.json'),
                      'w') as ext_json:
                json.dump(extension, ext_json, indent=4)
            if extfile.endswith("theme.yaml"):
                print('Theme: {:34s} {:6s}\t(updated)'.format(
                    ext_yaml['name'], ext_version.strip('v')))
            else:
                print('Extension: {:30s} {:6s}\t(updated)'.format(
                    ext_yaml['name'], ext_version.strip('v')))
        else:
            # ext already up-to-date
            if extfile.endswith("theme.yaml"):
                print('Theme: {:34s} {:6s}\t(already up-to-date)'.format(
                    ext_yaml['name'], ext_version.strip('v')))
            else:
                print('Extension: {:30s} {:6s}\t(already up-to-date)'.format(
                    ext_yaml['name'], ext_version.strip('v')))
        extensions.append(extension)
    os.chdir('..')
    # Generate the main repository index JSON
    # https://domain.com/sub-domain/my-index.json
    with open(os.path.join(public_path, 'index.json'), 'w') as ext_json:
        json.dump(
            dict(
                content_type='SN|Repo',
                valid_until='2030-05-16T18:35:33.000Z',
                packages=extensions,
            ),
            ext_json,
            indent=4,
        )
    print("\nProcessed: {:20s}{} extensions. (Components: {}, Themes: {})".format("", len(extfiles), len(extfiles)-len(themefiles), len(themefiles)))
    print("Repository Endpoint URL: {:6s}{}/index.json".format("", base_url))
def parse_stdnotes_extensions(stdnotes_ext_list_path):
    """
    To circumvent the issue https://github.com/standardnotes/desktop/issues/789
    we re-namespace Standard Notes' extension package ids with the local
    hostname; this reads the list of yaml files whose ids must be rewritten.

    :param stdnotes_ext_list_path: path to the newline-separated list file
                                   ('#' lines are comments)
    :return: list of extension yaml filenames; empty when the file is missing
    """
    if not os.path.exists(stdnotes_ext_list_path):
        print("\n⚠️ WARNING: Unable to locate standard notes extensions list file, make sure you've \
cloned the source repository properly\
")
        print("You may encounter issues registering extensions, checkout ")
        print("https://github.com/standardnotes/desktop/issues/789 for more details\n")
        # Bug fix: the original implicitly returned None here, which made
        # the caller's `extfile in std_ext_list` membership test crash.
        return []
    std_exts_list = []
    with open(stdnotes_ext_list_path) as list_file:
        for line in list_file:
            # Skip comment lines.
            if not line.startswith('#'):
                std_exts_list.append(line.rstrip())
    return std_exts_list
def main():
    """
    Entry point: load configuration from .env, verify the optional Github
    credentials, then build the extensions repository under public_dir.
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    # Get environment variables
    env_var = get_environment(base_dir)
    base_url = env_var["domain"]
    extensions_dir = env_var['extensions_dir']
    if os.path.exists(os.path.join(base_dir, extensions_dir)):
        extensions_dir = os.path.join(base_dir, extensions_dir)
    else:
        print("\n⚠️ WARNING: Unable to locate extensions directory, make sure you've \
cloned the source repository properly and try again")
        sys.exit(1)
    public_dir = env_var['public_dir']
    # Create the output directory on first run.
    if not os.path.exists(os.path.join(base_dir, public_dir)):
        os.makedirs(os.path.join(base_dir, public_dir))
    public_dir = os.path.join(base_dir, public_dir)
    stdnotes_ext_list_path = os.path.join(base_dir, env_var['stdnotes_extensions_list'])
    ghub_auth_complete = False
    ghub_headers = False
    if env_var['github']['token']:
        # Bug fix: the original built this with `f'token %s' % token` -- an
        # f-string with no placeholder, %-formatted afterwards. Use a real
        # format call instead.
        ghub_headers = {'Authorization': 'token {}'.format(env_var['github']['token'])}
        try:
            ghub_verify = requests.get("https://api.github.com/", headers=ghub_headers)
            if not ghub_verify.status_code == 200:
                # Bug fix: Github no longer sends a 'status' response header,
                # so headers['status'] raised KeyError; report the status
                # from the response object instead.
                print("ERROR: %s %s " % (ghub_verify.status_code, ghub_verify.reason))
                print(
                    "Bad Github credentials in the .env file, check and try again."
                )
                sys.exit(1)
            ghub_auth_complete = True
        except Exception as e:
            print("ERROR: %s" % e)
    if not ghub_auth_complete:
        # Environment file missing or token invalid: fall back to git clone.
        print(
            "Environment variables not set (have a look at env.sample). Using git-clone method instead"
        )
        input(
            "⚠️ WARNING: This is an in-efficient process\nPress any key to go ahead anyway: ")
    # Build extensions
    parse_extensions(base_dir, extensions_dir, public_dir, base_url, stdnotes_ext_list_path, ghub_headers)
    sys.exit(0)
if __name__ == '__main__':
    # Run the builder only when executed as a script, not on import.
    main()