This repository has been archived by the owner on Feb 12, 2019. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 2
/
identifyItemsMissingKeyInCommunity.py
93 lines (81 loc) · 3.37 KB
/
identifyItemsMissingKeyInCommunity.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
import json
import requests
import secrets
import time
import csv
from datetime import datetime
import urllib3
import argparse
# Ask which secrets module to use: an empty answer (or a module that cannot
# be imported) leaves the stage configuration from the top-of-file import.
secretsVersion = input('To edit production server, enter the name of the secrets file: ')
editingProduction = False
if secretsVersion != '':
    try:
        secrets = __import__(secretsVersion)
        editingProduction = True
    except ImportError:
        # Fall back silently to the stage secrets already imported.
        pass
print('Editing Production' if editingProduction else 'Editing Stage')
# Command-line arguments; each falls back to an interactive prompt when absent.
parser = argparse.ArgumentParser()
parser.add_argument('-k', '--key', help='the key to be searched. optional - if not provided, the script will ask for input')
# Fixed typo in user-facing help text: "retreive" -> "retrieve".
parser.add_argument('-i', '--handle', help='handle of the community to retrieve. optional - if not provided, the script will ask for input')
args = parser.parse_args()

# Metadata key whose absence is being reported on.
if args.key:
    key = args.key
else:
    key = input('Enter the key to be searched: ')

# NOTE(review): the prompt says "collection handle" but the value is resolved
# via /rest/handle as a *community* below — confirm the intended wording.
if args.handle:
    handle = args.handle
else:
    handle = input('Enter collection handle: ')
# Self-signed certs are expected on stage; silence the TLS warnings.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Connection settings supplied by the selected (stage or production) secrets module.
baseURL = secrets.baseURL
email = secrets.email
password = secrets.password
filePath = secrets.filePath
verify = secrets.verify
skippedCollections = secrets.skippedCollections

startTime = time.time()

header = {'content-type': 'application/json', 'accept': 'application/json'}
headerFileUpload = {'accept': 'application/json'}
data = {'email': email, 'password': password}

# Log in to the DSpace REST API; the JSESSIONID cookie authenticates all
# subsequent calls.
# NOTE(review): credentials are sent as URL query params (params=) — confirm
# the API requires this; a request body would keep them out of server logs.
session = requests.post(baseURL + '/rest/login', headers=header, verify=verify, params=data).cookies['JSESSIONID']
cookies = {'JSESSIONID': session}

status = requests.get(baseURL + '/rest/status', headers=header, cookies=cookies, verify=verify).json()
userFullName = status['fullname']
print('authenticated')

# Resolve the supplied handle to a community UUID, then list its collections.
endpoint = baseURL + '/rest/handle/' + handle
community = requests.get(endpoint, headers=header, cookies=cookies, verify=verify).json()
communityID = community['uuid']
collections = requests.get(baseURL + '/rest/communities/' + str(communityID) + '/collections', headers=header, cookies=cookies, verify=verify).json()
# Build the repeated &collSel[]=<uuid> query-string fragment that restricts
# the filtered-items search to this community's collections.
# (Replaces a manual range(len(...)) loop with an idiomatic join.)
# NOTE(review): secrets.skippedCollections is loaded but never consulted here —
# confirm whether those collections were meant to be excluded from this list.
collSels = ''.join('&collSel[]=' + collection['uuid'] for collection in collections)
# CSV report of items in the community that lack the metadata key.
# newline='' is the documented way to open a file for csv.writer; the handle
# is kept so it can be closed after the report is written.
csvFile = open(filePath + 'recordsMissing' + key + datetime.now().strftime('%Y-%m-%d %H.%M.%S') + '.csv', 'w', newline='')
f = csv.writer(csvFile)
f.writerow(['itemID'] + ['key'])

# Page through the filtered-items endpoint 200 items at a time, collecting
# the link of every item the server reports as missing the key.
offset = 0
items = ''
itemLinks = []
while items != []:
    endpoint = baseURL + '/rest/filtered-items?query_field[]=' + key + '&query_op[]=doesnt_exist&query_val[]=' + collSels + '&limit=200&offset=' + str(offset)
    print(endpoint)
    response = requests.get(endpoint, headers=header, cookies=cookies, verify=verify).json()
    items = response['items']
    for item in items:
        # BUG FIX: the original never appended here, so itemLinks stayed
        # empty and the verification loop below did nothing.
        itemLinks.append(item['link'])
    offset = offset + 200
    print(offset)

# Double-check each candidate item's metadata and record genuine misses.
for itemLink in itemLinks:
    metadata = requests.get(baseURL + itemLink + '/metadata', headers=header, cookies=cookies, verify=verify).json()
    # BUG FIX: rebuilt per item — the original accumulated keys across items,
    # so any earlier item having the key masked every later item.
    itemMetadataProcessed = [metadataElement['key'] for metadataElement in metadata]
    if key not in itemMetadataProcessed:
        # Write both declared columns (the original omitted the key column).
        f.writerow([itemLink] + [key])

csvFile.close()

logout = requests.post(baseURL + '/rest/logout', headers=header, cookies=cookies, verify=verify)

elapsedTime = time.time() - startTime
m, s = divmod(elapsedTime, 60)
h, m = divmod(m, 60)
print('Total script run time: ', '%d:%02d:%02d' % (h, m, s))