# findRecordsWithKeyAndValue.py
# forked from ehanson8/dspace-data-collection
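#
# This script authenticates to a DSpace REST API, finds every item whose
# metadata field searchKey exactly equals searchValue, and writes the matching
# item links and handle URIs to a timestamped CSV. Example invocation, with a
# hypothetical key and value:
#
#   python findRecordsWithKeyAndValue.py -k dc.type -v Article
#
# Omit -k/-v to be prompted for them interactively.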
import requests
import secret
import time
import argparse
from datetime import datetime
import pandas as pd

# Optionally switch to a production secrets module; fall back to the stage
# secrets imported above as `secret`
secretVersion = input('To edit production, enter the secret file name: ')
if secretVersion != '':
    try:
        secret = __import__(secretVersion)
        print('Using Production')
    except ImportError:
        print('Using Stage')
else:
    print('Using Stage')

# Accept the metadata key and value as flags, prompting for any that are missing
parser = argparse.ArgumentParser()
parser.add_argument('-k', '--searchKey', help='the key to be searched')
parser.add_argument('-v', '--searchValue', help='the value to be searched')
args = parser.parse_args()

if args.searchKey:
    searchKey = args.searchKey
else:
    searchKey = input('Enter the searchKey: ')
if args.searchValue:
    searchValue = args.searchValue
else:
    searchValue = input('Enter the searchValue: ')
baseURL = secret.baseURL
email = secret.email
password = secret.password
filePath = secret.filePath
skippedCollections = secret.skippedCollections
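
# A minimal sketch of the secret module this script expects, inferred from the
# attributes read above; every value here is a placeholder, not a real endpoint
# or credential:
#
#   baseURL = 'https://dspace.example.edu'
#   email = 'admin@example.edu'
#   password = 'CHANGEME'
#   filePath = '/path/to/output/'
#   skippedCollections = []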

# Authenticate against the DSpace REST API and store the session cookie
startTime = time.time()
data = {'email': email, 'password': password}
header = {'content-type': 'application/json', 'accept': 'application/json'}
session = requests.post(baseURL+'/rest/login', headers=header, params=data).cookies['JSESSIONID']
cookies = {'JSESSIONID': session}
status = requests.get(baseURL+'/rest/status', headers=header, cookies=cookies).json()
userFullName = status['fullname']
print('authenticated')

# Page through /rest/filtered-items, collecting the link of every matching
# item; items starts as '' so the loop body runs at least once
offset = 0
items = ''
itemLinks = []
while items != []:
    endpoint = baseURL+'/rest/filtered-items?query_field[]='+searchKey+'&query_op[]=equals&query_val[]='+searchValue+'&limit=200&offset='+str(offset)
    print(endpoint)
    response = requests.get(endpoint, headers=header, cookies=cookies).json()
    items = response['items']
    for item in items:
        itemLinks.append(item['link'])
    offset = offset + 200
    print(offset)
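
# For illustration, with the hypothetical values from the header comment and
# the placeholder baseURL above, the first request URL would expand to:
#
#   https://dspace.example.edu/rest/filtered-items?query_field[]=dc.type&query_op[]=equals&query_val[]=Article&limit=200&offset=0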

# Retrieve each matched item's metadata, keeping the matched key/value pair
# and the item's handle URI
all_items = []
for itemLink in itemLinks:
    metadata = requests.get(baseURL+itemLink+'/metadata', headers=header, cookies=cookies).json()
    itemDict = {'itemLink': itemLink}
    for item in metadata:
        key = item['key']
        value = item['value']
        if key == searchKey and value == searchValue:
            itemDict[key] = value
        elif key == 'dc.identifier.uri':
            itemDict[key] = value
    if itemDict.get(searchKey):
        all_items.append(itemDict)

# Write the results to a timestamped CSV
df = pd.DataFrame.from_dict(all_items)
print(df.head(15))
dt = datetime.now().strftime('%Y-%m-%d %H.%M.%S')
newFile = 'recordsWith'+searchKey+'And'+searchValue+'_'+dt+'.csv'
df.to_csv(path_or_buf=newFile, header=True, index=False)
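
# Each row of the CSV holds itemLink and the matched key, plus
# dc.identifier.uri when the item has one. Continuing the hypothetical
# example, the output file would be named something like:
#
#   recordsWithdc.typeAndArticle_2024-01-15 12.00.00.csv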

# Log out of the REST API and report elapsed time as h:mm:ss
requests.post(baseURL+'/rest/logout', headers=header, cookies=cookies)
elapsedTime = time.time() - startTime
m, s = divmod(elapsedTime, 60)
h, m = divmod(m, 60)
print("%d:%02d:%02d" % (h, m, s))