
Commit

fixing tests
kylevillegas93 committed Oct 25, 2024
1 parent beec0a1 commit 1973827
Showing 2 changed files with 10 additions and 8 deletions.
managers/sfrRecord.py: 7 changes (5 additions & 2 deletions)
@@ -258,10 +258,13 @@ def parseInstance(self, workData, editionData, rec):
         editionData['dcdw_uuids'].append(rec.uuid.hex)
 
     def buildItems(self, editionData, rec, itemContributors):
-        max_part_number = max(int(item.split('|')[0]) for item in rec.has_part)
+        number_of_parts = max(
+            max((int(part.split('|')[0]) for part in rec.has_part if part.split('|')[0].isdigit()), default=0),
+            len(rec.has_part)
+        )
 
         startPos = len(editionData['items']) - 1
-        editionData['items'].extend([None] * max_part_number)
+        editionData['items'].extend([None] * number_of_parts)
 
         for item in rec.has_part:
             no, uri, source, linkType, flags = tuple(item.split('|'))
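
For context on the sfrRecord.py change above: the old expression assumed every has_part entry began with a numeric part number and raised ValueError otherwise (or on an empty list), while the new number_of_parts takes the larger of the highest numeric part number and the total part count. A minimal standalone sketch of that behaviour, not taken from the repository, using made-up pipe-delimited part strings in the 'number|uri|source|type|flags' shape unpacked later in buildItems:

    # Minimal sketch (not from the repository) of the new sizing logic in buildItems.
    # The has_part values below are invented examples in the same pipe-delimited shape.
    has_part = [
        '1|https://example.org/part-1|example|application/pdf|{}',
        '3|https://example.org/part-3|example|application/pdf|{}',
        'cover|https://example.org/cover|example|image/png|{}',   # non-numeric part number
    ]

    number_of_parts = max(
        max((int(part.split('|')[0]) for part in has_part if part.split('|')[0].isdigit()), default=0),
        len(has_part)
    )

    # Highest numeric part number is 3 and there are 3 entries, so this prints 3.
    # The old max(int(item.split('|')[0]) for item in has_part) would raise
    # ValueError on the 'cover' entry and on an empty has_part list.
    print(number_of_parts)

    # Mirrors editionData['items'].extend([None] * number_of_parts) in the diff.
    items = [None] * number_of_parts
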
processes/cluster.py: 11 changes (5 additions & 6 deletions)
@@ -28,9 +28,9 @@ def __init__(self, *args):
 
         self.createRedisClient()
 
-        # self.createElasticConnection()
-        # self.createElasticSearchIngestPipeline()
-        # self.createElasticSearchIndex()
+        self.createElasticConnection()
+        self.createElasticSearchIngestPipeline()
+        self.createElasticSearchIndex()
 
     def runProcess(self):
         try:
@@ -55,7 +55,6 @@ def cluster_records(self, full=False, start_datetime=None):
             .filter(Record.cluster_status == False)
             .filter(Record.source != 'oclcClassify')
             .filter(Record.source != 'oclcCatalog')
-            .filter(Record.uuid == '2ddb63df-e4f5-479a-ad95-c03702c577b2')
         )
 
         if not full:
@@ -84,7 +83,7 @@ def cluster_records(self, full=False, start_datetime=None):
                 raise e
 
             if len(works_to_index) >= self.CLUSTER_BATCH_SIZE:
-                # self.update_elastic_search(works_to_index, work_ids_to_delete)
+                self.update_elastic_search(works_to_index, work_ids_to_delete)
                 logger.info(f'Clustered {len(works_to_index)} works')
                 works_to_index = []
 
@@ -94,7 +93,7 @@ def cluster_records(self, full=False, start_datetime=None):
            self.session.commit()
 
        logger.info(f'Clustered {len(works_to_index)} works')
-        # self.update_elastic_search(works_to_index, work_ids_to_delete)
+        self.update_elastic_search(works_to_index, work_ids_to_delete)
        self.delete_stale_works(work_ids_to_delete)
 
        self.session.commit()
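
For reference, the re-enabled calls in cluster_records follow a simple buffered-flush pattern: clustered works accumulate in works_to_index, update_elastic_search runs whenever the buffer reaches CLUSTER_BATCH_SIZE, and a final flush picks up whatever remains after the loop. Below is a standalone sketch of that control flow only, with a stubbed update function standing in for the real Elasticsearch client; any name not visible in the diff above is hypothetical.

    # Sketch of the buffered-flush pattern in cluster_records; not the project's code.
    CLUSTER_BATCH_SIZE = 2  # the real value lives on the process class; 2 keeps the demo short

    def update_elastic_search(works_to_index, work_ids_to_delete):
        # Stand-in for the real Elasticsearch bulk update.
        print(f'indexing {len(works_to_index)} works, deleting {len(work_ids_to_delete)} stale ids')

    def cluster_all(records):
        works_to_index = []
        work_ids_to_delete = set()

        for record in records:
            works_to_index.append(f'work-for-{record}')   # hypothetical clustering result

            if len(works_to_index) >= CLUSTER_BATCH_SIZE:
                update_elastic_search(works_to_index, work_ids_to_delete)
                print(f'Clustered {len(works_to_index)} works')
                works_to_index = []

        # Final flush for anything left over once the loop ends, as in the diff.
        print(f'Clustered {len(works_to_index)} works')
        update_elastic_search(works_to_index, work_ids_to_delete)

    cluster_all(['rec-1', 'rec-2', 'rec-3'])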
