Skip to content

Commit

Permalink
fix(api): blobify json
Browse files Browse the repository at this point in the history
  • Loading branch information
annelhote committed Apr 12, 2024
1 parent 965b5aa commit af2b3fe
Show file tree
Hide file tree
Showing 2 changed files with 46 additions and 6 deletions.
4 changes: 2 additions & 2 deletions client/src/utils/works.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ const b64decode = (str) => {
const binaryString = window.atob(str);
const len = binaryString.length;
const bytes = new Uint8Array(new ArrayBuffer(len));
for (let i = 0; i < len; i++) {
for (let i = 0; i < len; i += 1) {
bytes[i] = binaryString.charCodeAt(i);
}
return bytes;
Expand Down Expand Up @@ -43,7 +43,7 @@ const getData = async (options) => {
const resAffiliations = await unzipData(affiliations);
const resDatasets = await unzipData(datasets);
const resPublications = await unzipData(publications);
const data = { affiliations: resAffiliations, datasets: resDatasets, publications: resPublications };
const data = { affiliations: resAffiliations.affiliations, datasets: resDatasets, publications: resPublications };
return data;
}
console.error(responseAffiliations);
Expand Down
48 changes: 44 additions & 4 deletions server/src/routes/works.routes.js
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,51 @@ const arrayBufferToBase64 = (buffer) => {
return btoa(binary);
};

/**
 * Serializes a JSON-compatible value into a Blob without materializing one
 * giant string (avoids V8's max-string-length limit on very large payloads).
 * Mirrors JSON.stringify semantics: honors toJSON(), omits object entries
 * whose value is undefined/function/symbol, renders such values as null
 * inside arrays, and throws TypeError on circular structures.
 *
 * @param {*} json - Value to serialize.
 * @returns {Blob|Uint8Array} Blob for objects/arrays, encoded bytes for primitives.
 * @throws {TypeError} When the structure is circular.
 */
function jsonToBlob(json) {
  const textEncoder = new TextEncoder();
  // Ancestor chain for cycle detection. Stack semantics (add before
  // recursing, delete after) so a shared, non-circular reference appearing
  // twice does NOT throw — same behavior as JSON.stringify.
  const seen = new WeakSet();

  // True when JSON.stringify would not serialize this value
  // (omitted from objects, rendered as null inside arrays).
  const isOmitted = (value) => typeof value === 'function'
    || typeof value === 'undefined'
    || typeof value === 'symbol';

  function processValue(value) {
    if (value && typeof value.toJSON === 'function') {
      // eslint-disable-next-line no-param-reassign
      value = value.toJSON();
    }

    if (typeof value === 'object' && value !== null) {
      if (seen.has(value)) {
        throw new TypeError('Converting circular structure to JSON');
      }
      seen.add(value);

      const blobParts = [];
      if (Array.isArray(value)) {
        for (let i = 0; i < value.length; i += 1) {
          // JSON.stringify turns non-serializable array items into null
          blobParts.push(isOmitted(value[i]) ? textEncoder.encode('null') : processValue(value[i]));
          if (i !== value.length - 1) blobParts.push(textEncoder.encode(','));
        }
      } else {
        // JSON.stringify silently drops entries with non-serializable values
        const entries = Object.entries(value).filter(([, val]) => !isOmitted(val));
        for (let i = 0; i < entries.length; i += 1) {
          const [key, val] = entries[i];
          blobParts.push(textEncoder.encode(`${JSON.stringify(key)}:`), processValue(val));
          if (i !== entries.length - 1) blobParts.push(textEncoder.encode(','));
        }
      }

      // Done with this subtree: value is no longer an ancestor.
      seen.delete(value);

      const startBracket = Array.isArray(value) ? '[' : '{';
      const endBracket = Array.isArray(value) ? ']' : '}';
      return new Blob([textEncoder.encode(startBracket), ...blobParts, textEncoder.encode(endBracket)]);
    }
    if (isOmitted(value)) {
      return textEncoder.encode('null');
    }
    // Primitives: delegate to JSON.stringify for correct escaping/formatting
    return textEncoder.encode(JSON.stringify(value));
  }

  return processValue(json);
}

const compressData = async (result) => {
// Convert JSON to Stream
const stream = new Blob([JSON.stringify(result)], {
type: 'application/json',
}).stream();
const stream = jsonToBlob(result).stream();
const compressedReadableStream = stream.pipeThrough(
new CompressionStream('gzip'),
);
Expand Down Expand Up @@ -99,6 +139,7 @@ const getData = async ({ options, type }) => {
console.timeEnd(`5. Query ${queryId} | Facet ${options.affiliationStrings}`);
// Build and serialize response
console.time(`6. Query ${queryId} | Serialization ${options.affiliationStrings}`);
const resAffiliations = await compressData({ affiliations: uniqueAffiliations });
const resDatasets = await compressData({
publishers: datasetsPublishers,
results: datasets,
Expand All @@ -111,7 +152,6 @@ const getData = async ({ options, type }) => {
types: publicationsTypes,
years: publicationsYears,
});
const resAffiliations = await compressData(uniqueAffiliations);
const result = {
affiliations: resAffiliations,
datasets: resDatasets,
Expand Down

0 comments on commit af2b3fe

Please sign in to comment.