Skip to content

Commit

Permalink
Revert chunked cache blobs (#9965)
Browse files Browse the repository at this point in the history
  • Loading branch information
devongovett authored Sep 29, 2024
1 parent 5ecb131 commit d1f3ffe
Show file tree
Hide file tree
Showing 8 changed files with 73 additions and 518 deletions.
77 changes: 7 additions & 70 deletions packages/core/cache/src/FSCache.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@ import logger from '@parcel/logger';
import {serialize, deserialize, registerSerializableClass} from '@parcel/core';
// flowlint-next-line untyped-import:off
import packageJson from '../package.json';
import {WRITE_LIMIT_CHUNK} from './constants';

const pipeline: (Readable, Writable) => Promise<void> = promisify(
stream.pipeline,
Expand Down Expand Up @@ -82,88 +81,26 @@ export class FSCache implements Cache {
}
}

#getFilePath(key: string, index: number): string {
return path.join(this.dir, `${key}-${index}`);
}

async #unlinkChunks(key: string, index: number): Promise<void> {
try {
await this.fs.unlink(this.#getFilePath(key, index));
await this.#unlinkChunks(key, index + 1);
} catch (err) {
// If there's an error, no more chunks are left to delete
}
}

hasLargeBlob(key: string): Promise<boolean> {
return this.fs.exists(this.#getFilePath(key, 0));
return this.fs.exists(this._getCachePath(`${key}-large`));
}

async getLargeBlob(key: string): Promise<Buffer> {
const buffers: Promise<Buffer>[] = [];
for (let i = 0; await this.fs.exists(this.#getFilePath(key, i)); i += 1) {
const file: Promise<Buffer> = this.fs.readFile(this.#getFilePath(key, i));

buffers.push(file);
}

return Buffer.concat(await Promise.all(buffers));
getLargeBlob(key: string): Promise<Buffer> {
return this.fs.readFile(this._getCachePath(`${key}-large`));
}

async setLargeBlob(
key: string,
contents: Buffer | string,
options?: {|signal?: AbortSignal|},
): Promise<void> {
const chunks = Math.ceil(contents.length / WRITE_LIMIT_CHUNK);

const writePromises: Promise<void>[] = [];
if (chunks === 1) {
// If there's one chunk, don't slice the content
writePromises.push(
this.fs.writeFile(this.#getFilePath(key, 0), contents, {
signal: options?.signal,
}),
);
} else {
for (let i = 0; i < chunks; i += 1) {
writePromises.push(
this.fs.writeFile(
this.#getFilePath(key, i),
typeof contents === 'string'
? contents.slice(
i * WRITE_LIMIT_CHUNK,
(i + 1) * WRITE_LIMIT_CHUNK,
)
: contents.subarray(
i * WRITE_LIMIT_CHUNK,
(i + 1) * WRITE_LIMIT_CHUNK,
),
{signal: options?.signal},
),
);
}
}

// If there are already files following this chunk, they're old and should be removed
writePromises.push(this.#unlinkChunks(key, chunks));

await Promise.all(writePromises);
await this.fs.writeFile(this._getCachePath(`${key}-large`), contents, {
signal: options?.signal,
});
}

async deleteLargeBlob(key: string): Promise<void> {
const deletePromises: Promise<void>[] = [];

let i = 0;
let filePath = this.#getFilePath(key, i);

while (await this.fs.exists(filePath)) {
deletePromises.push(this.fs.rimraf(filePath));
i += 1;
filePath = this.#getFilePath(key, i);
}

await Promise.all(deletePromises);
await this.fs.rimraf(this._getCachePath(`${key}-large`));
}

async get<T>(key: string): Promise<?T> {
Expand Down
20 changes: 7 additions & 13 deletions packages/core/cache/src/LMDBCache.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,6 @@ import packageJson from '../package.json';
// $FlowFixMe
import lmdb from 'lmdb';

import {FSCache} from './FSCache';

const pipeline: (Readable, Writable) => Promise<void> = promisify(
stream.pipeline,
);
Expand All @@ -24,12 +22,10 @@ export class LMDBCache implements Cache {
dir: FilePath;
// $FlowFixMe
store: any;
fsCache: FSCache;

constructor(cacheDir: FilePath) {
this.fs = new NodeFS();
this.dir = cacheDir;
this.fsCache = new FSCache(this.fs, cacheDir);

this.store = lmdb.open(cacheDir, {
name: 'parcel-cache',
Expand Down Expand Up @@ -95,17 +91,13 @@ export class LMDBCache implements Cache {
return Promise.resolve(this.store.get(key));
}

#getFilePath(key: string, index: number): string {
return path.join(this.dir, `${key}-${index}`);
}

hasLargeBlob(key: string): Promise<boolean> {
return this.fs.exists(this.#getFilePath(key, 0));
return this.fs.exists(path.join(this.dir, key));
}

// eslint-disable-next-line require-await
async getLargeBlob(key: string): Promise<Buffer> {
return this.fsCache.getLargeBlob(key);
return this.fs.readFile(path.join(this.dir, key));
}

// eslint-disable-next-line require-await
Expand All @@ -114,11 +106,13 @@ export class LMDBCache implements Cache {
contents: Buffer | string,
options?: {|signal?: AbortSignal|},
): Promise<void> {
return this.fsCache.setLargeBlob(key, contents, options);
await this.fs.writeFile(path.join(this.dir, key), contents, {
signal: options?.signal,
});
}

deleteLargeBlob(key: string): Promise<void> {
return this.fsCache.deleteLargeBlob(key);
async deleteLargeBlob(key: string): Promise<void> {
await this.fs.rimraf(path.join(this.dir, key));
}

refresh(): void {
Expand Down
4 changes: 0 additions & 4 deletions packages/core/cache/src/constants.js

This file was deleted.

27 changes: 1 addition & 26 deletions packages/core/core/src/Parcel.js
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,6 @@ export default class Parcel {

let result = await this._build({startTime});

await this.#requestTracker.writeToCache();
await this._end();

if (result.type === 'buildFailure') {
Expand All @@ -184,31 +183,10 @@ export default class Parcel {
async _end(): Promise<void> {
this.#initialized = false;

await this.#requestTracker.writeToCache();
await this.#disposable.dispose();
}

async writeRequestTrackerToCache(): Promise<void> {
if (this.#watchQueue.getNumWaiting() === 0) {
// If there's no queued events, we are safe to write the request graph to disk
const abortController = new AbortController();

const unsubscribe = this.#watchQueue.subscribeToAdd(() => {
abortController.abort();
});

try {
await this.#requestTracker.writeToCache(abortController.signal);
} catch (err) {
if (!abortController.signal.aborted) {
// We expect abort errors if we interrupt the cache write
throw err;
}
}

unsubscribe();
}
}

async _startNextBuild(): Promise<?BuildEvent> {
this.#watchAbortController = new AbortController();
await this.#farm.callAllWorkers('clearConfigCache', []);
Expand All @@ -228,9 +206,6 @@ export default class Parcel {
if (!(err instanceof BuildAbortError)) {
throw err;
}
} finally {
// If the build passes or fails, we want to cache the request graph
await this.writeRequestTrackerToCache();
}
}

Expand Down
Loading

0 comments on commit d1f3ffe

Please sign in to comment.