fix(storage): pass 0x-prefixed storage slot instead of base 10
Rubilmax committed May 2, 2023
1 parent bc00fc3 commit 51b63f4
Showing 8 changed files with 492 additions and 192 deletions.
435 changes: 350 additions & 85 deletions dist/index.js

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion dist/index.js.map

Large diffs are not rendered by default.

27 changes: 27 additions & 0 deletions dist/licenses.txt
@@ -1524,6 +1524,33 @@ ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.


shell-quote
MIT
The MIT License

Copyright (c) 2013 James Halliday ([email protected])

Permission is hereby granted, free of charge,
to any person obtaining a copy of this software and
associated documentation files (the "Software"), to
deal in the Software without restriction, including
without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom
the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice
shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

tmp
MIT
The MIT License (MIT)
6 changes: 4 additions & 2 deletions package.json
@@ -48,7 +48,8 @@
"@octokit/core": "^4.2.0",
"@solidity-parser/parser": "^0.16.0",
"js-sha3": "^0.8.0",
"lodash": "^4.17.21"
"lodash": "^4.17.21",
"shell-quote": "^1.8.1"
},
"devDependencies": {
"@actions/exec": "^1.1.1",
@@ -58,6 +59,7 @@
"@types/jest": "^29.5.1",
"@types/lodash": "^4.14.194",
"@types/node": "^18.16.3",
"@types/shell-quote": "^1.7.1",
"@typescript-eslint/eslint-plugin": "^5.59.2",
"@typescript-eslint/parser": "^5.59.2",
"@vercel/ncc": "^0.36.1",
@@ -87,4 +89,4 @@
},
"verbose": true
}
}
}
2 changes: 1 addition & 1 deletion src/check.ts
@@ -287,7 +287,7 @@ const checkAddedStorageSlots = async (

const storage: { [slot: string]: string } = {};
for (const diff of sortDiffs(added)) {
const slot = diff.location.slot.toString();
const slot = "0x" + diff.location.slot.toString(16);

const memoized = storage[slot];
let value = memoized ?? (await provider.getStorageAt(address, slot));
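
This one-line change is the heart of the commit: checkAddedStorageSlots now hex-encodes the slot index before querying the provider, rather than passing the base-10 string produced by toString(). A minimal sketch of the conversion, assuming the slot is held as a native bigint (the exact type used by the action may differ):

// Hypothetical helper, for illustration only (not part of the action's source).
const toHexSlot = (slot: bigint): string => "0x" + slot.toString(16);

toHexSlot(2n);  // "0x2", a 0x-prefixed quantity of the kind eth_getStorageAt expects
toHexSlot(11n); // "0xb", whereas the previous code would have sent "11"

Presumably the base-10 form was rejected or misread by the RPC endpoint, since eth_getStorageAt defines the storage position as a hex-encoded quantity.
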
199 changes: 97 additions & 102 deletions src/index.ts
@@ -40,6 +40,16 @@ const provider = rpcUrl ? getDefaultProvider(rpcUrl) : undefined;
let srcContent: string;
let refCommitHash: string | undefined = undefined;

// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
// throw an uncaught exception.
process.on("uncaughtException", (error) => {
core.setFailed(error);
if (error.stack) core.debug(error.stack);

return process.exit();
});

async function run() {
core.startGroup(`Generate storage layout of contract "${contract}" using foundry forge`);
core.info(`Start forge process`);
@@ -48,130 +58,115 @@ async function run() {
const cmpLayout = parseLayout(cmpContent);
core.endGroup();

try {
const localReportPath = resolve(outReport);
fs.writeFileSync(localReportPath, cmpContent);
const localReportPath = resolve(outReport);
fs.writeFileSync(localReportPath, cmpContent);

core.startGroup(`Upload new report from "${localReportPath}" as artifact named "${outReport}"`);
const uploadResponse = await artifactClient.uploadArtifact(
outReport,
[localReportPath],
dirname(localReportPath),
{ continueOnError: false }
);
core.startGroup(`Upload new report from "${localReportPath}" as artifact named "${outReport}"`);
const uploadResponse = await artifactClient.uploadArtifact(
outReport,
[localReportPath],
dirname(localReportPath),
{ continueOnError: false }
);

if (uploadResponse.failedItems.length > 0)
throw Error("Failed to upload storage layout report.");
if (uploadResponse.failedItems.length > 0) throw Error("Failed to upload storage layout report.");

core.info(`Artifact ${uploadResponse.artifactName} has been successfully uploaded!`);
} catch (error: any) {
return core.setFailed(error.message);
}
core.info(`Artifact ${uploadResponse.artifactName} has been successfully uploaded!`);
core.endGroup();

// cannot use artifactClient because downloads are limited to uploads in the same workflow run
// cf. https://docs.github.com/en/actions/using-workflows/storing-workflow-data-as-artifacts#downloading-or-deleting-artifacts
let artifactId: number | null = null;
if (context.eventName === "pull_request") {
try {
core.startGroup(
`Searching artifact "${baseReport}" on repository "${repository}", on branch "${baseBranch}"`
);
// Note that the artifacts are returned in most recent first order.
for await (const res of octokit.paginate.iterator(octokit.rest.actions.listArtifactsForRepo, {
owner,
repo,
})) {
const artifact = res.data.find(
(artifact) => !artifact.expired && artifact.name === baseReport
);
if (!artifact) {
await new Promise((resolve) => setTimeout(resolve, 800)); // avoid reaching the API rate limit

continue;
}

artifactId = artifact.id;
refCommitHash = artifact.workflow_run?.head_sha;
core.info(
`Found artifact named "${baseReport}" with ID "${artifactId}" from commit "${refCommitHash}"`
);
break;
}
core.endGroup();

if (artifactId) {
core.startGroup(
`Searching artifact "${baseReport}" on repository "${repository}", on branch "${baseBranch}"`
`Downloading artifact "${baseReport}" of repository "${repository}" with ID "${artifactId}"`
);
// Note that the artifacts are returned in most recent first order.
for await (const res of octokit.paginate.iterator(octokit.rest.actions.listArtifactsForRepo, {
const res = await octokit.rest.actions.downloadArtifact({
owner,
repo,
})) {
const artifact = res.data.find(
(artifact) => !artifact.expired && artifact.name === baseReport
);
if (!artifact) {
await new Promise((resolve) => setTimeout(resolve, 800)); // avoid reaching the API rate limit

continue;
}

artifactId = artifact.id;
refCommitHash = artifact.workflow_run?.head_sha;
core.info(
`Found artifact named "${baseReport}" with ID "${artifactId}" from commit "${refCommitHash}"`
);
break;
artifact_id: artifactId,
archive_format: "zip",
});

// @ts-ignore data is unknown
const zip = new Zip(Buffer.from(res.data));
for (const entry of zip.getEntries()) {
core.info(`Loading storage layout report from "${entry.entryName}"`);
srcContent = zip.readAsText(entry);
}
core.endGroup();

if (artifactId) {
core.startGroup(
`Downloading artifact "${baseReport}" of repository "${repository}" with ID "${artifactId}"`
);
const res = await octokit.rest.actions.downloadArtifact({
owner,
repo,
artifact_id: artifactId,
archive_format: "zip",
});

// @ts-ignore data is unknown
const zip = new Zip(Buffer.from(res.data));
for (const entry of zip.getEntries()) {
core.info(`Loading storage layout report from "${entry.entryName}"`);
srcContent = zip.readAsText(entry);
}
core.endGroup();
} else return core.error(`No workflow run found with an artifact named "${baseReport}"`);
} catch (error: any) {
return core.setFailed(error.message);
}
} else throw Error(`No workflow run found with an artifact named "${baseReport}"`);
}

try {
core.info(`Mapping reference storage layout report`);
const srcLayout = parseLayout(srcContent);
core.endGroup();

core.startGroup("Check storage layout");
const diffs = await checkLayouts(srcLayout, cmpLayout, {
address,
provider,
checkRemovals: failOnRemoval,
});
core.info(`Mapping reference storage layout report`);
const srcLayout = parseLayout(srcContent);
core.endGroup();

if (diffs.length > 0) {
core.info(`Parse source code`);
const cmpDef = parseSource(contractAbs);

const formattedDiffs = diffs.map((diff) => {
const formattedDiff = formatDiff(cmpDef, diff);

const title = diffTitles[formattedDiff.type];
const level = diffLevels[formattedDiff.type] || "error";
core[level](formattedDiff.message, {
title,
file: cmpDef.path,
startLine: formattedDiff.loc.start.line,
endLine: formattedDiff.loc.end.line,
startColumn: formattedDiff.loc.start.column,
endColumn: formattedDiff.loc.end.column,
});

return formattedDiff;
core.startGroup("Check storage layout");
const diffs = await checkLayouts(srcLayout, cmpLayout, {
address,
provider,
checkRemovals: failOnRemoval,
});

if (diffs.length > 0) {
core.info(`Parse source code`);
const cmpDef = parseSource(contractAbs);

const formattedDiffs = diffs.map((diff) => {
const formattedDiff = formatDiff(cmpDef, diff);

const title = diffTitles[formattedDiff.type];
const level = diffLevels[formattedDiff.type] || "error";
core[level](formattedDiff.message, {
title,
file: cmpDef.path,
startLine: formattedDiff.loc.start.line,
endLine: formattedDiff.loc.end.line,
startColumn: formattedDiff.loc.start.column,
endColumn: formattedDiff.loc.end.column,
});

if (
formattedDiffs.filter((diff) => diffLevels[diff.type] === "error").length > 0 ||
(failOnRemoval &&
formattedDiffs.filter((diff) => diff.type === StorageLayoutDiffType.VARIABLE_REMOVED)
.length > 0)
)
return core.setFailed(
"Unsafe storage layout changes detected. Please see above for details."
);
}
return formattedDiff;
});

core.endGroup();
} catch (error: any) {
core.setFailed(error.message);
if (
formattedDiffs.filter((diff) => diffLevels[diff.type] === "error").length > 0 ||
(failOnRemoval &&
formattedDiffs.filter((diff) => diff.type === StorageLayoutDiffType.VARIABLE_REMOVED)
.length > 0)
)
throw Error("Unsafe storage layout changes detected. Please see above for details.");
}

core.endGroup();
}

run();
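
Beyond the artifact handling, the main change to src/index.ts is structural: the nested try/catch blocks that each called core.setFailed are removed, and any error thrown inside run() now reaches the process-level uncaughtException handler added at the top of the file. A condensed sketch of the resulting flow, assuming a recent Node runtime where an unhandled promise rejection is re-raised as an uncaught exception by default:

import * as core from "@actions/core";

// Single failure path: errors thrown anywhere in run() end up here.
process.on("uncaughtException", (error) => {
  core.setFailed(error); // marks the action run as failed
  if (error.stack) core.debug(error.stack);
  return process.exit();
});

async function run() {
  // ... generate, upload and compare the storage layout reports ...
  // Unsafe changes are now reported by throwing instead of calling core.setFailed inline:
  throw Error("Unsafe storage layout changes detected. Please see above for details.");
}

run();
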
3 changes: 2 additions & 1 deletion src/input.ts
@@ -1,5 +1,6 @@
import { execSync } from "child_process";
import fs from "fs";
import { quote } from "shell-quote";

import * as parser from "@solidity-parser/parser";
import { ContractDefinition } from "@solidity-parser/parser/src/ast-types";
@@ -19,7 +20,7 @@ const exactify = (variable: StorageVariable): StorageVariableExact => ({
});

export const createLayout = (contract: string, cwd = ".") => {
return execSync(`forge inspect ${contract} storage-layout`, {
return execSync(quote(["forge", "inspect", contract, "storage-layout"]), {
encoding: "utf-8",
cwd,
});
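
The forge command in createLayout is now assembled with shell-quote's quote() instead of plain string interpolation, so a contract identifier containing spaces or shell metacharacters is passed to execSync as a single, properly escaped argument. A small sketch of what quote() produces (the contract path below is a hypothetical example):

import { quote } from "shell-quote";

// Each array element is escaped for a POSIX shell before being joined with spaces.
const cmd = quote(["forge", "inspect", "src/My Token.sol:MyToken", "storage-layout"]);
// cmd === "forge inspect 'src/My Token.sol:MyToken' storage-layout"
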
10 changes: 10 additions & 0 deletions yarn.lock
@@ -1240,6 +1240,11 @@
resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.3.13.tgz#da4bfd73f49bd541d28920ab0e2bf0ee80f71c91"
integrity sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==

"@types/shell-quote@^1.7.1":
version "1.7.1"
resolved "https://registry.yarnpkg.com/@types/shell-quote/-/shell-quote-1.7.1.tgz#2d059091214a02c29f003f591032172b2aff77e8"
integrity sha512-SWZ2Nom1pkyXCDohRSrkSKvDh8QOG9RfAsrt5/NsPQC4UQJ55eG0qClA40I+Gkez4KTQ0uDUT8ELRXThf3J5jw==

"@types/stack-utils@^2.0.0":
version "2.0.1"
resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c"
@@ -3948,6 +3953,11 @@ shebang-regex@^3.0.0:
resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172"
integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==

shell-quote@^1.8.1:
version "1.8.1"
resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.8.1.tgz#6dbf4db75515ad5bac63b4f1894c3a154c766680"
integrity sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA==

side-channel@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf"
