diff --git a/.github/workflows/efps.yml b/.github/workflows/efps.yml new file mode 100644 index 00000000000..d46e9c6d557 --- /dev/null +++ b/.github/workflows/efps.yml @@ -0,0 +1,95 @@ +name: eFPS Test +on: + pull_request: + workflow_dispatch: + inputs: + reference_tag: + description: "Reference tag for comparison" + required: true + default: "latest" + enable_profiler: + description: "Enable profiler" + required: true + type: boolean + default: false + +jobs: + install: + timeout-minutes: 30 + runs-on: ubuntu-latest + env: + TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }} + TURBO_TEAM: ${{ vars.TURBO_TEAM }} + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: 18 + + - uses: pnpm/action-setup@v4 + name: Install pnpm + id: pnpm-install + with: + run_install: false + + - name: Get pnpm store directory + id: pnpm-cache + shell: bash + run: | + echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT + + - name: Cache node modules + id: cache-node-modules + uses: actions/cache@v4 + env: + cache-name: cache-node-modules + with: + path: ${{ steps.pnpm-cache.outputs.STORE_PATH }} + key: ${{ runner.os }}-pnpm-store-${{ env.cache-name }}-${{ hashFiles('**/pnpm-lock.yaml') }} + restore-keys: | + v1-${{ runner.os }}-pnpm-store-${{ env.cache-name }}- + v1-${{ runner.os }}-pnpm-store- + v1-${{ runner.os }}- + + - name: Install project dependencies + run: pnpm install + + - name: Store Playwright's Version + run: | + PLAYWRIGHT_VERSION=$(npx playwright --version | sed 's/Version //') + echo "Playwright's Version: $PLAYWRIGHT_VERSION" + echo "PLAYWRIGHT_VERSION=$PLAYWRIGHT_VERSION" >> $GITHUB_ENV + + - name: Cache Playwright Browsers for Playwright's Version + id: cache-playwright-browsers + uses: actions/cache@v4 + with: + path: ~/.cache/ms-playwright + key: playwright-browsers-${{ env.PLAYWRIGHT_VERSION }} + + - name: Install Playwright Browsers + if: steps.cache-playwright-browsers.outputs.cache-hit != 'true' + run: npx playwright install --with-deps + + - name: Run eFPS tests + env: + VITE_PERF_EFPS_PROJECT_ID: ${{ secrets.PERF_EFPS_PROJECT_ID }} + VITE_PERF_EFPS_DATASET: ${{ secrets.PERF_EFPS_DATASET }} + PERF_EFPS_SANITY_TOKEN: ${{ secrets.PERF_EFPS_SANITY_TOKEN }} + REFERENCE_TAG: ${{ github.event.inputs.reference_tag || 'latest' }} + ENABLE_PROFILER: ${{ github.event.inputs.enable_profiler || false }} + run: pnpm efps:test + + - name: PR comment with report + uses: thollander/actions-comment-pull-request@fabd468d3a1a0b97feee5f6b9e499eab0dd903f6 # v2 + if: ${{ github.event_name == 'pull_request' }} + with: + comment_tag: "efps-report" + filePath: ${{ github.workspace }}/perf/efps/results/benchmark-results.md + + - uses: actions/upload-artifact@v3 + if: always() + with: + name: efps-report + path: perf/efps/results + retention-days: 30 diff --git a/dev/embedded-studio/package.json b/dev/embedded-studio/package.json index 1d245fd10e5..e60a14dff7c 100644 --- a/dev/embedded-studio/package.json +++ b/dev/embedded-studio/package.json @@ -3,7 +3,7 @@ "version": "3.59.0", "private": true, "scripts": { - "build": "tsc && vite build", + "build": "tsc && vite build && sanity manifest extract", "dev": "vite", "preview": "vite preview" }, diff --git a/dev/embedded-studio/sanity.cli.ts b/dev/embedded-studio/sanity.cli.ts new file mode 100644 index 00000000000..fac247bf8cf --- /dev/null +++ b/dev/embedded-studio/sanity.cli.ts @@ -0,0 +1,8 @@ +import {defineCliConfig} from 'sanity/cli' + +export default defineCliConfig({ + api: { + projectId: 'ppsg7ml5', + dataset: 'test', + }, 
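+  // Same project/dataset as the adjacent sanity.config.ts; the Sanity CLI reads this file for commands such as the `sanity manifest extract` step added to this package's build script.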
+}) diff --git a/dev/embedded-studio/sanity.config.ts b/dev/embedded-studio/sanity.config.ts new file mode 100644 index 00000000000..c49026536a2 --- /dev/null +++ b/dev/embedded-studio/sanity.config.ts @@ -0,0 +1,34 @@ +import {defineConfig, defineType} from 'sanity' +import {structureTool} from 'sanity/structure' + +const BLOG_POST_SCHEMA = defineType({ + type: 'document', + name: 'blogPost', + title: 'Blog post', + fields: [ + { + type: 'string', + name: 'title', + title: 'Title', + }, + ], +}) + +export const SCHEMA_TYPES = [BLOG_POST_SCHEMA] + +export default defineConfig({ + projectId: 'ppsg7ml5', + dataset: 'test', + + document: { + unstable_comments: { + enabled: true, + }, + }, + + schema: { + types: SCHEMA_TYPES, + }, + + plugins: [structureTool()], +}) diff --git a/dev/embedded-studio/src/App.tsx b/dev/embedded-studio/src/App.tsx index d07913d0206..7ee792a216b 100644 --- a/dev/embedded-studio/src/App.tsx +++ b/dev/embedded-studio/src/App.tsx @@ -1,46 +1,8 @@ import {Button, Card, Flex, studioTheme, ThemeProvider, usePrefersDark} from '@sanity/ui' import {useCallback, useMemo, useState} from 'react' -import { - defineConfig, - defineType, - Studio, - StudioLayout, - StudioProvider, - type StudioThemeColorSchemeKey, -} from 'sanity' -import {structureTool} from 'sanity/structure' +import {Studio, StudioLayout, StudioProvider, type StudioThemeColorSchemeKey} from 'sanity' -const BLOG_POST_SCHEMA = defineType({ - type: 'document', - name: 'blogPost', - title: 'Blog post', - fields: [ - { - type: 'string', - name: 'title', - title: 'Title', - }, - ], -}) - -const SCHEMA_TYPES = [BLOG_POST_SCHEMA] - -const config = defineConfig({ - projectId: 'ppsg7ml5', - dataset: 'test', - - document: { - unstable_comments: { - enabled: true, - }, - }, - - schema: { - types: SCHEMA_TYPES, - }, - - plugins: [structureTool()], -}) +import config from '../sanity.config' export function App() { const prefersDark = usePrefersDark() diff --git a/dev/starter-next-studio/.gitignore b/dev/starter-next-studio/.gitignore index a680367ef56..f0f5197150f 100644 --- a/dev/starter-next-studio/.gitignore +++ b/dev/starter-next-studio/.gitignore @@ -1 +1,4 @@ .next + +public/static/*.create-schema.json +public/static/create-manifest.json diff --git a/dev/starter-next-studio/components/Studio.tsx b/dev/starter-next-studio/components/Studio.tsx index 00557ec43c7..11aa0e3ce65 100644 --- a/dev/starter-next-studio/components/Studio.tsx +++ b/dev/starter-next-studio/components/Studio.tsx @@ -1,41 +1,13 @@ -import {useMemo} from 'react' -import {defineConfig, Studio} from 'sanity' -import {structureTool} from 'sanity/structure' +import {Studio} from 'sanity' + +import config from '../sanity.config' const wrapperStyles = {height: '100vh', width: '100vw'} export default function StudioRoot({basePath}: {basePath: string}) { - const config = useMemo( - () => - defineConfig({ - basePath, - plugins: [structureTool()], - title: 'Next.js Starter', - projectId: 'ppsg7ml5', - dataset: 'test', - schema: { - types: [ - { - type: 'document', - name: 'post', - title: 'Post', - fields: [ - { - type: 'string', - name: 'title', - title: 'Title', - }, - ], - }, - ], - }, - }), - [basePath], - ) - return (
- +
) } diff --git a/dev/starter-next-studio/package.json b/dev/starter-next-studio/package.json index 6db5bcb1008..c92faad7acf 100644 --- a/dev/starter-next-studio/package.json +++ b/dev/starter-next-studio/package.json @@ -5,7 +5,7 @@ "license": "MIT", "author": "Sanity.io ", "scripts": { - "build": "next build", + "build": "sanity manifest extract --path public/static && next build", "dev": "next dev", "start": "next start" }, diff --git a/dev/starter-next-studio/sanity.cli.ts b/dev/starter-next-studio/sanity.cli.ts new file mode 100644 index 00000000000..fac247bf8cf --- /dev/null +++ b/dev/starter-next-studio/sanity.cli.ts @@ -0,0 +1,8 @@ +import {defineCliConfig} from 'sanity/cli' + +export default defineCliConfig({ + api: { + projectId: 'ppsg7ml5', + dataset: 'test', + }, +}) diff --git a/dev/starter-next-studio/sanity.config.ts b/dev/starter-next-studio/sanity.config.ts new file mode 100644 index 00000000000..102cbb15f94 --- /dev/null +++ b/dev/starter-next-studio/sanity.config.ts @@ -0,0 +1,25 @@ +import {defineConfig} from 'sanity' +import {structureTool} from 'sanity/structure' + +export default defineConfig({ + plugins: [structureTool()], + title: 'Next.js Starter', + projectId: 'ppsg7ml5', + dataset: 'test', + schema: { + types: [ + { + type: 'document', + name: 'post', + title: 'Post', + fields: [ + { + type: 'string', + name: 'title', + title: 'Title', + }, + ], + }, + ], + }, +}) diff --git a/package.json b/package.json index e133a523e10..c2bba646e6b 100644 --- a/package.json +++ b/package.json @@ -54,6 +54,7 @@ "e2e:setup": "node -r dotenv-flow/config -r esbuild-register scripts/e2e/setup", "e2e:start": "pnpm --filter studio-e2e-testing preview", "etl": "node -r dotenv-flow/config -r esbuild-register scripts/etl", + "efps:test": "cd perf/efps && pnpm test", "example:blog-studio": "cd examples/blog-studio && pnpm start", "example:clean-studio": "cd examples/blog-studio && pnpm start", "example:ecommerce-studio": "cd examples/blog-studio && pnpm start", @@ -115,7 +116,6 @@ "@sanity/tsdoc": "1.0.105", "@sanity/ui": "^2.8.9", "@sanity/uuid": "^3.0.2", - "@types/glob": "^7.2.0", "@types/lodash": "^4.17.7", "@types/node": "^18.19.8", "@types/react": "^18.3.5", @@ -147,7 +147,7 @@ "eslint-plugin-unicorn": "^52.0.0", "eslint-plugin-unused-imports": "^3.2.0", "execa": "^2.0.0", - "glob": "^7.2.0", + "glob": "^10.4.0", "globby": "^10.0.0", "husky": "^9.0.11", "jest": "^29.7.0", diff --git a/packages/@sanity/cli/src/util/noSuchCommandText.ts b/packages/@sanity/cli/src/util/noSuchCommandText.ts index 07b94d6b1ce..3c35f5dd65d 100644 --- a/packages/@sanity/cli/src/util/noSuchCommandText.ts +++ b/packages/@sanity/cli/src/util/noSuchCommandText.ts @@ -18,6 +18,7 @@ const coreCommands = [ 'graphql', 'hook', 'migration', + 'manifest', 'preview', 'schema', 'start', diff --git a/packages/sanity/package.config.ts b/packages/sanity/package.config.ts index 08aa9c96444..a2e7757e57a 100644 --- a/packages/sanity/package.config.ts +++ b/packages/sanity/package.config.ts @@ -41,6 +41,11 @@ export default defineConfig({ require: './lib/_internal/cli/threads/extractSchema.js', runtime: 'node', }, + { + source: './src/_internal/cli/threads/extractManifest.ts', + require: './lib/_internal/cli/threads/extractManifest.js', + runtime: 'node', + }, ], extract: { diff --git a/packages/sanity/src/_internal/cli/actions/build/buildAction.ts b/packages/sanity/src/_internal/cli/actions/build/buildAction.ts index 62b9b8fe8d2..24cb65957bc 100644 --- a/packages/sanity/src/_internal/cli/actions/build/buildAction.ts +++ 
b/packages/sanity/src/_internal/cli/actions/build/buildAction.ts @@ -187,6 +187,7 @@ export default async function buildSanityStudio( spin.text = `Build Sanity Studio (${buildDuration.toFixed()}ms)` spin.succeed() + trace.complete() if (flags.stats) { output.print('\nLargest module files:') diff --git a/packages/sanity/src/_internal/cli/actions/deploy/deployAction.ts b/packages/sanity/src/_internal/cli/actions/deploy/deployAction.ts index 364aec55735..042fb803190 100644 --- a/packages/sanity/src/_internal/cli/actions/deploy/deployAction.ts +++ b/packages/sanity/src/_internal/cli/actions/deploy/deployAction.ts @@ -7,6 +7,7 @@ import tar from 'tar-fs' import {shouldAutoUpdate} from '../../util/shouldAutoUpdate' import buildSanityStudio, {type BuildSanityStudioCommandFlags} from '../build/buildAction' +import {extractManifestSafe} from '../manifest/extractManifestAction' import { checkDir, createDeployment, @@ -101,16 +102,25 @@ export default async function deployStudioAction( // Always build the project, unless --no-build is passed const shouldBuild = flags.build if (shouldBuild) { - const buildArgs = [customSourceDir].filter(Boolean) - const {didCompile} = await buildSanityStudio( - {...args, extOptions: flags, argsWithoutOptions: buildArgs}, - context, - {basePath: '/'}, - ) + const buildArgs = { + ...args, + extOptions: flags, + argsWithoutOptions: [customSourceDir].filter(Boolean), + } + const {didCompile} = await buildSanityStudio(buildArgs, context, {basePath: '/'}) if (!didCompile) { return } + + await extractManifestSafe( + { + ...buildArgs, + extOptions: {}, + extraArguments: [], + }, + context, + ) } // Ensure that the directory exists, is a directory and seems to have valid content diff --git a/packages/sanity/src/_internal/cli/actions/manifest/extractManifestAction.ts b/packages/sanity/src/_internal/cli/actions/manifest/extractManifestAction.ts new file mode 100644 index 00000000000..509110cf775 --- /dev/null +++ b/packages/sanity/src/_internal/cli/actions/manifest/extractManifestAction.ts @@ -0,0 +1,182 @@ +import {createHash} from 'node:crypto' +import {mkdir, writeFile} from 'node:fs/promises' +import {dirname, join, resolve} from 'node:path' +import {Worker} from 'node:worker_threads' + +import {type CliCommandArguments, type CliCommandContext} from '@sanity/cli' +import {minutesToMilliseconds} from 'date-fns' +import readPkgUp from 'read-pkg-up' + +import { + type CreateManifest, + type CreateWorkspaceManifest, + type ManifestWorkspaceFile, +} from '../../../manifest/manifestTypes' +import {type ExtractManifestWorkerData} from '../../threads/extractManifest' +import {getTimer} from '../../util/timing' + +const MANIFEST_FILENAME = 'create-manifest.json' +const SCHEMA_FILENAME_SUFFIX = '.create-schema.json' + +/** Escape-hatch env flags to change action behavior */ +const FEATURE_ENABLED_ENV_NAME = 'SANITY_CLI_EXTRACT_MANIFEST_ENABLED' +const EXTRACT_MANIFEST_ENABLED = process.env[FEATURE_ENABLED_ENV_NAME] !== 'false' +const EXTRACT_MANIFEST_LOG_ERRORS = process.env.SANITY_CLI_EXTRACT_MANIFEST_LOG_ERRORS === 'true' + +const CREATE_TIMER = 'create-manifest' + +const EXTRACT_TASK_TIMEOUT_MS = minutesToMilliseconds(2) + +const EXTRACT_FAILURE_MESSAGE = + "Couldn't extract manifest file. Sanity Create will not be available for the studio.\n" + + `Disable this message with ${FEATURE_ENABLED_ENV_NAME}=false` + +interface ExtractFlags { + path?: string +} + +/** + * This function will never throw. 
+ * @returns `undefined` if extract succeeded, or the caught error if it failed + */ +export async function extractManifestSafe( + args: CliCommandArguments<ExtractFlags>, + context: CliCommandContext, +): Promise<Error | undefined> { + if (!EXTRACT_MANIFEST_ENABLED) { + return undefined + } + + try { + await extractManifest(args, context) + return undefined + } catch (err) { + if (EXTRACT_MANIFEST_LOG_ERRORS) { + context.output.error(err) + } + return err + } +} + +async function extractManifest( + args: CliCommandArguments<ExtractFlags>, + context: CliCommandContext, +): Promise<void> { + const {output, workDir} = context + + const flags = args.extOptions + const defaultOutputDir = resolve(join(workDir, 'dist')) + + const outputDir = resolve(defaultOutputDir) + const defaultStaticPath = join(outputDir, 'static') + + const staticPath = flags.path ?? defaultStaticPath + + const path = join(staticPath, MANIFEST_FILENAME) + + const rootPkgPath = readPkgUp.sync({cwd: __dirname})?.path + if (!rootPkgPath) { + throw new Error('Could not find root directory for `sanity` package') + } + + const timer = getTimer() + timer.start(CREATE_TIMER) + const spinner = output.spinner({}).start('Extracting manifest') + + try { + const workspaceManifests = await getWorkspaceManifests({rootPkgPath, workDir}) + await mkdir(staticPath, {recursive: true}) + + const workspaceFiles = await writeWorkspaceFiles(workspaceManifests, staticPath) + + const manifest: CreateManifest = { + version: 1, + createdAt: new Date().toISOString(), + workspaces: workspaceFiles, + } + + await writeFile(path, JSON.stringify(manifest, null, 2)) + const manifestDuration = timer.end(CREATE_TIMER) + + spinner.succeed(`Extracted manifest (${manifestDuration.toFixed()}ms)`) + } catch (err) { + spinner.info(EXTRACT_FAILURE_MESSAGE) + throw err + } +} + +async function getWorkspaceManifests({ + rootPkgPath, + workDir, +}: { + rootPkgPath: string + workDir: string +}): Promise<CreateWorkspaceManifest[]> { + const workerPath = join( + dirname(rootPkgPath), + 'lib', + '_internal', + 'cli', + 'threads', + 'extractManifest.js', + ) + + const worker = new Worker(workerPath, { + workerData: {workDir} satisfies ExtractManifestWorkerData, + // eslint-disable-next-line no-process-env + env: process.env, + }) + + let timeout = false + const timeoutId = setTimeout(() => { + timeout = true + worker.terminate() + }, EXTRACT_TASK_TIMEOUT_MS) + + try { + return await new Promise<CreateWorkspaceManifest[]>((resolveWorkspaces, reject) => { + const buffer: CreateWorkspaceManifest[] = [] + worker.addListener('message', (message) => buffer.push(message)) + worker.addListener('exit', (exitCode) => { + if (exitCode === 0) { + resolveWorkspaces(buffer) + } else if (timeout) { + reject(new Error(`Extract manifest was aborted after ${EXTRACT_TASK_TIMEOUT_MS}ms`)) + } + }) + worker.addListener('error', reject) + }) + } finally { + clearTimeout(timeoutId) + } +} + +function writeWorkspaceFiles( + manifestWorkspaces: CreateWorkspaceManifest[], + staticPath: string, +): Promise<ManifestWorkspaceFile[]> { + const output = manifestWorkspaces.reduce<Promise<ManifestWorkspaceFile>[]>( + (workspaces, workspace) => { + return [...workspaces, writeWorkspaceSchemaFile(workspace, staticPath)] + }, + [], + ) + return Promise.all(output) +} + +async function writeWorkspaceSchemaFile( + workspace: CreateWorkspaceManifest, + staticPath: string, +): Promise<ManifestWorkspaceFile> { + const schemaString = JSON.stringify(workspace.schema, null, 2) + const hash = createHash('sha1').update(schemaString).digest('hex') + const filename = `${hash.slice(0, 8)}${SCHEMA_FILENAME_SUFFIX}` + + // workspaces with identical schemas will overwrite each other's schema file. 
This is ok, since they are identical and can be shared + await writeFile(join(staticPath, filename), schemaString) + + return { + ...workspace, + schema: filename, + } +} diff --git a/packages/sanity/src/_internal/cli/actions/validation/validateAction.ts b/packages/sanity/src/_internal/cli/actions/validation/validateAction.ts index 175ae43c73b..6f343b9ab01 100644 --- a/packages/sanity/src/_internal/cli/actions/validation/validateAction.ts +++ b/packages/sanity/src/_internal/cli/actions/validation/validateAction.ts @@ -1,7 +1,12 @@ import fs from 'node:fs' import path from 'node:path' -import {type CliCommandArguments, type CliCommandContext, type CliOutputter} from '@sanity/cli' +import { + type CliCommandArguments, + type CliCommandContext, + type CliConfig, + type CliOutputter, +} from '@sanity/cli' import {type ClientConfig} from '@sanity/client' import chalk from 'chalk' import logSymbols from 'log-symbols' @@ -31,7 +36,7 @@ export type BuiltInValidationReporter = (options: { export default async function validateAction( args: CliCommandArguments, - {apiClient, workDir, output, prompt}: CliCommandContext, + {apiClient, workDir, output, cliConfig, prompt}: CliCommandContext, ): Promise { const flags = args.extOptions const unattendedMode = Boolean(flags.yes || flags.y) @@ -160,6 +165,7 @@ export default async function validateAction( return reporter({output, worker, flags}) }, + studioHost: (cliConfig as CliConfig)?.studioHost, }) process.exitCode = overallLevel === 'error' ? 1 : 0 diff --git a/packages/sanity/src/_internal/cli/actions/validation/validateDocuments.ts b/packages/sanity/src/_internal/cli/actions/validation/validateDocuments.ts index aa2be78083d..4447c5b7d30 100644 --- a/packages/sanity/src/_internal/cli/actions/validation/validateDocuments.ts +++ b/packages/sanity/src/_internal/cli/actions/validation/validateDocuments.ts @@ -23,6 +23,7 @@ export interface ValidateDocumentsOptions { maxCustomValidationConcurrency?: number maxFetchConcurrency?: number reporter?: (worker: WorkerChannelReceiver) => TReturn + studioHost?: string } export interface DocumentValidationResult { @@ -102,6 +103,7 @@ export function validateDocuments(options: ValidateDocumentsOptions): unknown { ndjsonFilePath, maxCustomValidationConcurrency, maxFetchConcurrency, + studioHost: options.studioHost, } satisfies ValidateDocumentsWorkerData, // eslint-disable-next-line no-process-env env: process.env, diff --git a/packages/sanity/src/_internal/cli/commands/index.ts b/packages/sanity/src/_internal/cli/commands/index.ts index e27daee4cce..1a6801e8a82 100644 --- a/packages/sanity/src/_internal/cli/commands/index.ts +++ b/packages/sanity/src/_internal/cli/commands/index.ts @@ -41,6 +41,8 @@ import hookGroup from './hook/hookGroup' import listHookLogsCommand from './hook/listHookLogsCommand' import listHooksCommand from './hook/listHooksCommand' import printHookAttemptCommand from './hook/printHookAttemptCommand' +import extractManifestCommand from './manifest/extractManifestCommand' +import manifestGroup from './manifest/manifestGroup' import createMigrationCommand from './migration/createMigrationCommand' import listMigrationsCommand from './migration/listMigrationsCommand' import migrationGroup from './migration/migrationGroup' @@ -110,6 +112,8 @@ const commands: (CliCommandDefinition | CliCommandGroupDefinition)[] = [ previewCommand, uninstallCommand, execCommand, + manifestGroup, + extractManifestCommand, ] /** diff --git a/packages/sanity/src/_internal/cli/commands/manifest/extractManifestCommand.ts 
b/packages/sanity/src/_internal/cli/commands/manifest/extractManifestCommand.ts new file mode 100644 index 00000000000..7422524e2a4 --- /dev/null +++ b/packages/sanity/src/_internal/cli/commands/manifest/extractManifestCommand.ts @@ -0,0 +1,35 @@ +import {type CliCommandDefinition} from '@sanity/cli' + +const description = 'Extracts the studio configuration as one or more JSON manifest files.' + +const helpText = ` +**Note**: This command is experimental and subject to change. It is currently intended for use with Create only. + +Options + --path Optional path to specify destination directory of the manifest files. Default: /dist/static + +Examples + # Extracts manifests + sanity manifest extract + + # Extracts manifests into /public/static + sanity manifest extract --path /public/static +` + +const extractManifestCommand: CliCommandDefinition = { + name: 'extract', + group: 'manifest', + signature: '', + description, + helpText, + action: async (args, context) => { + const {extractManifestSafe} = await import('../../actions/manifest/extractManifestAction') + const extractError = await extractManifestSafe(args, context) + if (extractError) { + throw extractError + } + return extractError + }, +} + +export default extractManifestCommand diff --git a/packages/sanity/src/_internal/cli/commands/manifest/manifestGroup.ts b/packages/sanity/src/_internal/cli/commands/manifest/manifestGroup.ts new file mode 100644 index 00000000000..ba086d91672 --- /dev/null +++ b/packages/sanity/src/_internal/cli/commands/manifest/manifestGroup.ts @@ -0,0 +1,6 @@ +export default { + name: 'manifest', + signature: '[COMMAND]', + isGroupRoot: true, + description: 'Interacts with the studio configuration.', +} diff --git a/packages/sanity/src/_internal/cli/threads/__tests__/validateDocuments.test.ts b/packages/sanity/src/_internal/cli/threads/__tests__/validateDocuments.test.ts index dcfac4c9950..c82178e4383 100644 --- a/packages/sanity/src/_internal/cli/threads/__tests__/validateDocuments.test.ts +++ b/packages/sanity/src/_internal/cli/threads/__tests__/validateDocuments.test.ts @@ -3,7 +3,7 @@ import path from 'node:path' import {Worker} from 'node:worker_threads' import {afterAll, beforeAll, describe, expect, it, jest} from '@jest/globals' -import {type SanityDocument, type SanityProject} from '@sanity/client' +import {type SanityDocument} from '@sanity/client' import {evaluate, parse} from 'groq-js' import {getMonorepoAliases} from '../../server/sanityMonorepo' @@ -125,14 +125,6 @@ describe('validateDocuments', () => { } switch (resource) { - case 'projects': { - json({ - studioHost: 'https://example.sanity.studio', - metadata: {externalStudioHost: localhost}, - } satisfies Partial) - return - } - case 'data': { const [method] = rest switch (method) { @@ -195,6 +187,7 @@ describe('validateDocuments', () => { useCdn: true, useProjectHostname: false, }, + studioHost: localhost, } const filepath = require.resolve('../validateDocuments') diff --git a/packages/sanity/src/_internal/cli/threads/extractManifest.ts b/packages/sanity/src/_internal/cli/threads/extractManifest.ts new file mode 100644 index 00000000000..e3080dce09f --- /dev/null +++ b/packages/sanity/src/_internal/cli/threads/extractManifest.ts @@ -0,0 +1,33 @@ +import {isMainThread, parentPort, workerData as _workerData} from 'node:worker_threads' + +import {extractCreateWorkspaceManifest} from '../../manifest/extractWorkspaceManifest' +import {getStudioWorkspaces} from '../util/getStudioWorkspaces' +import {mockBrowserEnvironment} from 
'../util/mockBrowserEnvironment' + +/** @internal */ +export interface ExtractManifestWorkerData { + workDir: string +} + +if (isMainThread || !parentPort) { + throw new Error('This module must be run as a worker thread') +} + +const opts = _workerData as ExtractManifestWorkerData + +const cleanup = mockBrowserEnvironment(opts.workDir) + +async function main() { + try { + const workspaces = await getStudioWorkspaces({basePath: opts.workDir}) + + for (const workspace of workspaces) { + parentPort?.postMessage(extractCreateWorkspaceManifest(workspace)) + } + } finally { + parentPort?.close() + cleanup() + } +} + +main() diff --git a/packages/sanity/src/_internal/cli/threads/validateDocuments.ts b/packages/sanity/src/_internal/cli/threads/validateDocuments.ts index 41716d260fd..cefeae6dca1 100644 --- a/packages/sanity/src/_internal/cli/threads/validateDocuments.ts +++ b/packages/sanity/src/_internal/cli/threads/validateDocuments.ts @@ -44,6 +44,7 @@ export interface ValidateDocumentsWorkerData { level?: ValidationMarker['level'] maxCustomValidationConcurrency?: number maxFetchConcurrency?: number + studioHost?: string } /** @internal */ @@ -52,7 +53,6 @@ export type ValidationWorkerChannel = WorkerChannel<{ name: string projectId: string dataset: string - studioHost: string | null basePath: string }> loadedDocumentCount: WorkerChannelEvent<{documentCount: number}> @@ -81,6 +81,7 @@ const { level, maxCustomValidationConcurrency, maxFetchConcurrency, + studioHost, } = _workerData as ValidateDocumentsWorkerData if (isMainThread || !parentPort) { @@ -160,24 +161,14 @@ async function loadWorkspace() { requestTagPrefix: 'sanity.cli.validate', }).config({apiVersion: 'v2021-03-25'}) - let studioHost - try { - const project = await client.projects.getById(projectId || workspace.projectId) - studioHost = project.metadata.externalStudioHost || project.studioHost - } catch { - // no big deal if we fail to get the studio host - studioHost = null - } - report.event.loadedWorkspace({ projectId: workspace.projectId, dataset: workspace.dataset, name: workspace.name, - studioHost, basePath: workspace.basePath, }) - return {workspace, client, studioHost} + return {workspace, client} } async function downloadFromExport(client: SanityClient) { @@ -321,7 +312,7 @@ async function validateDocuments() { let cleanupDownloadedDocuments: (() => Promise) | undefined try { - const {client, workspace, studioHost} = await loadWorkspace() + const {client, workspace} = await loadWorkspace() const {documentIds, referencedIds, getDocuments, cleanup} = ndjsonFilePath ? 
await downloadFromFile(ndjsonFilePath) : await downloadFromExport(client) diff --git a/packages/sanity/src/_internal/manifest/extractWorkspaceManifest.ts b/packages/sanity/src/_internal/manifest/extractWorkspaceManifest.ts new file mode 100644 index 00000000000..427d4b83a7b --- /dev/null +++ b/packages/sanity/src/_internal/manifest/extractWorkspaceManifest.ts @@ -0,0 +1,502 @@ +import startCase from 'lodash/startCase' +import { + type ArraySchemaType, + type BlockDefinition, + type BooleanSchemaType, + ConcreteRuleClass, + createSchema, + type CrossDatasetReferenceSchemaType, + type FileSchemaType, + type MultiFieldSet, + type NumberSchemaType, + type ObjectField, + type ObjectSchemaType, + type ReferenceSchemaType, + type Rule, + type RuleSpec, + type Schema, + type SchemaType, + type SchemaValidationValue, + type SpanSchemaType, + type StringSchemaType, + type Workspace, +} from 'sanity' + +import { + getCustomFields, + isCrossDatasetReference, + isCustomized, + isDefined, + isPrimitive, + isRecord, + isReference, + isString, + isType, +} from './manifestTypeHelpers' +import { + type CreateWorkspaceManifest, + type ManifestField, + type ManifestFieldset, + type ManifestSchemaType, + type ManifestSerializable, + type ManifestTitledValue, + type ManifestValidationGroup, + type ManifestValidationRule, +} from './manifestTypes' + +interface Context { + schema: Schema +} + +type SchemaTypeKey = + | keyof ArraySchemaType + | keyof BooleanSchemaType + | keyof FileSchemaType + | keyof NumberSchemaType + | keyof ObjectSchemaType + | keyof StringSchemaType + | keyof ReferenceSchemaType + | keyof BlockDefinition + | 'group' // we strip this from fields + +type Validation = {validation: ManifestValidationGroup[]} | Record<string, never> +type ObjectFields = {fields: ManifestField[]} | Record<string, never> +type SerializableProp = ManifestSerializable | ManifestSerializable[] | undefined +type ManifestValidationFlag = ManifestValidationRule['flag'] +type ValidationRuleTransformer = (rule: RuleSpec) => ManifestValidationRule | undefined + +const MAX_CUSTOM_PROPERTY_DEPTH = 5 +const INLINE_TYPES = ['document', 'object', 'image', 'file'] + +export function extractCreateWorkspaceManifest(workspace: Workspace): CreateWorkspaceManifest { + const serializedSchema = extractManifestSchemaTypes(workspace.schema) + + return { + name: workspace.name, + title: workspace.title, + subtitle: workspace.subtitle, + basePath: workspace.basePath, + dataset: workspace.dataset, + schema: serializedSchema, + } +} + +/** + * Extracts all serializable properties from userland schema types, + * so they best-effort can be used as definitions for Schema.compile. + */ +export function extractManifestSchemaTypes(schema: Schema): ManifestSchemaType[] { + const typeNames = schema.getTypeNames() + const context = {schema} + + const studioDefaultTypeNames = createSchema({name: 'default', types: []}).getTypeNames() + + return typeNames + .filter((typeName) => !studioDefaultTypeNames.includes(typeName)) + .map((typeName) => schema.get(typeName)) + .filter((type): type is SchemaType => typeof type !== 'undefined') + .map((type) => transformType(type, context)) +} + +function transformCommonTypeFields( + type: SchemaType & {fieldset?: string}, + typeName: string, + context: Context, +): Omit<ManifestField, 'name' | 'type'> { + const arrayProps = + typeName === 'array' && type.jsonType === 'array' ? transformArrayMember(type, context) : {} + + const referenceProps = isReference(type) ? transformReference(type) : {} + const crossDatasetRefProps = isCrossDatasetReference(type) + ? 
transformCrossDatasetReference(type) : {} + + const objectFields: ObjectFields = + type.jsonType === 'object' && type.type && INLINE_TYPES.includes(typeName) && isCustomized(type) + ? { + fields: getCustomFields(type).map((objectField) => transformField(objectField, context)), + } + : {} + + return { + ...retainCustomTypeProps(type), + ...transformValidation(type.validation), + ...ensureString('description', type.description), + ...objectFields, + ...arrayProps, + ...referenceProps, + ...crossDatasetRefProps, + ...ensureConditional('readOnly', type.readOnly), + ...ensureConditional('hidden', type.hidden), + ...transformFieldsets(type), + // fieldset prop gets instrumented via getCustomFields + ...ensureString('fieldset', type.fieldset), + ...transformBlockType(type, context), + } +} + +function transformFieldsets( + type: SchemaType, +): {fieldsets: ManifestFieldset[]} | Record<string, never> { + if (type.jsonType !== 'object') { + return {} + } + const fieldsets = type.fieldsets + ?.filter((fs): fs is MultiFieldSet => !fs.single) + .map((fs) => { + const options = isRecord(fs.options) ? {options: retainSerializableProps(fs.options)} : {} + return { + name: fs.name, + ...ensureCustomTitle(fs.name, fs.title), + ...ensureString('description', fs.description), + ...ensureConditional('readOnly', fs.readOnly), + ...ensureConditional('hidden', fs.hidden), + ...options, + } + }) + + return fieldsets?.length ? {fieldsets} : {} +} + +function transformType(type: SchemaType, context: Context): ManifestSchemaType { + const typeName = type.type ? type.type.name : type.jsonType + + return { + ...transformCommonTypeFields(type, typeName, context), + name: type.name, + type: typeName, + ...ensureCustomTitle(type.name, type.title), + } +} + +function retainCustomTypeProps(type: SchemaType): Record<string, SerializableProp> { + const manuallySerializedFields: SchemaTypeKey[] = [ + // explicitly added + 'name', + 'title', + 'description', + 'readOnly', + 'hidden', + 'validation', + 'fieldsets', + 'fields', + 'to', + 'of', + // not serialized + 'type', + 'jsonType', + '__experimental_actions', + '__experimental_formPreviewTitle', + '__experimental_omnisearch_visibility', + '__experimental_search', + 'components', + 'icon', + 'orderings', + 'preview', + 'groups', + // only exists on fields + 'group', + // we know about these, but let them be generically handled + // deprecated + // rows (from text) + // initialValue + // options + // crossDatasetReference props + ] + const typeWithoutManuallyHandledFields = Object.fromEntries( + Object.entries(type).filter( + ([key]) => !manuallySerializedFields.includes(key as unknown as SchemaTypeKey), + ), + ) + return retainSerializableProps(typeWithoutManuallyHandledFields) as Record< + string, + SerializableProp + > +} + +function retainSerializableProps(maybeSerializable: unknown, depth = 0): SerializableProp { + if (depth > MAX_CUSTOM_PROPERTY_DEPTH) { + return undefined + } + + if (!isDefined(maybeSerializable)) { + return undefined + } + + if (isPrimitive(maybeSerializable)) { + // cull empty strings + if (maybeSerializable === '') { + return undefined + } + return maybeSerializable + } + + // url-schemes etc. + if (maybeSerializable instanceof RegExp) { + return maybeSerializable.toString() + } + + if (Array.isArray(maybeSerializable)) { + const arrayItems = maybeSerializable + .map((item) => retainSerializableProps(item, depth + 1)) + .filter((item): item is ManifestSerializable => isDefined(item)) + return arrayItems.length ? 
arrayItems : undefined + } + + if (isRecord(maybeSerializable)) { + const serializableEntries = Object.entries(maybeSerializable) + .map(([key, value]) => { + return [key, retainSerializableProps(value, depth + 1)] + }) + .filter(([, value]) => isDefined(value)) + return serializableEntries.length ? Object.fromEntries(serializableEntries) : undefined + } + + return undefined +} + +function transformField(field: ObjectField & {fieldset?: string}, context: Context): ManifestField { + const fieldType = field.type + const typeNameExists = !!context.schema.get(fieldType.name) + const typeName = typeNameExists ? fieldType.name : (fieldType.type?.name ?? fieldType.name) + return { + ...transformCommonTypeFields(fieldType, typeName, context), + name: field.name, + type: typeName, + ...ensureCustomTitle(field.name, fieldType.title), + // this prop gets added synthetically via getCustomFields + ...ensureString('fieldset', field.fieldset), + } +} + +function transformArrayMember( + arrayMember: ArraySchemaType, + context: Context, +): Pick<ManifestSchemaType, 'of'> { + return { + of: arrayMember.of.map((type) => { + const typeNameExists = !!context.schema.get(type.name) + const typeName = typeNameExists ? type.name : (type.type?.name ?? type.name) + return { + ...transformCommonTypeFields(type, typeName, context), + type: typeName, + ...(typeName === type.name ? {} : {name: type.name}), + ...ensureCustomTitle(type.name, type.title), + } + }), + } +} + +function transformReference(reference: ReferenceSchemaType): Pick<ManifestSchemaType, 'to'> { + return { + to: (reference.to ?? []).map((type) => { + return { + ...retainCustomTypeProps(type), + type: type.name, + } + }), + } +} + +function transformCrossDatasetReference( + reference: CrossDatasetReferenceSchemaType, +): Pick<ManifestSchemaType, 'to'> { + return { + to: (reference.to ?? []).map((crossDataset) => { + const preview = crossDataset.preview?.select + ? {preview: {select: crossDataset.preview.select}} + : {} + return { + type: crossDataset.type, + ...ensureCustomTitle(crossDataset.type, crossDataset.title), + ...preview, + } + }), + } +} + +const transformTypeValidationRule: ValidationRuleTransformer = (rule) => { + return { + ...rule, + constraint: + 'constraint' in rule && + (typeof rule.constraint === 'string' + ? rule.constraint.toLowerCase() + : retainSerializableProps(rule.constraint)), + } +} + +const validationRuleTransformers: Partial< + Record<ManifestValidationFlag, ValidationRuleTransformer> +> = { + type: transformTypeValidationRule, +} + +function transformValidation(validation: SchemaValidationValue): Validation { + const validationArray = (Array.isArray(validation) ? validation : [validation]).filter( + (value): value is Rule => typeof value === 'object' && '_type' in value, + ) + + // we don't want type in the output as that is implicitly given by the typedef itself and will only bloat the payload + const disallowedFlags = ['type'] + + // Validation rules that refer to other fields use symbols, which cannot be serialized. It would + // be possible to transform these to a serializable type, but we haven't implemented that for now. + const disallowedConstraintTypes: (symbol | unknown)[] = [ConcreteRuleClass.FIELD_REF] + + const serializedValidation = validationArray + .map(({_rules, _message, _level}) => { + const message: Partial<Pick<ManifestValidationGroup, 'message'>> = + typeof _message === 'string' ? 
{message: _message} : {} + + const serializedRules = _rules + .filter((rule) => { + if (!('constraint' in rule)) { + return false + } + + const {flag, constraint} = rule + + if (disallowedFlags.includes(flag)) { + return false + } + + return !( + typeof constraint === 'object' && + 'type' in constraint && + disallowedConstraintTypes.includes(constraint.type) + ) + }) + .reduce<ManifestValidationRule[]>((rules, rule) => { + const transformer: ValidationRuleTransformer = + validationRuleTransformers[rule.flag] ?? + ((spec) => retainSerializableProps(spec) as ManifestValidationRule) + + const transformedRule = transformer(rule) + if (!transformedRule) { + return rules + } + return [...rules, transformedRule] + }, []) + + return { + rules: serializedRules, + level: _level, + ...message, + } + }) + .filter((group) => !!group.rules.length) + + return serializedValidation.length ? {validation: serializedValidation} : {} +} + +function ensureCustomTitle(typeName: string, value: unknown) { + const titleObject = ensureString('title', value) + + const defaultTitle = startCase(typeName) + // omit title if it's the same as the default, to reduce payload + if (titleObject.title === defaultTitle) { + return {} + } + return titleObject +} + +function ensureString<Key extends string>(key: Key, value: unknown) { + if (typeof value === 'string') { + return { + [key]: value, + } + } + + return {} +} + +function ensureConditional<Key extends string>(key: Key, value: unknown) { + if (typeof value === 'boolean') { + return { + [key]: value, + } + } + + if (typeof value === 'function') { + return { + [key]: 'conditional', + } + } + + return {} +} + +export function transformBlockType( + blockType: SchemaType, + context: Context, +): Pick<ManifestSchemaType, 'marks' | 'lists' | 'styles' | 'of'> | Record<string, never> { + if (blockType.jsonType !== 'object' || !isType(blockType, 'block')) { + return {} + } + + const childrenField = blockType.fields?.find((field) => field.name === 'children') as + | {type: ArraySchemaType} + | undefined + + if (!childrenField) { + return {} + } + const ofType = childrenField.type.of + if (!ofType) { + return {} + } + const spanType = ofType.find((memberType) => memberType.name === 'span') as + | ObjectSchemaType + | undefined + if (!spanType) { + return {} + } + const inlineObjectTypes = (ofType.filter((memberType) => memberType.name !== 'span') || + []) as ObjectSchemaType[] + + return { + marks: { + annotations: (spanType as SpanSchemaType).annotations.map((t) => transformType(t, context)), + decorators: resolveEnabledDecorators(spanType), + }, + lists: resolveEnabledListItems(blockType), + styles: resolveEnabledStyles(blockType), + of: inlineObjectTypes.map((t) => transformType(t, context)), + } +} + +function resolveEnabledStyles(blockType: ObjectSchemaType): ManifestTitledValue[] | undefined { + const styleField = blockType.fields?.find((btField) => btField.name === 'style') + return resolveTitleValueArray(styleField?.type?.options?.list) +} + +function resolveEnabledDecorators(spanType: ObjectSchemaType): ManifestTitledValue[] | undefined { + return 'decorators' in spanType ? 
resolveTitleValueArray(spanType.decorators) : undefined +} + +function resolveEnabledListItems(blockType: ObjectSchemaType): ManifestTitledValue[] | undefined { + const listField = blockType.fields?.find((btField) => btField.name === 'listItem') + return resolveTitleValueArray(listField?.type?.options?.list) +} + +function resolveTitleValueArray(possibleArray: unknown): ManifestTitledValue[] | undefined { + if (!possibleArray || !Array.isArray(possibleArray)) { + return undefined + } + const titledValues = possibleArray + .filter( + (d): d is {value: string; title?: string} => isRecord(d) && !!d.value && isString(d.value), + ) + .map((item) => { + return { + value: item.value, + ...ensureString('title', item.title), + } satisfies ManifestTitledValue + }) + if (!titledValues?.length) { + return undefined + } + + return titledValues +} diff --git a/packages/sanity/src/_internal/manifest/manifestTypeHelpers.ts b/packages/sanity/src/_internal/manifest/manifestTypeHelpers.ts new file mode 100644 index 00000000000..e9b366e978a --- /dev/null +++ b/packages/sanity/src/_internal/manifest/manifestTypeHelpers.ts @@ -0,0 +1,107 @@ +import { + type CrossDatasetReferenceSchemaType, + type ObjectField, + type ObjectSchemaType, + type ReferenceSchemaType, + type SchemaType, +} from '@sanity/types' + +const DEFAULT_IMAGE_FIELDS = ['asset', 'hotspot', 'crop'] +const DEFAULT_FILE_FIELDS = ['asset'] +const DEFAULT_GEOPOINT_FIELDS = ['lat', 'lng', 'alt'] +const DEFAULT_SLUG_FIELDS = ['current', 'source'] + +export function getCustomFields(type: ObjectSchemaType): (ObjectField & {fieldset?: string})[] { + const fields = type.fieldsets + ? type.fieldsets.flatMap((fs) => { + if (fs.single) { + return fs.field + } + return fs.fields.map((field) => ({ + ...field, + fieldset: fs.name, + })) + }) + : type.fields + + if (isType(type, 'block')) { + return [] + } + if (isType(type, 'slug')) { + return fields.filter((f) => !DEFAULT_SLUG_FIELDS.includes(f.name)) + } + if (isType(type, 'geopoint')) { + return fields.filter((f) => !DEFAULT_GEOPOINT_FIELDS.includes(f.name)) + } + if (isType(type, 'image')) { + return fields.filter((f) => !DEFAULT_IMAGE_FIELDS.includes(f.name)) + } + if (isType(type, 'file')) { + return fields.filter((f) => !DEFAULT_FILE_FIELDS.includes(f.name)) + } + return fields +} + +export function isReference(type: SchemaType): type is ReferenceSchemaType { + return isType(type, 'reference') +} + +export function isCrossDatasetReference(type: SchemaType): type is CrossDatasetReferenceSchemaType { + return isType(type, 'crossDatasetReference') +} + +export function isObjectField(maybeObjectField: unknown): boolean { + return ( + typeof maybeObjectField === 'object' && maybeObjectField !== null && 'name' in maybeObjectField + ) +} + +export function isCustomized(maybeCustomized: SchemaType): boolean { + const hasFieldsArray = + isObjectField(maybeCustomized) && + !isType(maybeCustomized, 'reference') && + !isType(maybeCustomized, 'crossDatasetReference') && + 'fields' in maybeCustomized && + Array.isArray(maybeCustomized.fields) + + if (!hasFieldsArray) { + return false + } + + const fields = getCustomFields(maybeCustomized) + return !!fields.length +} + +export function isType(schemaType: SchemaType, typeName: string): boolean { + if (schemaType.name === typeName) { + return true + } + if (!schemaType.type) { + return false + } + return isType(schemaType.type, typeName) +} + +export function isDefined<T>(value: T | null | undefined): value is T { + return value !== null && value !== undefined +} + +export 
function isRecord(value: unknown): value is Record<string, unknown> { + return !!value && typeof value === 'object' +} + +export function isPrimitive(value: unknown): value is string | boolean | number { + return isString(value) || isBoolean(value) || isNumber(value) +} + +export function isString(value: unknown): value is string { + return typeof value === 'string' +} + +function isNumber(value: unknown): value is number { + return typeof value === 'number' +} + +function isBoolean(value: unknown): value is boolean { + return typeof value === 'boolean' +} diff --git a/packages/sanity/src/_internal/manifest/manifestTypes.ts b/packages/sanity/src/_internal/manifest/manifestTypes.ts new file mode 100644 index 00000000000..7ce29c9ba7e --- /dev/null +++ b/packages/sanity/src/_internal/manifest/manifestTypes.ts @@ -0,0 +1,85 @@ +export type ManifestSerializable = + | string + | number + | boolean + | {[k: string]: ManifestSerializable} + | ManifestSerializable[] + +export interface CreateManifest { + version: 1 + createdAt: string + workspaces: ManifestWorkspaceFile[] +} + +export interface ManifestWorkspaceFile { + name: string + dataset: string + schema: string // filename +} + +export interface CreateWorkspaceManifest { + name: string + title?: string + subtitle?: string + basePath: string + dataset: string + schema: ManifestSchemaType[] +} + +export interface ManifestSchemaType { + type: string + name: string + title?: string + deprecated?: { + reason: string + } + readOnly?: boolean | 'conditional' + hidden?: boolean | 'conditional' + validation?: ManifestValidationGroup[] + fields?: ManifestField[] + to?: ManifestReferenceMember[] + of?: ManifestArrayMember[] + preview?: { + select: Record<string, string> + } + fieldsets?: ManifestFieldset[] + options?: Record<string, ManifestSerializable> + // portable text + marks?: { + annotations?: ManifestArrayMember[] + decorators?: ManifestTitledValue[] + } + lists?: ManifestTitledValue[] + styles?: ManifestTitledValue[] + + // userland (assignable to ManifestSerializable | undefined) + // not included to add some type safety to extractManifest + // [index: string]: unknown +} + +export interface ManifestFieldset { + name: string + title?: string + [index: string]: ManifestSerializable | undefined +} + +export interface ManifestTitledValue { + value: string + title?: string +} + +export type ManifestField = ManifestSchemaType & {fieldset?: string} +export type ManifestArrayMember = Omit<ManifestSchemaType, 'name'> & {name?: string} +export type ManifestReferenceMember = Omit<ManifestSchemaType, 'name'> & {name?: string} + +export interface ManifestValidationGroup { + rules: ManifestValidationRule[] + message?: string + level?: 'error' | 'warning' | 'info' +} + +export type ManifestValidationRule = { + flag: string + constraint?: ManifestSerializable + [index: string]: ManifestSerializable | undefined +} diff --git a/packages/sanity/src/core/index.ts b/packages/sanity/src/core/index.ts index 56606120212..83140342883 100644 --- a/packages/sanity/src/core/index.ts +++ b/packages/sanity/src/core/index.ts @@ -33,5 +33,9 @@ export * from './templates' export * from './theme' export * from './user-color' export * from './util' -export {validateDocument, type ValidateDocumentOptions} from './validation' +export { + Rule as ConcreteRuleClass, + validateDocument, + type ValidateDocumentOptions, +} from './validation' export * from './version' diff --git a/packages/sanity/src/core/studio/components/navbar/search/definitions/operators/stringOperators.test.ts b/packages/sanity/src/core/studio/components/navbar/search/definitions/operators/stringOperators.test.ts index 
3fcb6617107..1853d912e1e 100644 --- a/packages/sanity/src/core/studio/components/navbar/search/definitions/operators/stringOperators.test.ts +++ b/packages/sanity/src/core/studio/components/navbar/search/definitions/operators/stringOperators.test.ts @@ -23,7 +23,12 @@ describe('stringOperators', () => { it('should create a valid filter for stringMatches', () => { const filter = stringOperators.stringMatches.groqFilter({fieldPath, value}) - expect(filter).toEqual(`${fieldPath} match "${value}"`) + expect(filter).toEqual(`${fieldPath} match "*${value}*"`) + }) + + it('should create a valid filter for stringNotMatches', () => { + const filter = stringOperators.stringNotMatches.groqFilter({fieldPath, value}) + expect(filter).toEqual(`!(${fieldPath} match "*${value}*")`) }) it('should create a valid filter for stringNotEqual', () => { diff --git a/packages/sanity/src/core/studio/components/navbar/search/definitions/operators/stringOperators.ts b/packages/sanity/src/core/studio/components/navbar/search/definitions/operators/stringOperators.ts index 06188c70286..e51798b792d 100644 --- a/packages/sanity/src/core/studio/components/navbar/search/definitions/operators/stringOperators.ts +++ b/packages/sanity/src/core/studio/components/navbar/search/definitions/operators/stringOperators.ts @@ -37,7 +37,7 @@ export const stringOperators = { nameKey: 'search.operator.string-contains.name', descriptionKey: 'search.operator.string-contains.description', groqFilter: ({fieldPath, value}) => - value && fieldPath ? `${fieldPath} match ${toJSON(value)}` : null, + value && fieldPath ? `${fieldPath} match "*${value}*"` : null, initialValue: null, inputComponent: SearchFilterStringInput as SearchOperatorInput, type: 'stringMatches', @@ -55,7 +55,7 @@ export const stringOperators = { nameKey: 'search.operator.string-not-contains.name', descriptionKey: 'search.operator.string-not-contains.description', groqFilter: ({fieldPath, value}) => - value && fieldPath ? `!(${fieldPath} match ${toJSON(value)})` : null, + value && fieldPath ? `!(${fieldPath} match "*${value}*")` : null, initialValue: null, inputComponent: SearchFilterStringInput as SearchOperatorInput, type: 'stringNotMatches', diff --git a/packages/sanity/src/core/validation/Rule.ts b/packages/sanity/src/core/validation/Rule.ts index 6640cda8a09..15b4ac165a7 100644 --- a/packages/sanity/src/core/validation/Rule.ts +++ b/packages/sanity/src/core/validation/Rule.ts @@ -54,21 +54,25 @@ const ruleConstraintTypes: RuleTypeConstraint[] = [ 'String', ] -// Note: `RuleClass` and `Rule` are split to fit the current `@sanity/types` -// setup. Classes are a bit weird in the `@sanity/types` package because classes -// create an actual javascript class while simultaneously creating a type -// definition. -// -// This implicitly creates two types: -// 1. the instance type — `Rule` and -// 2. the static/class type - `RuleClass` -// -// The `RuleClass` type contains the static methods and the `Rule` instance -// contains the instance methods. -// -// This package exports the RuleClass as a value without implicitly exporting -// an instance definition. This should help reminder downstream users to import -// from the `@sanity/types` package. +/** + * Note: `RuleClass` and `Rule` are split to fit the current `@sanity/types` + * setup. Classes are a bit weird in the `@sanity/types` package because classes + * create an actual javascript class while simultaneously creating a type + * definition. + * + * This implicitly creates two types: + * 1. 
the instance type — `Rule` and + * 2. the static/class type - `RuleClass` + * + * The `RuleClass` type contains the static methods and the `Rule` instance + * contains the instance methods. + * + * This package exports the RuleClass as a value without implicitly exporting + * an instance definition. This should help remind downstream users to import + * from the `@sanity/types` package. + * + * @internal + */ export const Rule: RuleClass = class Rule implements IRule { static readonly FIELD_REF = FIELD_REF static array = (def?: SchemaType): Rule => new Rule(def).type('Array') diff --git a/packages/sanity/src/structure/panes/documentList/DocumentListPane.tsx b/packages/sanity/src/structure/panes/documentList/DocumentListPane.tsx index 61bc05af867..d50b3562041 100644 --- a/packages/sanity/src/structure/panes/documentList/DocumentListPane.tsx +++ b/packages/sanity/src/structure/panes/documentList/DocumentListPane.tsx @@ -1,6 +1,6 @@ import {SearchIcon, SpinnerIcon} from '@sanity/icons' import {Box, TextInput} from '@sanity/ui' -import {memo, useCallback, useEffect, useMemo, useRef, useState} from 'react' +import {memo, useCallback, useEffect, useMemo, useState} from 'react' import {useObservableEvent} from 'react-rx' import {debounce, map, type Observable, of, tap, timer} from 'rxjs' import { @@ -14,7 +14,7 @@ import {keyframes, styled} from 'styled-components' import {structureLocaleNamespace} from '../../i18n' import {type BaseStructureToolPaneProps} from '../types' -import {EMPTY_RECORD} from './constants' +import {EMPTY_RECORD, FULL_LIST_LIMIT} from './constants' import {DocumentListPaneContent} from './DocumentListPaneContent' import {applyOrderingFunctions, findStaticTypesInFilter} from './helpers' import {useShallowUnique} from './PaneContainer' @@ -38,10 +38,34 @@ const rotate = keyframes` } ` +const fadeIn = keyframes` + 0% { + opacity: 0; + } + 50% { + opacity: 0.1; + } + 100% { + opacity: 0.4; + } +` + const AnimatedSpinnerIcon = styled(SpinnerIcon)` animation: ${rotate} 500ms linear infinite; ` +const SubtleSpinnerIcon = styled(SpinnerIcon)` + animation: ${rotate} 1500ms linear infinite; + opacity: 0.4; +` + +const DelayedSubtleSpinnerIcon = styled(SpinnerIcon)` + animation: + ${rotate} 1500ms linear infinite, + ${fadeIn} 1000ms linear; + opacity: 0.4; +` + /** * @internal */ @@ -68,11 +92,6 @@ export const DocumentListPane = memo(function DocumentListPane(props: DocumentLi const [searchInputValue, setSearchInputValue] = useState('') const [searchInputElement, setSearchInputElement] = useState(null) - // A ref to determine if we should show the loading spinner in the search input. - // This is used to avoid showing the spinner on initial load of the document list. - // We only wan't to show the spinner when the user interacts with the search input. - const showSearchLoadingRef = useRef(false) - const sortWithOrderingFn = typeName && sortOrderRaw ? 
applyOrderingFunctions(sortOrderRaw, schema.get(typeName) as any) @@ -80,22 +99,14 @@ export const DocumentListPane = memo(function DocumentListPane(props: DocumentLi const sortOrder = useUnique(sortWithOrderingFn) - const { - error, - hasMaxItems, - isLazyLoading, - isLoading, - isSearchReady, - items, - onListChange, - onRetry, - } = useDocumentList({ - apiVersion, - filter, - params, - searchQuery: searchQuery?.trim(), - sortOrder, - }) + const {error, isLoadingFullList, isLoading, items, fromCache, onLoadFullList, onRetry} = + useDocumentList({ + apiVersion, + filter, + params, + searchQuery: searchQuery?.trim(), + sortOrder, + }) const handleQueryChange = useObservableEvent( (event$: Observable>) => { @@ -122,30 +133,41 @@ export const DocumentListPane = memo(function DocumentListPane(props: DocumentLi [handleClearSearch], ) - useEffect(() => { - if (showSearchLoadingRef.current === false && !isLoading) { - showSearchLoadingRef.current = true - } + const [enableSearchSpinner, setEnableSearchSpinner] = useState() - return () => { - showSearchLoadingRef.current = false + useEffect(() => { + if (!enableSearchSpinner && !isLoading) { + setEnableSearchSpinner(paneKey) } - }, [isLoading]) + }, [enableSearchSpinner, isLoading, paneKey]) useEffect(() => { - // Clear search field and reset showSearchLoadingRef ref + // Clear search field and disable search spinner // when switching between panes (i.e. when paneKey changes). handleClearSearch() - showSearchLoadingRef.current = false + setEnableSearchSpinner() }, [paneKey, handleClearSearch]) const loadingVariant: LoadingVariant = useMemo(() => { - const showSpinner = isLoading && items.length === 0 && showSearchLoadingRef.current - - if (showSpinner) return 'spinner' + if (isLoading && enableSearchSpinner === paneKey) { + return 'spinner' + } + if (fromCache) { + return 'subtle' + } return 'initial' - }, [isLoading, items.length]) + }, [enableSearchSpinner, fromCache, isLoading, paneKey]) + + const textInputIcon = useMemo(() => { + if (loadingVariant === 'spinner') { + return AnimatedSpinnerIcon + } + if (searchInputValue && loadingVariant === 'subtle') { + return SubtleSpinnerIcon + } + return SearchIcon + }, [loadingVariant, searchInputValue]) return ( <> @@ -155,9 +177,12 @@ export const DocumentListPane = memo(function DocumentListPane(props: DocumentLi autoComplete="off" border={false} clearButton={Boolean(searchQuery)} - disabled={!isSearchReady} + disabled={Boolean(error)} fontSize={[2, 2, 1]} - icon={loadingVariant === 'spinner' ? AnimatedSpinnerIcon : SearchIcon} + icon={textInputIcon} + iconRight={ + loadingVariant === 'subtle' && !searchInputValue ? 
DelayedSubtleSpinnerIcon : null + } onChange={handleQueryChange} onClear={handleClearSearch} onKeyDown={handleSearchKeyDown} @@ -173,16 +198,16 @@ export const DocumentListPane = memo(function DocumentListPane(props: DocumentLi childItemId={childItemId} error={error} filterIsSimpleTypeConstraint={!!typeName} - hasMaxItems={hasMaxItems} + hasMaxItems={items.length === FULL_LIST_LIMIT} hasSearchQuery={Boolean(searchQuery)} isActive={isActive} - isLazyLoading={isLazyLoading} + isLazyLoading={isLoadingFullList} isLoading={isLoading} items={items} key={paneKey} layout={layout} loadingVariant={loadingVariant} - onListChange={onListChange} + onEndReached={onLoadFullList} onRetry={onRetry} paneTitle={title} searchInputElement={searchInputElement} diff --git a/packages/sanity/src/structure/panes/documentList/DocumentListPaneContent.tsx b/packages/sanity/src/structure/panes/documentList/DocumentListPaneContent.tsx index 30d34900fb6..8d9a2a8c7e7 100644 --- a/packages/sanity/src/structure/panes/documentList/DocumentListPaneContent.tsx +++ b/packages/sanity/src/structure/panes/documentList/DocumentListPaneContent.tsx @@ -20,8 +20,10 @@ import {structureLocaleNamespace} from '../../i18n' import {FULL_LIST_LIMIT} from './constants' import {type DocumentListPaneItem, type LoadingVariant} from './types' -const RootBox = styled(Box)` +const RootBox = styled(Box)<{$opacity?: number}>` position: relative; + opacity: ${(props) => props.$opacity || 1}; + transition: opacity 0.4s; ` const CommandListBox = styled(Box)` @@ -44,7 +46,7 @@ interface DocumentListPaneContentProps { items: DocumentListPaneItem[] layout?: GeneralPreviewLayoutKey loadingVariant?: LoadingVariant - onListChange: () => void + onEndReached: () => void onRetry?: () => void paneTitle: string searchInputElement: HTMLInputElement | null @@ -78,7 +80,7 @@ export function DocumentListPaneContent(props: DocumentListPaneContentProps) { items, layout, loadingVariant, - onListChange, + onEndReached, onRetry, paneTitle, searchInputElement, @@ -89,14 +91,14 @@ export function DocumentListPaneContent(props: DocumentListPaneContentProps) { const {collapsed: layoutCollapsed} = usePaneLayout() const {collapsed, index} = usePane() - const [shouldRender, setShouldRender] = useState(false) + const [shouldRender, setShouldRender] = useState(!collapsed) const {t} = useTranslation(structureLocaleNamespace) const handleEndReached = useCallback(() => { - if (isLoading || isLazyLoading || !shouldRender) return - - onListChange() - }, [isLazyLoading, isLoading, onListChange, shouldRender]) + if (shouldRender) { + onEndReached() + } + }, [onEndReached, shouldRender]) useEffect(() => { if (collapsed) return undefined @@ -224,7 +226,7 @@ export function DocumentListPaneContent(props: DocumentListPaneContentProps) { const key = `${index}-${collapsed}` return ( - + = {} + +export const ENABLE_LRU_MEMO = true diff --git a/packages/sanity/src/structure/panes/documentList/listenSearchQuery.ts b/packages/sanity/src/structure/panes/documentList/listenSearchQuery.ts index 25dd3498d83..f7aafc97684 100644 --- a/packages/sanity/src/structure/panes/documentList/listenSearchQuery.ts +++ b/packages/sanity/src/structure/panes/documentList/listenSearchQuery.ts @@ -1,25 +1,29 @@ import {type SanityClient} from '@sanity/client' +import QuickLRU from 'quick-lru' import { asyncScheduler, defer, + EMPTY, map, merge, mergeMap, type Observable, of, + type OperatorFunction, partition, + pipe, share, take, throttleTime, throwError, timer, } from 'rxjs' +import {tap} from 'rxjs/operators' import 
{exhaustMapWithTrailing} from 'rxjs-exhaustmap-with-trailing' import {createSearch, getSearchableTypes, type SanityDocumentLike, type Schema} from 'sanity' import {getExtendedProjection} from '../../structureBuilder/util/getExtendedProjection' -// FIXME -// eslint-disable-next-line boundaries/element-types +import {ENABLE_LRU_MEMO} from './constants' import {type SortOrder} from './types' interface ListenQueryOptions { @@ -35,7 +39,12 @@ interface ListenQueryOptions { enableLegacySearch?: boolean } -export function listenSearchQuery(options: ListenQueryOptions): Observable<SanityDocumentLike[]> { +export interface SearchQueryResult { + fromCache: boolean + documents: SanityDocumentLike[] +} + +export function listenSearchQuery(options: ListenQueryOptions): Observable<SearchQueryResult> { const { client, schema, @@ -82,6 +91,8 @@ export function listenSearchQuery(options: ListenQueryOptions): Observable<SanityDocumentLike[]> { ev.type === 'welcome') + const memoKey = JSON.stringify({filter, limit, params, searchQuery, sort, staticTypeNames}) + return merge( welcome$.pipe(take(1)), mutationAndReconnect$.pipe(throttleTime(1000, asyncScheduler, {leading: true, trailing: true})), @@ -146,5 +157,37 @@ export function listenSearchQuery(options: ListenQueryOptions): Observable<SanityDocumentLike[]> { + ENABLE_LRU_MEMO + ? pipe( + memoLRU(memoKey, lru), + map((memo) => ({ + fromCache: memo.type === 'memo', + documents: memo.value, + })), + ) + : map((documents) => ({ + fromCache: false, + documents, + })), ) } + +const lru = new QuickLRU<string, SanityDocumentLike[]>({maxSize: 100}) +function memoLRU<T>( + memoKey: string, + cache: QuickLRU<string, T>, +): OperatorFunction<T, {type: 'memo'; value: T} | {type: 'value'; value: T}> { + return (input$: Observable<T>) => + merge( + defer(() => + cache.has(memoKey) ? of({type: 'memo' as const, value: cache.get(memoKey)!}) : EMPTY, + ), + input$.pipe( + tap((result) => cache.set(memoKey, result)), + map((value) => ({ + type: 'value' as const, + value: value, + })), + ), + ) +} diff --git a/packages/sanity/src/structure/panes/documentList/types.ts b/packages/sanity/src/structure/panes/documentList/types.ts index fe6fbaa6023..3d8e2496e7d 100644 --- a/packages/sanity/src/structure/panes/documentList/types.ts +++ b/packages/sanity/src/structure/panes/documentList/types.ts @@ -11,10 +11,4 @@ export type SortOrder = { extendedProjection?: string } -export interface QueryResult { - error: {message: string} | null - onRetry?: () => void - result: {documents: SanityDocumentLike[]} | null -} - -export type LoadingVariant = 'spinner' | 'initial' +export type LoadingVariant = 'spinner' | 'initial' | 'subtle' diff --git a/packages/sanity/src/structure/panes/documentList/useDocumentList.ts b/packages/sanity/src/structure/panes/documentList/useDocumentList.ts index 11a10c9d4f2..320b31dc8cf 100644 --- a/packages/sanity/src/structure/panes/documentList/useDocumentList.ts +++ b/packages/sanity/src/structure/panes/documentList/useDocumentList.ts @@ -1,6 +1,19 @@ -import {useCallback, useEffect, useMemo, useState} from 'react' -import {concat, fromEvent, merge, of, Subject, throwError} from 'rxjs' -import {catchError, map, mergeMap, scan, startWith, take} from 'rxjs/operators' +import {observableCallback} from 'observable-callback' +import {useMemo} from 'react' +import {useObservable} from 'react-rx' +import {concat, fromEvent, merge, of} from 'rxjs' +import { + catchError, + filter, + map, + mergeMap, + scan, + share, + shareReplay, + take, + takeUntil, + withLatestFrom, +} from 'rxjs/operators' import { DEFAULT_STUDIO_CLIENT_OPTIONS, useClient, @@ -12,15 +25,7 @@ import { import {DEFAULT_ORDERING, FULL_LIST_LIMIT, PARTIAL_PAGE_LIMIT} from './constants' import {findStaticTypesInFilter, removePublishedWithDrafts} from './helpers' 
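// Editor's note: an illustrative, self-contained sketch of the "cached-then-live"
// pattern that memoLRU (above) implements and that useDocumentList (below) consumes.
// The names here (cache, cachedThenLive) are hypothetical and not part of the patch:
// subscribers synchronously receive the last known value for a key, then live values,
// and each live value refreshes the cache for the next subscriber.
import QuickLRU from 'quick-lru'
import {defer, EMPTY, merge, of, type Observable} from 'rxjs'
import {map, tap} from 'rxjs/operators'

const cache = new QuickLRU<string, string[]>({maxSize: 100})

function cachedThenLive(key: string, live$: Observable<string[]>) {
  return merge(
    // replay the cached value first, if we have one for this key
    defer(() => (cache.has(key) ? of({fromCache: true, value: cache.get(key)!}) : EMPTY)),
    // then forward live emissions, writing each one back to the cache
    live$.pipe(
      tap((value) => cache.set(key, value)),
      map((value) => ({fromCache: false, value})),
    ),
  )
}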
import {listenSearchQuery} from './listenSearchQuery' -import {type DocumentListPaneItem, type QueryResult, type SortOrder} from './types' - -const EMPTY_ARRAY: [] = [] - -const INITIAL_STATE: QueryResult = { - error: null, - onRetry: undefined, - result: null, -} +import {type DocumentListPaneItem, type SortOrder} from './types' interface UseDocumentListOpts { apiVersion?: string @@ -32,25 +37,30 @@ interface UseDocumentListOpts { interface DocumentListState { error: {message: string} | null - hasMaxItems?: boolean - isLazyLoading: boolean + isLoadingFullList: boolean isLoading: boolean - isSearchReady: boolean + fromCache?: boolean items: DocumentListPaneItem[] - onListChange: () => void - onRetry?: () => void } -const INITIAL_QUERY_RESULTS: QueryResult = { - result: null, +const INITIAL_QUERY_STATE: DocumentListState = { error: null, + isLoading: true, + isLoadingFullList: false, + fromCache: false, + items: [], +} + +interface UseDocumentListHookValue extends DocumentListState { + onRetry: () => void + onLoadFullList: () => void } /** * @internal */ -export function useDocumentList(opts: UseDocumentListOpts): DocumentListState { - const {filter, params: paramsProp, sortOrder, searchQuery, apiVersion} = opts +export function useDocumentList(opts: UseDocumentListOpts): UseDocumentListHookValue { + const {filter: searchFilter, params: paramsProp, sortOrder, searchQuery, apiVersion} = opts const client = useClient({ ...DEFAULT_STUDIO_CLIENT_OPTIONS, apiVersion: apiVersion || DEFAULT_STUDIO_CLIENT_OPTIONS.apiVersion, @@ -59,172 +69,139 @@ export function useDocumentList(opts: UseDocumentListOpts): DocumentListState { const schema = useSchema() const maxFieldDepth = useSearchMaxFieldDepth() - const [resultState, setResult] = useState(INITIAL_STATE) - const {onRetry, error, result} = resultState - - const documents = result?.documents - - // Filter out published documents that have drafts to avoid duplicates in the list. - const items = useMemo( - () => (documents ? removePublishedWithDrafts(documents) : EMPTY_ARRAY), - [documents], - ) - - // A state variable to keep track of whether we are currently lazy loading the list. - // This is used to determine whether we should show the loading spinner at the bottom of the list. - const [isLazyLoading, setIsLazyLoading] = useState(false) - - // A state to keep track of whether we have fetched the full list of documents. - const [hasFullList, setHasFullList] = useState(false) - - // A state to keep track of whether we should fetch the full list of documents. - const [shouldFetchFullList, setShouldFetchFullList] = useState(false) - // Get the type name from the filter, if it is a simple type filter. const typeNameFromFilter = useMemo( - () => findStaticTypesInFilter(filter, paramsProp), - [filter, paramsProp], + () => findStaticTypesInFilter(searchFilter, paramsProp), + [searchFilter, paramsProp], ) - // We can't have the loading state as part of the result state, since the loading - // state would be updated whenever a mutation is performed in a document in the list. - // Instead, we determine if the list is loading by checking if the result is null. - // The result is null when: - // 1. We are making the initial request - // 2. The user has performed a search or changed the sort order - const isLoading = result === null && !error - - // A flag to indicate whether we have reached the maximum number of documents. 
- const hasMaxItems = documents?.length === FULL_LIST_LIMIT - - // This function is triggered when the user has scrolled to the bottom of the list - // and we need to fetch more items. - const onListChange = useCallback(() => { - if (isLoading || hasFullList || shouldFetchFullList) return - - setShouldFetchFullList(true) - }, [isLoading, hasFullList, shouldFetchFullList]) - - const handleSetResult = useCallback( - (res: QueryResult) => { - if (res.error) { - setResult(res) - return - } - - const documentsLength = res.result?.documents?.length || 0 - const isLoadingMoreItems = !res.error && res?.result === null && shouldFetchFullList - - // 1. When the result is null and shouldFetchFullList is true, we are loading _more_ items. - // In this case, we want to wait for the next result and set the isLazyLoading state to true. - if (isLoadingMoreItems) { - setIsLazyLoading(true) - return - } - - // 2. If the result is not null, and less than the partial page limit, we know that - // we have fetched the full list of documents. In this case, we want to set the - // hasFullList state to true to prevent further requests. - if (documentsLength < PARTIAL_PAGE_LIMIT && documentsLength !== 0 && !shouldFetchFullList) { - setHasFullList(true) - } - - // 3. If the result is null, we are loading items. In this case, we want to - // wait for the next result. - if (res?.result === null) { - setResult((prev) => ({...(prev.error ? res : prev)})) - return - } - - // 4. Finally, set the result - setIsLazyLoading(false) - setResult(res) - }, - [shouldFetchFullList], - ) + const [onRetry$, onRetry] = useMemo(() => observableCallback(), []) + const [onFetchFullList$, onLoadFullList] = useMemo(() => observableCallback(), []) const queryResults$ = useMemo(() => { - const onRetry$ = new Subject() - const _onRetry = () => onRetry$.next() - - const limit = shouldFetchFullList ? FULL_LIST_LIMIT : PARTIAL_PAGE_LIMIT - const sort = sortOrder || DEFAULT_ORDERING - - return listenSearchQuery({ + const listenSearchQueryArgs = { client, - filter, - limit, + filter: searchFilter, + limit: PARTIAL_PAGE_LIMIT, params: paramsProp, schema, searchQuery: searchQuery || '', - sort, + sort: sortOrder || DEFAULT_ORDERING, staticTypeNames: typeNameFromFilter, maxFieldDepth, enableLegacySearch, - }).pipe( - map((results) => ({ - result: {documents: results}, - error: null, - })), - startWith(INITIAL_QUERY_RESULTS), - catchError((err) => { - if (err instanceof ProgressEvent) { - // todo: hack to work around issue with get-it (used by sanity/client) that propagates connection errors as ProgressEvent instances. 
This if-block can be removed once @sanity/client is par with a version of get-it that includes this fix: https://github.com/sanity-io/get-it/pull/127 - return throwError(() => new Error(`Request error`)) - } - return throwError(() => err) - }), - catchError((err, caught$) => { + } + + const partialList$ = listenSearchQuery(listenSearchQueryArgs).pipe( + shareReplay({refCount: true, bufferSize: 1}), + ) + + // we want to fetch the full list if the last result of the partial list is at the limit + const fullList$ = onFetchFullList$.pipe( + withLatestFrom(partialList$), + filter(([, result]) => result?.documents.length === PARTIAL_PAGE_LIMIT), + // we want to set up the full list listener only once + take(1), + mergeMap(() => + concat( + of({type: 'loadFullList' as const}), + listenSearchQuery({...listenSearchQueryArgs, limit: FULL_LIST_LIMIT}).pipe( + map((result) => ({type: 'result' as const, result})), + ), + ), + ), + share(), + ) + + // The combined search results from both partial page and full list + return merge( + partialList$.pipe( + map((result) => ({ + type: 'result' as const, + result, + })), + // when the full list listener kicks off, we want to stop the partial list listener + takeUntil(fullList$), + ), + fullList$, + ).pipe( + catchError((err: unknown, caught$) => { return concat( - of({result: null, error: err}), + of({type: 'error' as const, error: safeError(err)}), merge(fromEvent(window, 'online'), onRetry$).pipe( take(1), mergeMap(() => caught$), ), ) }), - scan((prev, next) => ({...prev, ...next, onRetry: _onRetry})), + scan((prev, event) => { + if (event.type === 'error') { + return { + ...prev, + error: event.error, + } + } + if (event.type === 'result') { + return { + ...prev, + error: null, + fromCache: event.result.fromCache, + isLoading: false, + items: removePublishedWithDrafts(event.result.documents), + isLoadingFullList: false, + } + } + if (event.type === 'loadFullList') { + return { + ...prev, + error: null, + isLoadingFullList: true, + } + } + throw new Error('Unexpected') + }, INITIAL_QUERY_STATE), ) }, [ - shouldFetchFullList, - sortOrder, client, - filter, + searchFilter, paramsProp, schema, searchQuery, + sortOrder, typeNameFromFilter, maxFieldDepth, enableLegacySearch, + onFetchFullList$, + onRetry$, ]) - useEffect(() => { - const sub = queryResults$.subscribe(handleSetResult) - - return () => { - sub.unsubscribe() - } - }, [handleSetResult, queryResults$]) - - const reset = useCallback(() => { - setHasFullList(false) - setIsLazyLoading(false) - setResult(INITIAL_STATE) - setShouldFetchFullList(false) - }, []) - - useEffect(() => { - reset() - }, [reset, filter, paramsProp, sortOrder, searchQuery]) + const {error, items, isLoading, fromCache, isLoadingFullList} = useObservable( + queryResults$, + INITIAL_QUERY_STATE, + ) return { error, - hasMaxItems, - isLazyLoading, + onRetry, isLoading, - isSearchReady: !error, items, - onListChange, - onRetry, + fromCache, + onLoadFullList, + isLoadingFullList, + } +} + +// todo: candidate for re-use +const nonErrorThrownWarning = `[WARNING: This was thrown as a non-error. 
Only Error instances should be thrown]` +function safeError(thrown: unknown): Error { + if (thrown instanceof Error) { + return thrown + } + if (typeof thrown === 'object' && thrown !== null) { + if ('message' in thrown && typeof thrown.message === 'string') { + return new Error(`${thrown.message} ${nonErrorThrownWarning}`) + } + return new Error(`${String(thrown)} ${nonErrorThrownWarning}`) } + return new Error(`${String(thrown)} ${nonErrorThrownWarning}`) } diff --git a/packages/sanity/test/manifest/extractManifest.test.ts b/packages/sanity/test/manifest/extractManifest.test.ts new file mode 100644 index 00000000000..36cabadb2e9 --- /dev/null +++ b/packages/sanity/test/manifest/extractManifest.test.ts @@ -0,0 +1,990 @@ +/* eslint-disable camelcase */ +import {describe, expect, test} from '@jest/globals' +import {defineArrayMember, defineField, defineType} from '@sanity/types' + +import {extractManifestSchemaTypes} from '../../src/_internal/manifest/extractWorkspaceManifest' +import {createSchema} from '../../src/core' + +describe('Extract studio manifest', () => { + describe('serialize schema for manifest', () => { + test('extracted schema should only include user defined types (and no built-in types)', () => { + const documentType = 'basic' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: documentType, + type: 'document', + fields: [defineField({name: 'title', type: 'string'})], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + expect(extracted.map((v) => v.name)).toStrictEqual([documentType]) + }) + + test('indicate conditional for function values on hidden and readOnly fields', () => { + const documentType = 'basic' + + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: documentType, + type: 'document', + readOnly: true, + hidden: false, + fields: [ + defineField({ + name: 'string', + type: 'string', + hidden: () => true, + readOnly: () => false, + }), + ], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + const serializedDoc = extracted.find((serialized) => serialized.name === documentType) + expect(serializedDoc).toEqual({ + type: 'document', + name: 'basic', + readOnly: true, + hidden: false, + fields: [ + { + name: 'string', + type: 'string', + hidden: 'conditional', + readOnly: 'conditional', + }, + ], + }) + }) + + test('should omit known non-serializable schema props ', () => { + const documentType = 'remove-props' + + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + //include + name: documentType, + type: 'document', + title: 'My document', + description: 'Stuff', + deprecated: { + reason: 'old', + }, + options: { + custom: 'value', + }, + initialValue: {title: 'Default'}, + liveEdit: true, + + //omit + icon: () => 'remove-icon', + groups: [{name: 'groups-are-removed'}], + __experimental_omnisearch_visibility: true, + __experimental_search: [ + { + path: 'title', + weight: 100, + }, + ], + __experimental_formPreviewTitle: true, + components: { + field: () => 'remove-components', + }, + orderings: [ + {name: 'remove-orderings', title: '', by: [{field: 'title', direction: 'desc'}]}, + ], + fields: [ + defineField({ + name: 'string', + type: 'string', + group: 'groups-are-removed', + }), + ], + preview: { + select: {title: 'remove-preview'}, + }, + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + const serializedDoc = extracted.find((serialized) => serialized.name === documentType) + expect(serializedDoc).toEqual({ + 
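// editor's note: the expectation below is the definition above minus the members the
// extractor drops as non-serializable — icon, groups, components, orderings, preview,
// the __experimental_* flags, and the per-field group assignment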
type: 'document', + name: documentType, + title: 'My document', + description: 'Stuff', + deprecated: { + reason: 'old', + }, + options: { + custom: 'value', + }, + initialValue: {title: 'Default'}, + liveEdit: true, + fields: [ + { + name: 'string', + type: 'string', + }, + ], + }) + }) + + test('schema should include most userland properties', () => { + const documentType = 'basic' + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const recursiveObject: any = { + repeat: 'string', + } + recursiveObject.recurse = recursiveObject + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const customization: any = { + recursiveObject, // this one will be cut off at max-depth + serializableProp: 'dummy', + nonSerializableProp: () => {}, + options: { + serializableOption: true, + nonSerializableOption: () => {}, + nested: { + serializableOption: 1, + nonSerializableOption: () => {}, + }, + }, + } + + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: documentType, + type: 'document', + fields: [ + defineField({ + title: 'Nested', + name: 'nested', + type: 'object', + fields: [ + defineField({ + title: 'Nested inline string', + name: 'nestedString', + type: 'string', + ...customization, + }), + ], + ...customization, + }), + ], + ...customization, + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + + const expectedCustomProps = { + serializableProp: 'dummy', + options: { + serializableOption: true, + nested: { + serializableOption: 1, + }, + }, + recursiveObject: { + recurse: { + recurse: { + recurse: { + repeat: 'string', + }, + repeat: 'string', + }, + repeat: 'string', + }, + repeat: 'string', + }, + } + + const serializedDoc = extracted.find((serialized) => serialized.name === documentType) + expect(serializedDoc).toEqual({ + type: 'document', + name: 'basic', + fields: [ + { + name: 'nested', + type: 'object', + fields: [ + { + name: 'nestedString', + title: 'Nested inline string', + type: 'string', + ...expectedCustomProps, + }, + ], + ...expectedCustomProps, + }, + ], + ...expectedCustomProps, + }) + }) + + test('should serialize fieldset config', () => { + const documentType = 'fieldsets' + + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: documentType, + type: 'document', + fields: [ + defineField({ + name: 'string', + type: 'string', + }), + ], + preview: { + select: {title: 'title'}, + prepare: () => ({ + title: 'remove-prepare', + }), + }, + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + const serializedDoc = extracted.find((serialized) => serialized.name === documentType) + expect(serializedDoc).toEqual({ + type: 'document', + name: documentType, + fields: [ + { + name: 'string', + type: 'string', + }, + ], + }) + }) + + test('serialize fieldless types', () => { + const documentType = 'fieldless-types' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + title: 'Some document', + name: documentType, + type: 'document', + fields: [ + defineField({title: 'String field', name: 'string', type: 'string'}), + defineField({title: 'Text field', name: 'text', type: 'text'}), + defineField({title: 'Number field', name: 'number', type: 'number'}), + defineField({title: 'Boolean field', name: 'boolean', type: 'boolean'}), + defineField({title: 'Date field', name: 'date', type: 'date'}), + defineField({title: 'Datetime field', name: 'datetime', type: 'datetime'}), + defineField({title: 'Geopoint field', name: 'geopoint', type: 
'geopoint'}), + defineField({title: 'Basic image field', name: 'image', type: 'image'}), + defineField({title: 'Basic file field', name: 'file', type: 'file'}), + defineField({title: 'Slug field', name: 'slug', type: 'slug'}), + defineField({title: 'URL field', name: 'url', type: 'url'}), + ], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + + const serializedDoc = extracted.find((serialized) => serialized.name === documentType) + expect(serializedDoc).toEqual({ + fields: [ + {name: 'string', title: 'String field', type: 'string'}, + {name: 'text', title: 'Text field', type: 'text'}, + {name: 'number', title: 'Number field', type: 'number'}, + {name: 'boolean', title: 'Boolean field', type: 'boolean'}, + {name: 'date', title: 'Date field', type: 'date'}, + {name: 'datetime', title: 'Datetime field', type: 'datetime'}, + {name: 'geopoint', title: 'Geopoint field', type: 'geopoint'}, + {name: 'image', title: 'Basic image field', type: 'image'}, + {name: 'file', title: 'Basic file field', type: 'file'}, + { + name: 'slug', + title: 'Slug field', + type: 'slug', + validation: [{level: 'error', rules: [{flag: 'custom'}]}], + }, + { + name: 'url', + title: 'URL field', + type: 'url', + validation: [ + { + level: 'error', + rules: [ + { + constraint: { + options: { + allowCredentials: false, + allowRelative: false, + relativeOnly: false, + scheme: ['/^http$/', '/^https$/'], + }, + }, + flag: 'uri', + }, + ], + }, + ], + }, + ], + name: documentType, + title: 'Some document', + type: 'document', + }) + }) + + test('serialize types with fields', () => { + const documentType = 'field-types' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + fields: [ + { + name: 'existingType', + type: documentType, + }, + { + fields: [ + { + name: 'nestedString', + title: 'Nested inline string', + type: 'string', + }, + { + fields: [ + { + name: 'inner', + title: 'Inner', + type: 'number', + }, + ], + name: 'nestedTwice', + title: 'Child object', + type: 'object', + }, + ], + name: 'nested', + title: 'Nested', + type: 'object', + }, + { + fields: [ + { + name: 'title', + title: 'Image title', + type: 'string', + }, + ], + name: 'image', + type: 'image', + }, + { + fields: [ + { + name: 'title', + title: 'File title', + type: 'string', + }, + ], + name: 'file', + type: 'file', + }, + ], + name: documentType, + type: 'document', + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + + const serializedDoc = extracted.find((serialized) => serialized.name === documentType) + expect(serializedDoc).toEqual({ + fields: [ + { + name: 'existingType', + type: 'field-types', + }, + + { + fields: [ + { + name: 'nestedString', + title: 'Nested inline string', + type: 'string', + }, + { + fields: [ + { + name: 'inner', + type: 'number', + }, + ], + name: 'nestedTwice', + title: 'Child object', + type: 'object', + }, + ], + name: 'nested', + type: 'object', + }, + { + fields: [ + { + name: 'title', + title: 'Image title', + type: 'string', + }, + ], + name: 'image', + type: 'image', + }, + { + fields: [ + { + name: 'title', + title: 'File title', + type: 'string', + }, + ], + name: 'file', + type: 'file', + }, + ], + name: documentType, + type: 'document', + }) + }) + + test('serialize array-like fields (portable text tested separately)', () => { + const documentType = 'all-types' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + title: 'Basic doc', + name: documentType, + type: 'document', + fields: [ + defineField({ + title: 'String 
array', + name: 'stringArray', + type: 'array', + of: [{type: 'string'}], + }), + defineField({ + title: 'Number array', + name: 'numberArray', + type: 'array', + of: [{type: 'number'}], + }), + defineField({ + title: 'Boolean array', + name: 'booleanArray', + type: 'array', + of: [{type: 'boolean'}], + }), + defineField({ + name: 'objectArray', + type: 'array', + of: [ + defineArrayMember({ + title: 'Anonymous object item', + type: 'object', + fields: [ + defineField({ + name: 'itemTitle', + type: 'string', + }), + ], + }), + defineArrayMember({ + type: 'object', + title: 'Inline named object item', + name: 'item', + fields: [ + defineField({ + name: 'otherTitle', + type: 'string', + }), + ], + }), + defineArrayMember({ + title: 'Existing type object item', + type: documentType, + }), + ], + }), + ], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + + const serializedDoc = extracted.find((serialized) => serialized.name === documentType) + expect(serializedDoc).toEqual({ + fields: [ + { + name: 'stringArray', + of: [{type: 'string'}], + title: 'String array', + type: 'array', + }, + { + name: 'numberArray', + of: [{type: 'number'}], + title: 'Number array', + type: 'array', + }, + { + name: 'booleanArray', + of: [{type: 'boolean'}], + title: 'Boolean array', + type: 'array', + }, + { + name: 'objectArray', + of: [ + { + title: 'Anonymous object item', + type: 'object', + fields: [{name: 'itemTitle', type: 'string'}], + }, + { + fields: [{name: 'otherTitle', type: 'string'}], + title: 'Inline named object item', + type: 'object', + name: 'item', + }, + { + title: 'Existing type object item', + type: 'all-types', + }, + ], + type: 'array', + }, + ], + name: 'all-types', + title: 'Basic doc', + type: 'document', + }) + }) + + test('serialize array with type reference and overridden typename', () => { + const arrayType = 'someArray' + const objectBaseType = 'someObject' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: objectBaseType, + type: 'object', + fields: [ + defineField({ + name: 'title', + type: 'string', + }), + ], + }), + defineType({ + name: arrayType, + type: 'array', + of: [{type: objectBaseType, name: 'override'}], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + + const serializedDoc = extracted.find((serialized) => serialized.name === arrayType) + expect(serializedDoc).toEqual({ + name: arrayType, + of: [{title: 'Some Object', type: objectBaseType, name: 'override'}], + type: 'array', + }) + }) + + test('serialize schema with indirectly recursive structure', () => { + const arrayType = 'someArray' + const objectBaseType = 'someObject' + const otherObjectType = 'other' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: objectBaseType, + type: 'object', + fields: [ + defineField({ + name: 'recurse', + type: otherObjectType, + }), + ], + }), + defineType({ + name: otherObjectType, + type: 'object', + fields: [ + defineField({ + name: 'recurse2', + type: arrayType, + }), + ], + }), + defineType({ + name: arrayType, + type: 'array', + of: [{type: objectBaseType}], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + + expect(extracted).toEqual([ + { + fields: [{name: 'recurse', type: 'other'}], + name: 'someObject', + type: 'object', + }, + { + fields: [{name: 'recurse2', type: 'someArray'}], + name: 'other', + type: 'object', + }, + { + name: 'someArray', + of: [{type: 'someObject'}], + type: 'array', + }, + ]) + }) + + test('serialize portable 
text field', () => { + const documentType = 'pt' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: documentType, + type: 'document', + fields: [ + defineField({ + title: 'Portable text', + name: 'pt', + type: 'array', + of: [ + defineArrayMember({ + title: 'Block', + name: 'block', + type: 'block', + of: [ + defineField({ + title: 'Inline block', + name: 'inlineBlock', + type: 'object', + fields: [ + defineField({ + title: 'Inline value', + name: 'value', + type: 'string', + }), + ], + }), + ], + marks: { + annotations: [ + defineField({ + title: 'Annotation', + name: 'annotation', + type: 'object', + fields: [ + defineField({ + title: 'Annotation value', + name: 'value', + type: 'string', + }), + ], + }), + ], + decorators: [{title: 'Custom mark', value: 'custom'}], + }, + lists: [{value: 'bullet', title: 'Bullet list'}], + styles: [{value: 'customStyle', title: 'Custom style'}], + }), + ], + }), + ], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + + const serializedDoc = extracted.find((serialized) => serialized.name === documentType) + expect(serializedDoc).toEqual({ + fields: [ + { + name: 'pt', + of: [ + { + lists: [{title: 'Bullet list', value: 'bullet'}], + marks: { + annotations: [ + { + fields: [{name: 'value', title: 'Annotation value', type: 'string'}], + name: 'annotation', + type: 'object', + }, + ], + decorators: [{title: 'Custom mark', value: 'custom'}], + }, + of: [ + { + fields: [{name: 'value', title: 'Inline value', type: 'string'}], + name: 'inlineBlock', + title: 'Inline block', + type: 'object', + }, + ], + styles: [ + {title: 'Normal', value: 'normal'}, + {title: 'Custom style', value: 'customStyle'}, + ], + type: 'block', + }, + ], + title: 'Portable text', + type: 'array', + }, + ], + name: 'pt', + type: 'document', + }) + }) + + test('serialize fields with references', () => { + const documentType = 'ref-types' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: documentType, + type: 'document', + fields: [ + defineField({ + title: 'Reference to', + name: 'reference', + type: 'reference', + to: [{type: documentType}], + }), + defineField({ + title: 'Cross dataset ref', + name: 'crossDatasetReference', + type: 'crossDatasetReference', + dataset: 'production', + studioUrl: () => 'cannot serialize studioUrl function', + to: [ + { + type: documentType, + preview: { + select: {title: 'title'}, + prepare: () => ({ + title: 'cannot serialize prepare function', + }), + }, + }, + ], + }), + defineField({ + title: 'Reference array', + name: 'refArray', + type: 'array', + of: [ + defineArrayMember({ + title: 'Reference to', + name: 'reference', + type: 'reference', + to: [{type: documentType}], + }), + ], + }), + ], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + + const serializedDoc = extracted.find((serialized) => serialized.name === documentType) + expect(serializedDoc).toEqual({ + fields: [ + { + name: 'reference', + title: 'Reference to', + to: [{type: documentType}], + type: 'reference', + }, + { + dataset: 'production', + name: 'crossDatasetReference', + title: 'Cross dataset ref', + type: 'crossDatasetReference', + to: [ + { + type: documentType, + preview: { + select: {title: 'title'}, + }, + }, + ], + }, + { + name: 'refArray', + of: [ + { + title: 'Reference to', + to: [{type: documentType}], + type: 'reference', + }, + ], + title: 'Reference array', + type: 'array', + }, + ], + name: documentType, + type: 'document', + }) + }) + + test('fieldsets 
and fieldset on fields is serialized', () => { + const documentType = 'basic' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: documentType, + type: 'document', + fieldsets: [ + { + name: 'test', + title: 'Test fieldset', + hidden: false, + readOnly: true, + options: { + collapsed: true, + }, + description: 'my fieldset', + }, + { + name: 'conditional', + hidden: () => true, + readOnly: () => true, + }, + ], + fields: [ + defineField({name: 'title', type: 'string', fieldset: 'test'}), + defineField({name: 'other', type: 'string', fieldset: 'conditional'}), + ], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + const serializedDoc = extracted.find((serialized) => serialized.name === documentType) + expect(serializedDoc).toEqual({ + fields: [ + { + fieldset: 'test', + name: 'title', + type: 'string', + }, + { + fieldset: 'conditional', + name: 'other', + type: 'string', + }, + ], + fieldsets: [ + { + description: 'my fieldset', + hidden: false, + name: 'test', + options: { + collapsed: true, + }, + readOnly: true, + title: 'Test fieldset', + }, + { + hidden: 'conditional', + name: 'conditional', + readOnly: 'conditional', + }, + ], + name: 'basic', + type: 'document', + }) + }) + + test('do not serialize default titles (default titles added by Schema.compile based on type/field name)', () => { + const documentType = 'basic-document' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: documentType, + type: 'document', + fieldsets: [ + {name: 'someFieldset'}, + { + name: 'conditional', + hidden: () => true, + readOnly: () => true, + }, + ], + fields: [ + defineField({name: 'title', type: 'string'}), + defineField({name: 'someField', type: 'array', of: [{type: 'string'}]}), + defineField({name: 'customTitleField', type: 'string', title: 'Custom'}), + ], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + const serializedDoc = extracted.find((serialized) => serialized.name === documentType) + expect(serializedDoc).toEqual({ + fields: [ + {name: 'title', type: 'string'}, + {name: 'someField', of: [{type: 'string'}], type: 'array'}, + {name: 'customTitleField', type: 'string', title: 'Custom'}, + ], + name: 'basic-document', + type: 'document', + }) + }) + }) +}) diff --git a/packages/sanity/test/manifest/extractManifestRestore.test.ts b/packages/sanity/test/manifest/extractManifestRestore.test.ts new file mode 100644 index 00000000000..a64ab1e8699 --- /dev/null +++ b/packages/sanity/test/manifest/extractManifestRestore.test.ts @@ -0,0 +1,205 @@ +import {describe, expect, test} from '@jest/globals' +import { + defineArrayMember, + defineField, + defineType, + type ObjectSchemaType, + type SchemaType, +} from '@sanity/types' +import pick from 'lodash/pick' + +import {extractManifestSchemaTypes} from '../../src/_internal/manifest/extractWorkspaceManifest' +import {createSchema} from '../../src/core' + +describe('Extract studio manifest', () => { + test('extracted schema types should be mappable to a createSchema compatible version', () => { + const documentType = 'basic' + const sourceSchema = createSchema({ + name: 'test', + types: [ + defineType({ + name: documentType, + type: 'document', + fields: [ + defineField({name: 'string', type: 'string'}), + defineField({name: 'text', type: 'text'}), + defineField({name: 'number', type: 'number'}), + defineField({name: 'boolean', type: 'boolean'}), + defineField({name: 'date', type: 'date'}), + defineField({name: 'datetime', type: 'datetime'}), + 
defineField({name: 'geopoint', type: 'geopoint'}), + defineField({name: 'image', type: 'image'}), + defineField({name: 'file', type: 'file'}), + defineField({name: 'slug', type: 'slug'}), + defineField({name: 'url', type: 'url'}), + defineField({name: 'object', type: documentType}), + defineField({ + type: 'object', + name: 'nestedObject', + fields: [{name: 'nestedString', type: 'string'}], + }), + defineField({ + type: 'image', + name: 'customImage', + fields: [{name: 'title', type: 'string'}], + }), + defineField({ + type: 'file', + name: 'customFile', + fields: [{name: 'title', type: 'string'}], + options: {storeOriginalFilename: true}, + }), + defineField({ + name: 'typeAliasArray', + type: 'array', + of: [{type: documentType}], + }), + defineField({ + name: 'stringArray', + type: 'array', + of: [{type: 'string'}], + }), + defineField({ + name: 'numberArray', + type: 'array', + of: [{type: 'number'}], + }), + defineField({ + name: 'booleanArray', + type: 'array', + of: [{type: 'boolean'}], + }), + defineField({ + name: 'objectArray', + type: 'array', + of: [ + defineArrayMember({ + type: 'object', + fields: [defineField({name: 'itemTitle', type: 'string'})], + }), + ], + }), + defineField({ + name: 'reference', + type: 'reference', + to: [{type: documentType}], + }), + defineField({ + name: 'crossDatasetReference', + type: 'crossDatasetReference', + dataset: 'production', + to: [ + { + type: documentType, + preview: {select: {title: 'title'}}, + }, + ], + }), + defineField({ + name: 'refArray', + type: 'array', + of: [ + defineArrayMember({ + name: 'reference', + type: 'reference', + to: [{type: documentType}], + }), + ], + }), + defineField({ + name: 'pt', + type: 'array', + of: [ + defineArrayMember({ + name: 'block', + type: 'block', + of: [ + defineField({ + name: 'inlineBlock', + type: 'object', + fields: [ + defineField({ + name: 'value', + type: 'string', + }), + ], + }), + ], + marks: { + annotations: [ + defineField({ + name: 'annotation', + type: 'object', + fields: [ + defineField({ + name: 'value', + type: 'string', + }), + ], + }), + ], + decorators: [{title: 'Custom mark', value: 'custom'}], + }, + lists: [{value: 'bullet', title: 'Bullet list'}], + styles: [{value: 'customStyle', title: 'Custom style'}], + }), + ], + }), + ], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(sourceSchema) + + const restoredSchema = createSchema({ + name: 'test', + types: extracted, + }) + + expect(restoredSchema._validation).toEqual([]) + expect(restoredSchema.getTypeNames().sort()).toEqual(sourceSchema.getTypeNames().sort()) + + const restoredDocument = restoredSchema.get(documentType) as ObjectSchemaType + const sourceDocument = sourceSchema.get(documentType) as ObjectSchemaType + + // this is not an exhaustive test (requires additional mapping to make validation, readOnly ect schema def compliant); + // it just asserts that a basic schema can be restored without crashing + expect(typeForComparison(restoredDocument)).toEqual(typeForComparison(sourceDocument)) + }) +}) + +function typeForComparison(_type: SchemaType, depth = 0): unknown { + const type = pick(_type, 'jsonType', 'name', 'title', 'fields', 'of', 'to') + + if (depth > 10) { + return undefined + } + + if ('to' in type) { + return { + ...type, + to: (type.to as SchemaType[]).map((item) => ({ + type: item.name, + })), + } + } + + if (type.jsonType === 'object' && type.fields) { + return { + ...type, + fields: type.fields.map((field) => ({ + ...field, + type: typeForComparison(field.type, depth + 1), + })), + } + 
} + if (type.jsonType === 'array' && 'of' in type) { + return { + ...type, + of: (type.of as SchemaType[]).map((item) => typeForComparison(item, depth + 1)), + } + } + + return type +} diff --git a/packages/sanity/test/manifest/extractManifestValidation.test.ts b/packages/sanity/test/manifest/extractManifestValidation.test.ts new file mode 100644 index 00000000000..9709c8ea554 --- /dev/null +++ b/packages/sanity/test/manifest/extractManifestValidation.test.ts @@ -0,0 +1,515 @@ +/* eslint-disable camelcase */ +import {describe, expect, test} from '@jest/globals' +import {defineField, defineType} from '@sanity/types' + +import {extractManifestSchemaTypes} from '../../src/_internal/manifest/extractWorkspaceManifest' +import {createSchema} from '../../src/core' + +describe('Extract studio manifest', () => { + describe('serialize validation rules', () => { + test('object validation rules', () => { + const docType = 'some-doc' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: docType, + type: 'document', + fields: [defineField({name: 'title', type: 'string'})], + validation: (rule) => [ + rule + .required() + .custom(() => 'doesnt-matter') + .warning('custom-warning'), + rule.custom(() => 'doesnt-matter').error('custom-error'), + rule.custom(() => 'doesnt-matter').info('custom-info'), + ], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + const validation = extracted.find((e) => e.name === docType)?.validation + expect(validation).toEqual([ + { + level: 'warning', + message: 'custom-warning', + rules: [{constraint: 'required', flag: 'presence'}, {flag: 'custom'}], + }, + { + level: 'error', + message: 'custom-error', + rules: [{flag: 'custom'}], + }, + { + level: 'info', + message: 'custom-info', + rules: [{flag: 'custom'}], + }, + ]) + }) + + test('array validation rules', () => { + const type = 'someArray' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: type, + type: 'array', + of: [{type: 'string'}], + validation: (rule) => [ + rule + .required() + .unique() + .min(1) + .max(10) + .length(10) + .custom(() => 'doesnt-matter'), + ], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + const validation = extracted.find((e) => e.name === type)?.validation + expect(validation).toEqual([ + { + level: 'error', + rules: [ + {constraint: 'required', flag: 'presence'}, + {constraint: 1, flag: 'min'}, + {constraint: 10, flag: 'max'}, + {constraint: 10, flag: 'length'}, + {flag: 'custom'}, + ], + }, + ]) + }) + + test('boolean validation rules', () => { + const type = 'someArray' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: type, + type: 'boolean', + validation: (rule) => [rule.required().custom(() => 'doesnt-matter')], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + const validation = extracted.find((e) => e.name === type)?.validation + expect(validation).toEqual([ + { + level: 'error', + rules: [{constraint: 'required', flag: 'presence'}, {flag: 'custom'}], + }, + ]) + }) + + test('date validation rules', () => { + const type = 'someDate' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: type, + type: 'date', + validation: (rule) => [ + rule + .required() + .min('2022-01-01') + .max('2022-01-02') + .custom(() => 'doesnt-matter'), + ], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + const validation = extracted.find((e) => e.name === type)?.validation + 
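// editor's note: min/max serialize to the ISO date strings passed to the rule, while
// custom validator functions collapse to a bare {flag: 'custom'} marker with no constraint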
expect(validation).toEqual([ + { + level: 'error', + rules: [ + {constraint: 'required', flag: 'presence'}, + {constraint: '2022-01-01', flag: 'min'}, + {constraint: '2022-01-02', flag: 'max'}, + {flag: 'custom'}, + ], + }, + ]) + }) + + test('image validation rules', () => { + const type = 'someImage' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: type, + type: 'image', + validation: (rule) => [ + rule + .required() + .assetRequired() + .custom(() => 'doesnt-matter'), + ], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + const validation = extracted.find((e) => e.name === type)?.validation + expect(validation).toEqual([ + { + level: 'error', + rules: [ + {constraint: 'required', flag: 'presence'}, + {constraint: {assetType: 'image'}, flag: 'assetRequired'}, + {flag: 'custom'}, + ], + }, + ]) + }) + + test('file validation rules', () => { + const type = 'someFile' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: type, + type: 'file', + validation: (rule) => [ + rule + .required() + .assetRequired() + .custom(() => 'doesnt-matter'), + ], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + const validation = extracted.find((e) => e.name === type)?.validation + expect(validation).toEqual([ + { + level: 'error', + rules: [ + {constraint: 'required', flag: 'presence'}, + {constraint: {assetType: 'file'}, flag: 'assetRequired'}, + {flag: 'custom'}, + ], + }, + ]) + }) + + test('number validation rules', () => { + const type = 'someNumber' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: type, + type: 'number', + validation: (rule) => [ + rule + .custom(() => 'doesnt-matter') + .required() + .min(1) + .max(2), + rule.integer().positive(), + rule.greaterThan(-4).negative(), + rule.precision(2).lessThan(5), + ], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + const validation = extracted.find((e) => e.name === type)?.validation + expect(validation).toEqual([ + { + level: 'error', + rules: [ + {flag: 'custom'}, + {constraint: 'required', flag: 'presence'}, + {constraint: 1, flag: 'min'}, + {constraint: 2, flag: 'max'}, + ], + }, + { + level: 'error', + rules: [{constraint: 0, flag: 'min'}], + }, + { + level: 'error', + rules: [ + {constraint: -4, flag: 'greaterThan'}, + {constraint: 0, flag: 'lessThan'}, + ], + }, + { + level: 'error', + rules: [ + {constraint: 2, flag: 'precision'}, + {constraint: 5, flag: 'lessThan'}, + ], + }, + ]) + }) + + test('reference validation rules', () => { + const type = 'someRef' + const docType = 'doc' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + type: 'document', + name: docType, + fields: [ + defineField({ + type: 'string', + name: 'title', + }), + ], + }), + defineType({ + name: type, + type: 'reference', + to: [{type: docType}], + validation: (rule) => rule.required().custom(() => 'doesnt-matter'), + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + const validation = extracted.find((e) => e.name === type)?.validation + expect(validation).toEqual([ + { + level: 'error', + rules: [{constraint: 'required', flag: 'presence'}, {flag: 'custom'}], + }, + ]) + }) + + test('slug validation rules', () => { + const type = 'someSlug' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: type, + type: 'slug', + validation: (rule) => rule.required().custom(() => 'doesnt-matter'), + }), + ], + }) + + const extracted = 
extractManifestSchemaTypes(schema) + const validation = extracted.find((e) => e.name === type)?.validation + expect(validation).toEqual([ + { + level: 'error', + rules: [ + { + flag: 'custom', // this is the default unique checking rule + }, + { + constraint: 'required', + flag: 'presence', + }, + { + flag: 'custom', + }, + ], + }, + ]) + }) + + test('string validation rules', () => { + const type = 'someString' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: type, + type: 'string', + validation: (rule) => [ + rule + .required() + .max(50) + .min(5) + .length(10) + .uppercase() + .lowercase() + .regex(/a+/, 'test', {name: 'yeah', invert: true}) + .regex(/a+/, {name: 'yeah', invert: true}) + .regex(/a+/, 'test') + .regex(/a+/) + .email() + .custom(() => 'doesnt-matter'), + ], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + const validation = extracted.find((e) => e.name === type)?.validation + expect(validation).toEqual([ + { + level: 'error', + rules: [ + {constraint: 'required', flag: 'presence'}, + {constraint: 50, flag: 'max'}, + {constraint: 5, flag: 'min'}, + {constraint: 10, flag: 'length'}, + {constraint: 'uppercase', flag: 'stringCasing'}, + {constraint: 'lowercase', flag: 'stringCasing'}, + { + constraint: { + invert: false, + name: 'test', + pattern: '/a+/', + }, + flag: 'regex', + }, + { + constraint: { + invert: true, + name: 'yeah', + pattern: '/a+/', + }, + flag: 'regex', + }, + { + constraint: { + invert: false, + name: 'test', + pattern: '/a+/', + }, + flag: 'regex', + }, + { + constraint: { + invert: false, + pattern: '/a+/', + }, + flag: 'regex', + }, + { + flag: 'custom', + }, + ], + }, + ]) + }) + + test('url validation rules', () => { + const type = 'someUrl' + const schema = createSchema({ + name: 'test', + types: [ + defineType({ + name: type, + type: 'url', + validation: (rule) => [ + rule.required().custom(() => 'doesnt-matter'), + rule.uri({scheme: 'ftp'}), + rule.uri({ + scheme: ['https'], + allowCredentials: true, + allowRelative: true, + relativeOnly: false, + }), + rule.uri({ + scheme: /^custom-protocol.*$/g, + }), + ], + }), + ], + }) + + const extracted = extractManifestSchemaTypes(schema) + const validation = extracted.find((e) => e.name === type)?.validation + expect(validation).toEqual([ + { + level: 'error', + rules: [ + { + constraint: { + options: { + allowCredentials: false, + allowRelative: false, + relativeOnly: false, + scheme: ['/^http$/', '/^https$/'], + }, + }, + flag: 'uri', + }, + { + constraint: 'required', + flag: 'presence', + }, + { + flag: 'custom', + }, + ], + }, + { + level: 'error', + rules: [ + { + constraint: { + options: { + allowCredentials: false, + allowRelative: false, + relativeOnly: false, + scheme: ['/^ftp$/'], + }, + }, + flag: 'uri', + }, + ], + }, + { + level: 'error', + rules: [ + { + constraint: { + options: { + allowCredentials: true, + allowRelative: true, + relativeOnly: false, + scheme: ['/^https$/'], + }, + }, + flag: 'uri', + }, + ], + }, + { + level: 'error', + rules: [ + { + constraint: { + options: { + allowCredentials: false, + allowRelative: false, + relativeOnly: false, + scheme: ['/^custom-protocol.*$/g'], + }, + }, + flag: 'uri', + }, + ], + }, + ]) + }) + }) +}) diff --git a/perf/efps/helpers/calculatePercentile.ts b/perf/efps/helpers/aggregateLatencies.ts similarity index 61% rename from perf/efps/helpers/calculatePercentile.ts rename to perf/efps/helpers/aggregateLatencies.ts index 54acbded118..e3f82202a77 100644 --- 
a/perf/efps/helpers/calculatePercentile.ts +++ b/perf/efps/helpers/aggregateLatencies.ts @@ -1,4 +1,6 @@ -export function calculatePercentile(numbers: number[], percentile: number): number { +import {type EfpsResult} from '../types' + +function calculatePercentile(numbers: number[], percentile: number): number { // Sort the array in ascending order const sorted = numbers.slice().sort((a, b) => a - b) @@ -19,3 +21,12 @@ export function calculatePercentile(numbers: number[], percentile: number): number { const fraction = index - lowerIndex return lowerValue + (upperValue - lowerValue) * fraction } + +export function aggregateLatencies(values: number[]): EfpsResult['latency'] { + return { + p50: calculatePercentile(values, 0.5), + p75: calculatePercentile(values, 0.75), + p90: calculatePercentile(values, 0.9), + p99: calculatePercentile(values, 0.99), + } +} diff --git a/perf/efps/helpers/measureBlockingTime.ts b/perf/efps/helpers/measureBlockingTime.ts new file mode 100644 index 00000000000..4208c69738f --- /dev/null +++ b/perf/efps/helpers/measureBlockingTime.ts @@ -0,0 +1,48 @@ +import {type Page} from 'playwright' + +const BLOCKING_TASK_THRESHOLD = 50 + +export function measureBlockingTime(page: Page): () => Promise<number> { + const idleGapLengthsPromise = page.evaluate(async () => { + const idleGapLengths: number[] = [] + let done = false + let last = performance.now() + + const handler = () => { + const current = performance.now() + idleGapLengths.push(current - last) + last = current + + if (done) return + requestAnimationFrame(handler) + } + + requestAnimationFrame(handler) + + await new Promise((resolve) => { + document.addEventListener('__blockingTimeFinish', resolve, {once: true}) + }) + done = true + + return idleGapLengths + }) + + async function getBlockingTime() { + await page.evaluate(() => { + document.dispatchEvent(new CustomEvent('__blockingTimeFinish')) + }) + + const idleGapLengths = await idleGapLengthsPromise + + const blockingTime = idleGapLengths + // only consider the gap lengths that are blocking + .filter((idleGapLength) => idleGapLength > BLOCKING_TASK_THRESHOLD) + // subtract the allowed time so we're only left with blocking time + .map((idleGapLength) => idleGapLength - BLOCKING_TASK_THRESHOLD) + .reduce((sum, next) => sum + next, 0) + + return blockingTime + } + + return getBlockingTime +} diff --git a/perf/efps/helpers/measureFpsForInput.ts b/perf/efps/helpers/measureFpsForInput.ts index 4b0c584c795..22310a92fb2 100644 --- a/perf/efps/helpers/measureFpsForInput.ts +++ b/perf/efps/helpers/measureFpsForInput.ts @@ -1,9 +1,28 @@ -import {type Locator} from 'playwright' +import {type Page} from 'playwright' import {type EfpsResult} from '../types' -import {calculatePercentile} from './calculatePercentile' +import {aggregateLatencies} from './aggregateLatencies' +import {measureBlockingTime} from './measureBlockingTime' -export async function measureFpsForInput(input: Locator): Promise<EfpsResult> { +interface MeasureFpsForInputOptions { + label?: string + page: Page + fieldName: string +} + +export async function measureFpsForInput({ + label, + fieldName, + page, +}: MeasureFpsForInputOptions): Promise<EfpsResult> { + const start = Date.now() + + const input = page + .locator( + `[data-testid="field-${fieldName}"] input[type="text"], ` + + `[data-testid="field-${fieldName}"] textarea`, + ) + .first() await input.waitFor({state: 'visible'}) const characters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' @@ -49,6 +68,8 @@ export async function measureFpsForInput(input: Locator): Promise<EfpsResult> {
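// Editor's sketch (hypothetical helper, not part of the patch): the blocking-time idea
// from measureBlockingTime in isolation. requestAnimationFrame normally fires roughly
// every 16ms, so any gap beyond the 50ms long-task threshold is attributed to
// main-thread blocking, and only the excess over the threshold is counted.
function blockingTimeFromGaps(gapsMs: number[], thresholdMs = 50): number {
  return gapsMs
    .filter((gap) => gap > thresholdMs) // keep only blocking gaps
    .map((gap) => gap - thresholdMs) // count only the excess over the threshold
    .reduce((sum, excess) => sum + excess, 0) // total blocking time in ms
}
// e.g. blockingTimeFromGaps([16, 120, 16, 70]) === (120 - 50) + (70 - 50) === 90
 await 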
input.pressSequentially(startingMarker) await new Promise((resolve) => setTimeout(resolve, 500)) + const getBlockingTime = measureBlockingTime(page) + for (const character of characters) { inputEvents.push({character, timestamp: Date.now()}) await input.press(character) @@ -57,6 +78,9 @@ export async function measureFpsForInput(input: Locator): Promise { await input.blur() + await page.evaluate(() => window.document.dispatchEvent(new CustomEvent('__finish'))) + + const blockingTime = await getBlockingTime() const renderEvents = await rendersPromise await new Promise((resolve) => setTimeout(resolve, 500)) @@ -74,9 +98,10 @@ export async function measureFpsForInput(input: Locator): Promise { return matchingEvent.timestamp - inputEvent.timestamp }) - const p50 = 1000 / calculatePercentile(latencies, 0.5) - const p75 = 1000 / calculatePercentile(latencies, 0.75) - const p90 = 1000 / calculatePercentile(latencies, 0.9) - - return {p50, p75, p90, latencies} + return { + latency: aggregateLatencies(latencies), + blockingTime, + label: label || fieldName, + runDuration: Date.now() - start, + } } diff --git a/perf/efps/helpers/measureFpsForPte.ts b/perf/efps/helpers/measureFpsForPte.ts index ffa233e9064..06f89786c1b 100644 --- a/perf/efps/helpers/measureFpsForPte.ts +++ b/perf/efps/helpers/measureFpsForPte.ts @@ -1,9 +1,22 @@ -import {type Locator} from 'playwright' +import {type Page} from 'playwright' import {type EfpsResult} from '../types' -import {calculatePercentile} from './calculatePercentile' +import {aggregateLatencies} from './aggregateLatencies' +import {measureBlockingTime} from './measureBlockingTime' -export async function measureFpsForPte(pteField: Locator): Promise { +interface MeasureFpsForPteOptions { + fieldName: string + label?: string + page: Page +} + +export async function measureFpsForPte({ + fieldName, + page, + label, +}: MeasureFpsForPteOptions): Promise { + const start = Date.now() + const pteField = page.locator(`[data-testid="field-${fieldName}"]`) const characters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' await pteField.waitFor({state: 'visible'}) @@ -24,14 +37,14 @@ export async function measureFpsForPte(pteField: Locator): Promise { }[] = [] const mutationObserver = new MutationObserver(() => { - const start = performance.now() + const textStart = performance.now() const textContent = el.textContent || '' - const end = performance.now() + const textEnd = performance.now() updates.push({ value: textContent, timestamp: Date.now(), - textContentProcessingTime: end - start, + textContentProcessingTime: textEnd - textStart, }) }) @@ -63,6 +76,7 @@ export async function measureFpsForPte(pteField: Locator): Promise { await contentEditable.pressSequentially(startingMarker) await new Promise((resolve) => setTimeout(resolve, 500)) + const getBlockingTime = measureBlockingTime(page) for (const character of characters) { inputEvents.push({character, timestamp: Date.now()}) await contentEditable.press(character) @@ -71,6 +85,7 @@ export async function measureFpsForPte(pteField: Locator): Promise { await contentEditable.blur() + const blockingTime = await getBlockingTime() const renderEvents = await rendersPromise const latencies = inputEvents.map((inputEvent) => { @@ -86,9 +101,10 @@ export async function measureFpsForPte(pteField: Locator): Promise { return matchingEvent.timestamp - inputEvent.timestamp - matchingEvent.textContentProcessingTime }) - const p50 = 1000 / calculatePercentile(latencies, 0.5) - const p75 = 1000 / calculatePercentile(latencies, 0.75) - 
const p90 = 1000 / calculatePercentile(latencies, 0.9) - - return {p50, p75, p90, latencies} + return { + latency: aggregateLatencies(latencies), + blockingTime, + label: label || fieldName, + runDuration: Date.now() - start, + } } diff --git a/perf/efps/index.ts b/perf/efps/index.ts index c7cb95e7703..6e7e864e962 100644 --- a/perf/efps/index.ts +++ b/perf/efps/index.ts @@ -2,6 +2,8 @@ // eslint-disable-next-line import/no-unassigned-import import 'dotenv/config' +import fs from 'node:fs' +import os from 'node:os' import path from 'node:path' import process from 'node:process' import {fileURLToPath} from 'node:url' @@ -11,16 +13,22 @@ import chalk from 'chalk' import Table from 'cli-table3' import Ora from 'ora' -// eslint-disable-next-line import/no-extraneous-dependencies import {exec} from './helpers/exec' import {runTest} from './runTest' import article from './tests/article/article' import recipe from './tests/recipe/recipe' -import singleString from './tests/singleString/singleString' import synthetic from './tests/synthetic/synthetic' +import {type EfpsAbResult, type EfpsResult, type EfpsTest} from './types' -const headless = true -const tests = [singleString, recipe, article, synthetic] +const WARNING_THRESHOLD = 0.2 +const TEST_ATTEMPTS = process.env.CI ? 3 : 1 + +const HEADLESS = true +// eslint-disable-next-line turbo/no-undeclared-env-vars +const ENABLE_PROFILER = process.env.ENABLE_PROFILER === 'true' +// eslint-disable-next-line turbo/no-undeclared-env-vars +const REFERENCE_TAG = process.env.REFERENCE_TAG || 'latest' +const TESTS = [article, recipe, synthetic] const projectId = process.env.VITE_PERF_EFPS_PROJECT_ID! const dataset = process.env.VITE_PERF_EFPS_DATASET! @@ -52,9 +60,58 @@ const resultsDir = path.join( .toLowerCase()}`, ) +const getSanityPkgPathForTag = async (tag: string) => { + const tmpDir = path.join(os.tmpdir(), `sanity-${tag}`) + + try { + await fs.promises.rm(tmpDir, {recursive: true}) + } catch { + // intentionally blank + } + await fs.promises.mkdir(tmpDir, {recursive: true}) + + await exec({ + command: `npm install sanity@${tag}`, + cwd: tmpDir, + spinner, + text: [`Downloading sanity@${tag} package…`, `Downloaded sanity@${tag}`], + }) + + return path.join(tmpDir, 'node_modules', 'sanity') +} + +const formatEfps = (latencyMs: number) => { + const efps = 1000 / latencyMs + const rounded = efps.toFixed(1) + + if (efps >= 100) return chalk.green('99.9+') + if (efps >= 60) return chalk.green(rounded) + if (efps >= 20) return chalk.yellow(rounded) + return chalk.red(rounded) +} + +const formatPercentageChange = (experiment: number, reference: number): string => { + if (experiment < 16 && reference < 16) return '-/-%' + const delta = (experiment - reference) / reference + if (!delta) return '-/-%' + const percentage = delta * 100 + const rounded = percentage.toFixed(1) + const sign = delta >= 0 ? 
'+' : '' + return `${sign}${rounded}%` +} + +// For markdown formatting without colors +const formatEfpsPlain = (latencyMs: number) => { + const efps = 1000 / latencyMs + const rounded = efps.toFixed(1) + + if (efps >= 100) return '99.9+' + return rounded +} + const spinner = Ora() -spinner.info(`Running ${tests.length} tests: ${tests.map((t) => `'${t.name}'`).join(', ')}`) +spinner.info(`Running ${TESTS.length} tests: ${TESTS.map((t) => `'${t.name}'`).join(', ')}`) await exec({ text: ['Building the monorepo…', 'Built monorepo'], @@ -69,47 +126,290 @@ await exec({ spinner, }) -const table = new Table({ - head: [chalk.bold('benchmark'), 'eFPS p50', 'eFPS p75', 'eFPS p90'].map((cell) => - chalk.cyan(cell), - ), -}) +const localSanityPkgPath = path.dirname(fileURLToPath(import.meta.resolve('sanity/package.json'))) -const formatFps = (fps: number) => { - const rounded = fps.toFixed(1) - if (fps >= 60) return chalk.green(rounded) - if (fps < 20) return chalk.red(rounded) - return chalk.yellow(rounded) +const referenceSanityPkgPath = await getSanityPkgPathForTag(REFERENCE_TAG) +const experimentSanityPkgPath = localSanityPkgPath + +function mergeResults(baseResults: EfpsResult[] | undefined, incomingResults: EfpsResult[]) { + if (!baseResults) return incomingResults + + return incomingResults.map((incomingResult, index) => { + const baseResult = baseResults[index] + + const incomingMedianLatency = incomingResult.latency.p50 + const baseMedianLatency = baseResult.latency.p50 + + // if the incoming test run performed better, we'll take that one + if (incomingMedianLatency < baseMedianLatency) return incomingResult + // otherwise, use the previous run + return baseResult + }) } -for (let i = 0; i < tests.length; i++) { - const test = tests[i] - const results = await runTest({ - prefix: `Running '${test.name}' [${i + 1}/${tests.length}]…`, - test, - resultsDir, - spinner, - client, - headless, - projectId, +const testResults: Array<{ + name: string + results: EfpsAbResult[] +}> = [] + +async function runAbTest(test: EfpsTest) { + let referenceResults: EfpsResult[] | undefined + let experimentResults: EfpsResult[] | undefined + + for (let attempt = 0; attempt < TEST_ATTEMPTS; attempt++) { + const attemptMessage = TEST_ATTEMPTS > 1 ? 
` [${attempt + 1}/${TEST_ATTEMPTS}]` : ''
+    const referenceMessage = `Running test '${test.name}' on \`sanity@${REFERENCE_TAG}\`${attemptMessage}`
+    spinner.start(referenceMessage)
+
+    referenceResults = mergeResults(
+      referenceResults,
+      await runTest({
+        key: 'reference',
+        test,
+        resultsDir,
+        client,
+        headless: HEADLESS,
+        enableProfiler: ENABLE_PROFILER,
+        projectId,
+        sanityPkgPath: referenceSanityPkgPath,
+        log: (message) => {
+          spinner.text = `${referenceMessage}: ${message}`
+        },
+      }),
+    )
+    spinner.succeed(`Ran test '${test.name}' on \`sanity@${REFERENCE_TAG}\`${attemptMessage}`)
+
+    const experimentMessage = `Running test '${test.name}' on this branch${attemptMessage}`
+    spinner.start(experimentMessage)
+    experimentResults = mergeResults(
+      experimentResults,
+      await runTest({
+        key: 'experiment',
+        test,
+        resultsDir,
+        client,
+        headless: HEADLESS,
+        enableProfiler: ENABLE_PROFILER,
+        projectId,
+        sanityPkgPath: experimentSanityPkgPath,
+        log: (message) => {
+          spinner.text = `${experimentMessage}: ${message}`
+        },
+      }),
+    )
+    spinner.succeed(`Ran test '${test.name}' on this branch${attemptMessage}`)
+  }
+
+  return experimentResults!.map(
+    (experimentResult, index): EfpsAbResult => ({
+      experiment: experimentResult,
+      reference: referenceResults![index],
+    }),
+  )
+}
+
+for (let i = 0; i < TESTS.length; i++) {
+  const test = TESTS[i]
+  testResults.push({
+    name: test.name,
+    results: await runAbTest(test),
  })
+}
+
+const comparisonTableCli = new Table({
+  head: ['Benchmark', 'reference', 'experiment', 'Δ (%)', ''].map((cell) => chalk.cyan(cell)),
+})
+
+const detailedInformationCliHead = [
+  'Benchmark',
+  'latency',
+  'p75',
+  'p90',
+  'p99',
+  'blocking time',
+  'test duration',
+].map((i) => chalk.cyan(i))

-  for (const result of results) {
-    table.push({
-      [[chalk.bold(test.name), result.label ? `(${result.label})` : ''].join(' ')]: [
-        formatFps(result.p50),
-        formatFps(result.p75),
-        formatFps(result.p90),
-      ],
-    })
+const referenceTableCli = new Table({head: detailedInformationCliHead})
+const experimentTableCli = new Table({head: detailedInformationCliHead})
+
+function isSignificantlyDifferent(experiment: number, reference: number) {
+  // both latencies are under ~16ms (a 60fps frame budget), so the field is
+  // already performing well and the difference is treated as noise
+  if (experiment < 16 && reference < 16) return false
+  const delta = (experiment - reference) / reference
+  return delta >= WARNING_THRESHOLD
+}
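To make the cut-off concrete, here is the same rule as a standalone snippet with illustrative numbers (the 16ms floor corresponds to a 60fps frame budget; the sample values below are made up, not measured results):

```ts
// Standalone restatement of the significance rule above, with assumed inputs.
const WARNING_THRESHOLD = 0.2 // flag regressions of 20% or more

function isSignificantlyDifferent(experiment: number, reference: number): boolean {
  // under ~16ms per keystroke, both runs already hit a 60fps budget
  if (experiment < 16 && reference < 16) return false
  return (experiment - reference) / reference >= WARNING_THRESHOLD
}

console.log(isSignificantlyDifferent(30, 20)) // true: +50% on an already-slow field
console.log(isSignificantlyDifferent(12, 9)) // false: both within the frame budget
```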
+
+for (const {name, results} of testResults) {
+  for (const {experiment, reference} of results) {
+    const significantlyDifferent = isSignificantlyDifferent(
+      experiment.latency.p50,
+      reference.latency.p50,
+    )
+
+    const sign = experiment.latency.p50 >= reference.latency.p50 ? '+' : ''
+    const msDifference = `${sign}${(experiment.latency.p50 - reference.latency.p50).toFixed(0)}ms`
+    const percentageChange = formatPercentageChange(experiment.latency.p50, reference.latency.p50)
+
+    const benchmarkName = `${name} (${experiment.label})`
+
+    comparisonTableCli.push([
+      benchmarkName,
+      `${formatEfps(reference.latency.p50)} efps (${reference.latency.p50.toFixed(0)}ms)`,
+      `${formatEfps(experiment.latency.p50)} efps (${experiment.latency.p50.toFixed(0)}ms)`,
+      `${significantlyDifferent ? chalk.red(msDifference) : msDifference} (${percentageChange})`,
+      significantlyDifferent ? '🔴' : '✅',
+    ])
+
+    referenceTableCli.push([
+      benchmarkName,
+      `${reference.latency.p50.toFixed(0)}ms`,
+      `${reference.latency.p75.toFixed(0)}ms`,
+      `${reference.latency.p90.toFixed(0)}ms`,
+      `${reference.latency.p99.toFixed(0)}ms`,
+      `${reference.blockingTime.toFixed(0)}ms`,
+      `${(reference.runDuration / 1000).toFixed(1)}s`,
+    ])
+
+    experimentTableCli.push([
+      benchmarkName,
+      `${experiment.latency.p50.toFixed(0)}ms`,
+      `${experiment.latency.p75.toFixed(0)}ms`,
+      `${experiment.latency.p90.toFixed(0)}ms`,
+      `${experiment.latency.p99.toFixed(0)}ms`,
+      `${experiment.blockingTime.toFixed(0)}ms`,
+      `${(experiment.runDuration / 1000).toFixed(1)}s`,
+    ])
  }
 }

-console.log(table.toString())
-console.log(`
+console.log()
+console.log('Reference vs experiment')
+console.log(comparisonTableCli.toString())
+console.log()
+console.log('Reference result')
+console.log(referenceTableCli.toString())
+console.log()
+console.log('Experiment result')
+console.log(experimentTableCli.toString())
+
+let comparisonTable = `
+| Benchmark | reference<br/>latency of \`sanity@${REFERENCE_TAG}\` | experiment<br/>latency of this branch | Δ (%)<br/>latency difference | |
+| :-- | :-- | :-- | :-- | --- |
+`
+
+const detailedInformationHeader = `
+| Benchmark | latency | p75 | p90 | p99 | blocking time | test duration |
+| --------- | ------: | --: | --: | --: | ------------: | ------------: |
+`
+
+let referenceTable = detailedInformationHeader
+let experimentTable = detailedInformationHeader
+
+for (const {name, results} of testResults) {
+  for (const {experiment, reference} of results) {
+    const significantlyDifferent = isSignificantlyDifferent(
+      experiment.latency.p50,
+      reference.latency.p50,
+    )
+
+    const sign = experiment.latency.p50 >= reference.latency.p50 ? '+' : ''
+    const msDifference = `${sign}${(experiment.latency.p50 - reference.latency.p50).toFixed(0)}ms`
+    const percentageChange = formatPercentageChange(experiment.latency.p50, reference.latency.p50)
+
+    const benchmarkName = `${name} (${experiment.label})`
+
+    comparisonTable +=
+      // benchmark name
+      `| ${benchmarkName} ` +
+      // reference latency
+      `| ${formatEfpsPlain(reference.latency.p50)} efps (${reference.latency.p50.toFixed(0)}ms) ` +
+      // experiment latency
+      `| ${formatEfpsPlain(experiment.latency.p50)} efps (${experiment.latency.p50.toFixed(0)}ms) ` +
+      // difference
+      `| ${msDifference} (${percentageChange}) ` +
+      // status
+      `| ${significantlyDifferent ? '🔴' : '✅'} ` +
+      `|\n`
+
+    referenceTable +=
+      // benchmark name
+      `| ${benchmarkName} ` +
+      // latency
+      `| ${reference.latency.p50.toFixed(0)}ms ` +
+      // p75
+      `| ${reference.latency.p75.toFixed(0)}ms ` +
+      // p90
+      `| ${reference.latency.p90.toFixed(0)}ms ` +
+      // p99
+      `| ${reference.latency.p99.toFixed(0)}ms ` +
+      // blocking time
+      `| ${reference.blockingTime.toFixed(0)}ms ` +
+      // test duration
+      `| ${(reference.runDuration / 1000).toFixed(1)}s ` +
+      `|\n`
+
+    experimentTable +=
+      // benchmark name
+      `| ${benchmarkName} ` +
+      // latency
+      `| ${experiment.latency.p50.toFixed(0)}ms ` +
+      // p75
+      `| ${experiment.latency.p75.toFixed(0)}ms ` +
+      // p90
+      `| ${experiment.latency.p90.toFixed(0)}ms ` +
+      // p99
+      `| ${experiment.latency.p99.toFixed(0)}ms ` +
+      // blocking time
+      `| ${experiment.blockingTime.toFixed(0)}ms ` +
+      // test duration
+      `| ${(experiment.runDuration / 1000).toFixed(1)}s ` +
+      `|\n`
+  }
+}
+
+const markdown = `### ⚡️ Editor Performance Report
+
+Updated ${new Date().toUTCString()}
+
+${comparisonTable}
+
+> **efps** — editor "frames per second". The number of updates assumed to be possible within a second.
+>
+> Derived from input latency. \`efps = 1000 / input_latency\`
+
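One concrete reading of the formula in the report body, with an assumed median latency (illustrative numbers only, not measured results):

```ts
// efps = 1000 / input_latency, for an assumed median input latency of 25ms
const medianLatencyMs = 25
const efps = 1000 / medianLatencyMs
console.log(efps.toFixed(1)) // "40.0": the editor keeps up with ~40 keystrokes per second
```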
+<details>
+<summary>Detailed information</summary>
+
+### 🏠 Reference result
+
+The performance result of \`sanity@${REFERENCE_TAG}\`
+
+${referenceTable}
+
+### 🧪 Experiment result
+
+The performance result of this branch
+
+${experimentTable}
+
+### 📚 Glossary
+
+> #### column definitions
+>
+> - **benchmark** — the name of the test, e.g. "article", followed by the label of the field being measured, e.g. "(title)".
+> - **latency** — the time between when a key was pressed and when its result was rendered, derived from a set of samples. the median (p50) is shown because it best reflects the typical latency.
+> - **p75** — the 75th percentile of the input latency in the test run. 75% of the sampled inputs in this benchmark were processed faster than this value. this provides insight into the upper range of typical performance.
+> - **p90** — the 90th percentile of the input latency in the test run. 90% of the sampled inputs were faster than this. this metric helps identify slower interactions that occurred less frequently during the benchmark.
+> - **p99** — the 99th percentile of the input latency in the test run. only 1% of sampled inputs were slower than this. this represents the worst-case scenarios encountered during the benchmark, useful for identifying potential performance outliers.
+> - **blocking time** — the total time during which the main thread was blocked, preventing user input and UI updates. this metric helps identify performance bottlenecks that may cause the interface to feel unresponsive.
+> - **test duration** — how long the test run took to complete.
+
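The p50/p75/p90/p99 columns come from the new `aggregateLatencies` helper, whose body is not part of this hunk. A minimal sketch of the idea, assuming a simple nearest-rank percentile (the real helper may interpolate differently):

```ts
// Nearest-rank percentile over a sorted sample set (sketch under the assumptions above).
function percentile(sorted: number[], p: number): number {
  const rank = Math.ceil(p * sorted.length) - 1
  return sorted[Math.min(sorted.length - 1, Math.max(0, rank))]
}

function aggregateLatencies(latencies: number[]) {
  const sorted = [...latencies].sort((a, b) => a - b)
  return {
    p50: percentile(sorted, 0.5),
    p75: percentile(sorted, 0.75),
    p90: percentile(sorted, 0.9),
    p99: percentile(sorted, 0.99),
  }
}

console.log(aggregateLatencies([12, 15, 18, 22, 90]))
// { p50: 18, p75: 22, p90: 90, p99: 90 }
```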
+</details>
+`

-│ ${chalk.bold('eFPS — editor "Frames Per Second"')}
-│
-│ The number of renders ("frames") that is assumed to be possible
-│ within a second. Derived from input latency. ${chalk.green('Higher')} is better.
-`)
+// Write markdown file to root of results
+const markdownOutputPath = path.join(workspaceDir, 'results', 'benchmark-results.md')
+await fs.promises.writeFile(markdownOutputPath, markdown)
diff --git a/perf/efps/runTest.ts b/perf/efps/runTest.ts
index 201487686df..1f93a34f9fc 100644
--- a/perf/efps/runTest.ts
+++ b/perf/efps/runTest.ts
@@ -5,7 +5,6 @@ import {fileURLToPath} from 'node:url'

 import {type SanityClient} from '@sanity/client'
 import react from '@vitejs/plugin-react'
-import {type Ora} from 'ora'
 import {chromium} from 'playwright'
 import sourcemaps from 'rollup-plugin-sourcemaps'
 import handler from 'serve-handler'
@@ -17,43 +16,44 @@ import {type EfpsResult, type EfpsTest, type EfpsTestRunnerContext} from './type

 const workspaceDir = path.dirname(fileURLToPath(import.meta.url))

 interface RunTestOptions {
-  prefix: string
  test: EfpsTest
  resultsDir: string
-  spinner: Ora
  projectId: string
  headless: boolean
  client: SanityClient
+  sanityPkgPath: string
+  key: string
+  enableProfiler: boolean
+  log: (text: string) => void
 }

 export async function runTest({
-  prefix,
  test,
  resultsDir,
-  spinner,
  projectId,
  headless,
  client,
+  sanityPkgPath,
+  key,
+  enableProfiler,
+  log,
 }: RunTestOptions): Promise<EfpsResult[]> {
-  const log = (text: string) => {
-    spinner.text = `${prefix}\n └ ${text}`
-  }
-
-  spinner.start(prefix)
-
-  const outDir = path.join(workspaceDir, 'builds', test.name)
-  const testResultsDir = path.join(resultsDir, test.name)
+  const outDir = path.join(workspaceDir, 'builds', test.name, key)
+  const testResultsDir = path.join(resultsDir, test.name, key)

  await fs.promises.mkdir(outDir, {recursive: true})
  log('Building…')

+  const alias: Record<string, string> = {
+    '#config': fileURLToPath(test.configPath!),
+    'sanity': sanityPkgPath,
+  }
+
  await vite.build({
    appType: 'spa',
    build: {outDir, sourcemap: true},
    plugins: [{...sourcemaps(), enforce: 'pre'}, react()],
-    resolve: {
-      alias: {'#config': fileURLToPath(test.configPath!)},
-    },
+    resolve: {alias},
    logLevel: 'silent',
  })
@@ -103,17 +103,19 @@ export async function runTest({
      typeof test.document === 'function' ? await test.document(runnerContext) : test.document
    document = await client.create(documentToCreate)

-    const cdp = await context.newCDPSession(page)
+    const cdp = enableProfiler ? await context.newCDPSession(page) : null

    log('Loading editor…')
    await page.goto(
-      `http://localhost:3300/intent/edit/id=${encodeURIComponent(document._id)};type=${encodeURIComponent(
-        documentToCreate._type,
-      )}`,
+      `http://localhost:3300/intent/edit/id=${encodeURIComponent(
+        document._id,
+      )};type=${encodeURIComponent(documentToCreate._type)}`,
    )

-    await cdp.send('Profiler.enable')
-    await cdp.send('Profiler.start')
+    if (cdp) {
+      await cdp.send('Profiler.enable')
+      await cdp.send('Profiler.start')
+    }

    log('Benchmarking…')
    const result = await test.run({...runnerContext, document})

@@ -121,24 +123,24 @@ export async function runTest({
    log('Saving results…')
    const results = Array.isArray(result) ? 
result : [result] - const {profile} = await cdp.send('Profiler.stop') - const remappedProfile = await remapCpuProfile(profile, outDir) - await fs.promises.mkdir(testResultsDir, {recursive: true}) await fs.promises.writeFile( path.join(testResultsDir, 'results.json'), JSON.stringify(results, null, 2), ) - await fs.promises.writeFile( - path.join(testResultsDir, 'raw.cpuprofile'), - JSON.stringify(profile), - ) - await fs.promises.writeFile( - path.join(testResultsDir, 'mapped.cpuprofile'), - JSON.stringify(remappedProfile), - ) - spinner.succeed(`Ran benchmark '${test.name}'`) + if (cdp) { + const {profile} = await cdp.send('Profiler.stop') + await fs.promises.writeFile( + path.join(testResultsDir, 'raw.cpuprofile'), + JSON.stringify(profile), + ) + const remappedProfile = await remapCpuProfile(profile, outDir) + await fs.promises.writeFile( + path.join(testResultsDir, 'mapped.cpuprofile'), + JSON.stringify(remappedProfile), + ) + } return results } finally { diff --git a/perf/efps/tests/article/article.ts b/perf/efps/tests/article/article.ts index f99cdde3b3f..25d88082d88 100644 --- a/perf/efps/tests/article/article.ts +++ b/perf/efps/tests/article/article.ts @@ -140,30 +140,10 @@ export default defineEfpsTest({ return document }, - run: async ({page}) => { - return [ - { - label: 'title', - ...(await measureFpsForInput( - page.locator('[data-testid="field-title"] input[type="text"]'), - )), - }, - { - label: 'body', - ...(await measureFpsForPte(page.locator('[data-testid="field-body"]'))), - }, - { - label: 'string in object', - ...(await measureFpsForInput( - page.locator('[data-testid="field-seo.metaTitle"] input[type="text"]'), - )), - }, - { - label: 'string in array', - ...(await measureFpsForInput( - page.locator('[data-testid="field-tags"] input[type="text"]').first(), - )), - }, - ] - }, + run: async ({page}) => [ + await measureFpsForInput({page, fieldName: 'title'}), + await measureFpsForPte({page, fieldName: 'body'}), + await measureFpsForInput({page, fieldName: 'seo.metaTitle', label: 'string inside object'}), + await measureFpsForInput({page, fieldName: 'tags', label: 'string inside array'}), + ], }) diff --git a/perf/efps/tests/recipe/recipe.ts b/perf/efps/tests/recipe/recipe.ts index 3c9f8e4c230..9b6fc523076 100644 --- a/perf/efps/tests/recipe/recipe.ts +++ b/perf/efps/tests/recipe/recipe.ts @@ -160,22 +160,9 @@ export default defineEfpsTest({ return recipe }, - run: async ({page}) => { - return [ - { - label: 'name', - ...(await measureFpsForInput( - page.locator('[data-testid="field-name"] input[type="text"]'), - )), - }, - { - label: 'description', - ...(await measureFpsForInput(page.locator('[data-testid="field-description"] textarea'))), - }, - { - label: 'instructions', - ...(await measureFpsForPte(page.locator('[data-testid="field-instructions"]'))), - }, - ] - }, + run: async ({page}) => [ + await measureFpsForInput({page, fieldName: 'name'}), + await measureFpsForInput({page, fieldName: 'description'}), + await measureFpsForPte({page, fieldName: 'instructions'}), + ], }) diff --git a/perf/efps/tests/synthetic/synthetic.ts b/perf/efps/tests/synthetic/synthetic.ts index 2ed731428b8..e0cdc573180 100644 --- a/perf/efps/tests/synthetic/synthetic.ts +++ b/perf/efps/tests/synthetic/synthetic.ts @@ -120,20 +120,12 @@ export default defineEfpsTest({ return synthetic }, - run: async ({page}) => { - return [ - { - label: 'title', - ...(await measureFpsForInput( - page.locator('[data-testid="field-title"] input[type="text"]'), - )), - }, - { - label: 'string in object', - 
...(await measureFpsForInput(
-          page.locator('[data-testid="field-syntheticObject.name"] input[type="text"]'),
-        )),
-      },
-    ]
-  },
+  run: async ({page}) => [
+    await measureFpsForInput({page, fieldName: 'title'}),
+    await measureFpsForInput({
+      page,
+      fieldName: 'syntheticObject.name',
+      label: 'string inside object',
+    }),
+  ],
 })
diff --git a/perf/efps/types.ts b/perf/efps/types.ts
index 910932926e3..835c64c8bde 100644
--- a/perf/efps/types.ts
+++ b/perf/efps/types.ts
@@ -12,18 +12,25 @@ export interface EfpsTest {
  name: string
  configPath: string | undefined
  document: SanityDocumentStub | ((context: EfpsTestRunnerContext) => Promise<SanityDocumentStub>)
-  run: (context: EfpsTestRunnerContext & {document: SanityDocument}) => Promise<EfpsTestResult>
+  run: (context: EfpsTestRunnerContext & {document: SanityDocument}) => Promise<EfpsResult | EfpsResult[]>
 }

 export interface EfpsResult {
-  label?: string
-  p50: number
-  p75: number
-  p90: number
-  latencies: number[]
+  label: string
+  runDuration: number
+  blockingTime: number
+  latency: {
+    p50: number
+    p75: number
+    p90: number
+    p99: number
+  }
 }

-export type EfpsTestResult = EfpsResult | EfpsResult[]
+export interface EfpsAbResult {
+  experiment: EfpsResult
+  reference: EfpsResult
+}

 export function defineEfpsTest(config: EfpsTest): EfpsTest {
  return config
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index ed581bbd560..b86b43dcb96 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -66,9 +66,6 @@ importers:
      '@sanity/uuid':
        specifier: ^3.0.2
        version: 3.0.2
-      '@types/glob':
-        specifier: ^7.2.0
-        version: 7.2.0
      '@types/lodash':
        specifier: ^4.17.7
        version: 4.17.7
@@ -163,8 +160,8 @@ importers:
        specifier: ^2.0.0
        version: 2.1.0
      glob:
-        specifier: ^7.2.0
-        version: 7.2.3
+        specifier: ^10.4.0
+        version: 10.4.5
      globby:
        specifier: ^10.0.0
        version: 10.0.2
@@ -7501,7 +7498,7 @@ packages:
    resolution: {integrity: sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw==}
    engines: {node: '>= 4.0'}
    os: [darwin]
-    deprecated: Upgrade to fsevents v2 to mitigate potential security issues
+    deprecated: The v1 package contains DANGEROUS / INSECURE binaries. 
Upgrade to safe fsevents v2 fsevents@2.3.2: resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} diff --git a/scripts/normalizeDependencyVersions.ts b/scripts/normalizeDependencyVersions.ts index 48c7508d193..994021de9bf 100644 --- a/scripts/normalizeDependencyVersions.ts +++ b/scripts/normalizeDependencyVersions.ts @@ -3,7 +3,7 @@ import fs from 'node:fs' import path from 'node:path' import chalk from 'chalk' -import glob from 'glob' +import {globSync} from 'glob' import semver from 'semver' interface LernaConfig { @@ -42,7 +42,7 @@ const sortRanges = (ranges: string[]) => const patterns = config.packages.map((pkg) => path.join(pkg, 'package.json')) const pkgs = patterns - .flatMap((pattern) => glob.sync(pattern)) + .flatMap((pattern) => globSync(pattern)) .map((file) => path.join(rootPath, file)) .map((file) => ({contents: fs.readFileSync(file, 'utf8'), file})) .map(({contents, file}) => ({file, pkg: JSON.parse(contents)})) diff --git a/scripts/utils/getPackagePaths.ts b/scripts/utils/getPackagePaths.ts index b99bd2196f2..cb7db0a4eea 100644 --- a/scripts/utils/getPackagePaths.ts +++ b/scripts/utils/getPackagePaths.ts @@ -2,7 +2,7 @@ import fs from 'node:fs' import path from 'node:path' -import glob from 'glob' +import {globSync} from 'glob' interface LernaConfig { packages: string[] @@ -22,7 +22,7 @@ const patterns = config.packages.map((pkg) => path.join(pkg, 'package.json')) * @internal */ export function getManifestPaths(): string[] { - return patterns.flatMap((pattern) => glob.sync(pattern)) + return patterns.flatMap((pattern) => globSync(pattern)) } /**
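The two script changes above follow from the `glob` v7 → v10 bump in the lockfile: glob v9+ removed the default export (and its `.sync` method) in favour of named exports such as `globSync`. A minimal before/after of the call-site migration:

```ts
// Before, with glob v7 (default export):
// import glob from 'glob'
// const manifests = glob.sync('packages/*/package.json')

// After, with glob v10 (named exports only):
import {globSync} from 'glob'

const manifests = globSync('packages/*/package.json')
console.log(`${manifests.length} package manifests found`)
```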