diff --git a/cocos/asset/asset-manager/config.ts b/cocos/asset/asset-manager/config.ts index 1a4a0899332..88d32f0a44c 100644 --- a/cocos/asset/asset-manager/config.ts +++ b/cocos/asset/asset-manager/config.ts @@ -253,7 +253,7 @@ export default class Config { this._initUuid(options.uuids); this._initPath(options.paths); this._initScene(options.scenes); - this._initPackage(options.packs); + this._initPackage(options.packs, options.extensionMap); this._initVersion(options.versions); this._initRedirect(options.redirect); for (const ext in options.extensionMap) { @@ -382,12 +382,20 @@ export default class Config { } } - private _initPackage (packageList: Record): void { + private _initPackage (packageList: Record, extensionMap: IConfigOption['extensionMap']): void { if (!packageList) { return; } const assetInfos = this.assetInfos; for (const packUuid in packageList) { const uuids = packageList[packUuid]; - const pack = { uuid: packUuid, packedUuids: uuids, ext: '.json' }; + let mappedExtension = '.json'; + for (const ext in extensionMap) { + const mappedUUIDs = extensionMap[ext]; + if (mappedUUIDs.includes(packUuid)) { + mappedExtension = ext; + break; + } + } + const pack = { uuid: packUuid, packedUuids: uuids, ext: mappedExtension }; assetInfos.add(packUuid, pack); for (let i = 0, l = uuids.length; i < l; i++) { diff --git a/cocos/asset/asset-manager/pack-manager.ts b/cocos/asset/asset-manager/pack-manager.ts index 42ffa79eef3..28885e02e9f 100644 --- a/cocos/asset/asset-manager/pack-manager.ts +++ b/cocos/asset/asset-manager/pack-manager.ts @@ -24,7 +24,7 @@ import { ImageAsset } from '../assets/image-asset'; import { Texture2D } from '../assets/texture-2d'; -import { packCustomObjData, unpackJSONs } from '../../serialization/deserialize'; +import { isGeneralPurposePack, packCustomObjData, unpackJSONs } from '../../serialization/deserialize'; import { assertIsTrue, error, errorID, js } from '../../core'; import Cache from './cache'; import downloader from 
'./downloader'; @@ -56,6 +56,7 @@ export class PackManager { private _loading = new Cache(); private _unpackers: Record = { '.json': this.unpackJson, + '.ccon': this.unpackJson, }; /** @@ -79,7 +80,7 @@ export class PackManager { * */ public unpackJson ( - pack: string[], + pack: readonly string[], json: any, options: Record, onComplete: ((err: Error | null, data?: Record | null) => void), @@ -87,14 +88,14 @@ export class PackManager { const out: Record = js.createMap(true); let err: Error | null = null; - if (Array.isArray(json)) { - json = unpackJSONs(json as unknown as Parameters[0]); + if (isGeneralPurposePack(json)) { + const unpacked = unpackJSONs(json); - if (json.length !== pack.length) { + if (unpacked.length !== pack.length) { errorID(4915); } for (let i = 0; i < pack.length; i++) { - out[`${pack[i]}@import`] = json[i]; + out[`${pack[i]}@import`] = unpacked[i]; } } else { const textureType = js.getClassId(Texture2D); diff --git a/cocos/serialization/compiled/typed-array.ts b/cocos/serialization/compiled/typed-array.ts new file mode 100644 index 00000000000..fdec8eea2aa --- /dev/null +++ b/cocos/serialization/compiled/typed-array.ts @@ -0,0 +1,129 @@ +import { assertIsTrue, sys } from '../../core'; +import { IRuntimeFileData } from '../deserialize'; + +assertIsTrue(sys.isLittleEndian, `Deserialization system currently suppose little endian.`); + +export const typedArrayTypeTable = Object.freeze([ + Float32Array, + Float64Array, + + Int8Array, + Int16Array, + Int32Array, + + Uint8Array, + Uint16Array, + Uint32Array, + + Uint8ClampedArray, + // BigInt64Array, + // BigUint64Array, +] as const); + +/** + * Describes the serialized data of an typed array. + * - If it's an array, it's `TypedArrayDataJson`. + * - Otherwise, it's `TypedArrayDataPtr`. + */ +export type TypedArrayData = TypedArrayDataJson | TypedArrayDataPtr; + +export type TypedArrayDataJson = [ + /** + * Indicates the constructor of typed array. + * It's index of the constructor in `TypedArrays`. 
+ */ + typeIndex: number, + + /** + * Array element values. + */ + elements: number[], +]; + +/** + * Let `offset` be this value. + * Let `storage` be the binary buffer attached to the deserialized document. + * Then, the data of `storage` started from `offset` + * can be described using the following structure (in C++, assuming fields are packed tightly): + * + * ```cpp + * struct _ { + * /// Indicates the constructor of typed array. + * /// It's index of the constructor in `typedArrayTypeTable`. + * /// NOTE(review): the reader (`decodeTypedArray`) consumes a single byte here — confirm the writer emits 1 byte, not 4. + * std::uint8_t typeIndex; + * + * /// The typed array's element count. Note this is not "byte length". + * std::uint32_t length; + * + * /// Automatically padding bytes to align the `arrayBufferBytes`. + * /// See comments on `arrayBufferBytes`. + * std::byte[] _padding; + * + * /// Bytes of the underlying `ArrayBuffer` of this typed array. + * /// Should be aligned to `typedArrayConstructor.BYTES_PER_ELEMENT` + * /// according to https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/TypedArray#bytelength_must_be_aligned. 
+ * std::byte[] arrayBufferBytes; + * } + * ``` + */ +export type TypedArrayDataPtr = number; + +// eslint-disable-next-line @typescript-eslint/explicit-function-return-type +function getTypedArrayConstructor (typeIndex: number) { + assertIsTrue(typeIndex >= 0 && typeIndex < typedArrayTypeTable.length); + return typedArrayTypeTable[typeIndex]; +} + +function calculatePaddingToAlignAs (v: number, align: number): number { + if (align === 0) { + return 0; + } + const remainder = v % align; + if (remainder !== 0) { + return align - remainder; + } + return 0; +} + +function decodeTypedArray (data: IRuntimeFileData, value: TypedArrayData): ArrayBufferView { + if (Array.isArray(value)) { + const [typeIndex, elements] = value; + const TypedArrayConstructor = getTypedArrayConstructor(typeIndex); + return new TypedArrayConstructor(elements); + } else { + const context = data[0]; + const attachedBinary = context._attachedBinary; + assertIsTrue(attachedBinary, `Incorrect data: binary is expected.`); + const dataView = (context._attachedBinaryDataViewCache + ??= new DataView(attachedBinary.buffer, attachedBinary.byteOffset, attachedBinary.byteLength)); + + let p = value; + const header = dataView.getUint8(p); + p += 1; + const length = dataView.getUint32(p, true); + p += 4; + + const typeIndex = header & 0xFF; + const TypedArrayConstructor = getTypedArrayConstructor(typeIndex); + + // Skip the automatic padding so `p` is aligned to `BYTES_PER_ELEMENT` of the element type. + p += calculatePaddingToAlignAs(p + attachedBinary.byteOffset, TypedArrayConstructor.BYTES_PER_ELEMENT); + + // Copy the section: + // - Allocates the result. + // - Creates a view on big buffer. + // - Copy using `TypedArray.prototype.set`. + // This approach does not handle endianness (this module asserts little-endian at load time). 
+ // + // Here listed the benchmark in various other ways: + // https://jsperf.app/vayeri/2/preview + // + const result = new TypedArrayConstructor(length); + result.set(new TypedArrayConstructor(attachedBinary.buffer, attachedBinary.byteOffset + p, length)); + return result; + } +} + +export function deserializeTypedArray (data: IRuntimeFileData, owner: any, key: string, value: TypedArrayData): void { + owner[key] = decodeTypedArray(data, value); +} diff --git a/cocos/serialization/deserialize.ts b/cocos/serialization/deserialize.ts index 8f88ae4091a..14a79642325 100644 --- a/cocos/serialization/deserialize.ts +++ b/cocos/serialization/deserialize.ts @@ -28,7 +28,9 @@ import { cclegacy, errorID, getError, js, assertIsTrue } from '../core'; import { deserializeDynamic, DeserializeDynamicOptions, parseUuidDependenciesDynamic } from './deserialize-dynamic'; import { Asset } from '../asset/assets/asset'; +import { CCON } from './ccon'; import type { CompiledDeserializeFn } from './deserialize-dynamic'; +import { deserializeTypedArray, TypedArrayData } from './compiled/typed-array'; import { reportMissingClass as defaultReportMissingClass } from './report-missing-class'; @@ -124,6 +126,12 @@ const enum DataTypeID { // Common TypedArray for legacyCC.Node only. Never be null. TRS, + // From the point of view of simplified implementation, + // it is not supported to deserialize TypedArray that is initialized to null in the constructor. + // Also, the length of TypedArray cannot be changed. + // Developers will rarely manually assign a null. + TypedArray, + // ValueType without default value (in arrays, dictionaries). // Developers will rarely manually assign a null. 
ValueType, @@ -158,6 +166,7 @@ interface DataTypes { [DataTypeID.ValueTypeCreated]: IValueTypeData; [DataTypeID.AssetRefByInnerObj]: number; [DataTypeID.TRS]: ITRSData; + [DataTypeID.TypedArray]: TypedArrayData; [DataTypeID.ValueType]: IValueTypeData; [DataTypeID.Array_Class]: DataTypes[DataTypeID.Class][]; [DataTypeID.CustomizedClass]: ICustomObjectData; @@ -297,6 +306,7 @@ export declare namespace deserialize.Internal { export type ITRSData_ = ITRSData; export type IDictData_ = IDictData; export type IArrayData_ = IArrayData; + export type ITypedArrayData_ = TypedArrayData; } const enum Refs { @@ -339,6 +349,9 @@ const enum File { DependUuidIndices, ARRAY_LENGTH, + + RUNTIME_BEGIN = ARRAY_LENGTH, + BinaryStorage_runtime = RUNTIME_BEGIN, } // Main file structure @@ -379,6 +392,11 @@ type IFileData = MapEnum<{ type IRuntimeFileDataMap = Omit & { [File.Context]: FileInfo & DeserializeContext; + + /** + * The binary storage attached to this document. + */ + [File.BinaryStorage_runtime]: Uint8Array | undefined; } /** @@ -386,7 +404,7 @@ type IRuntimeFileDataMap = Omit & { */ export type IRuntimeFileData = MapEnum<{ [x in keyof IRuntimeFileDataMap as `${x}`]: IRuntimeFileDataMap[x]; -}, 11 /* Currently we should manually specify the enumerators count. */>; +}, 12 /* Currently we should manually specify the enumerators count. */>; type IDeserializeInput = IFileData | IRuntimeFileData; @@ -394,8 +412,17 @@ type ISharedData = TupleSlice; type IPackedFileSection = [ ...document: TupleSlice, + + /** + * This section's binary storage span into packed binary buffer. + * Or `undefined`(the section array has no such element) + * if this section does not have an associated binary storage. 
+ */ + binaryStorage: [byteOffset: number, byteLength: number] | undefined, ]; + +const PACKED_SECTION_BINARY_STORAGE_INDEX = 6; + const PACKED_SECTIONS = File.Instances; type IPackedFileData = [ @@ -423,6 +450,8 @@ type ClassFinder = deserialize.ClassFinder; interface DeserializeContext extends ICustomHandler { _version?: number; + _attachedBinary?: Uint8Array; + _attachedBinaryDataViewCache?: DataView; } interface IOptions extends Partial { @@ -723,6 +752,7 @@ ASSIGNMENTS[DataTypeID.Class] = parseClass; ASSIGNMENTS[DataTypeID.ValueTypeCreated] = deserializeBuiltinValueTypeInto; ASSIGNMENTS[DataTypeID.AssetRefByInnerObj] = parseAssetRefByInnerObj; ASSIGNMENTS[DataTypeID.TRS] = parseTRS; +ASSIGNMENTS[DataTypeID.TypedArray] = deserializeTypedArray; ASSIGNMENTS[DataTypeID.ValueType] = deserializeBuiltinValueType; ASSIGNMENTS[DataTypeID.Array_Class] = genArrayParser(parseClass); ASSIGNMENTS[DataTypeID.CustomizedClass] = parseCustomClass; @@ -891,6 +921,11 @@ export function isCompiledJson (json: unknown): boolean { const version = json[0]; // array[0] will not be a number in the editor version return typeof version === 'number' || version instanceof FileInfo; + } else if (json instanceof CCON) { + // This is a very verbose check. + // Make sure we won't run into an infinite loop due to a data error. 
+ assertIsTrue(!(json.document instanceof CCON)); + return isCompiledJson(json.document); } else { return false; } @@ -898,6 +933,7 @@ export function isCompiledJson (json: unknown): boolean { function initializeDeserializationContext( data: IDeserializeInput, + attachedBinary: Uint8Array | undefined, details: Details, options?: IOptions & DeserializeDynamicOptions, ) { @@ -918,6 +954,7 @@ function initializeDeserializationContext( const context = options as IRuntimeFileData[File.Context]; context._version = version; context.result = details; + context._attachedBinary = attachedBinary; data[File.Context] = context; if (!preprocessed) { @@ -935,7 +972,7 @@ function initializeDeserializationContext( * @param options Deserialization Options. * @return The original object. */ -export function deserialize (data: IDeserializeInput | string | any, details?: Details, options?: IOptions & DeserializeDynamicOptions): unknown { +export function deserialize (data: IDeserializeInput | string | CCON | any, details?: Details, options?: IOptions & DeserializeDynamicOptions): unknown { if (typeof data === 'string') { data = JSON.parse(data); } @@ -953,13 +990,27 @@ export function deserialize (data: IDeserializeInput | string | any, details?: D if (!FORCE_COMPILED && !isCompiledJson(data)) { res = deserializeDynamic(data, details, options); } else { + let input: IDeserializeInput; + let binary: Uint8Array | undefined = undefined; + if (data instanceof CCON) { + input = data.document as IDeserializeInput; + // Currently, a ccon should have only one chunk at most. 
+ assertIsTrue(data.chunks.length === 1); + binary = data.chunks[0]; + } else { + input = data as IDeserializeInput; + binary = input[File.BinaryStorage_runtime]; + } + initializeDeserializationContext( - data, + input, + binary, details, options, ); - const runtimeData = data as IRuntimeFileData; + const runtimeData = input as IRuntimeFileData; + const context = runtimeData[File.Context]; cclegacy.game._isCloning = true; const instances = runtimeData[File.Instances]; @@ -973,6 +1024,12 @@ export function deserialize (data: IDeserializeInput | string | any, details?: D parseResult(runtimeData); res = instances[rootIndex]; + + // Clean up our injections. + { + context._attachedBinary = undefined; + context._attachedBinaryDataViewCache = undefined; + } } if (isBorrowedDetails) { @@ -1005,8 +1062,33 @@ class FileInfo { } } +export type GeneralPurposePack = IPackedFileData | CCON; + +/** + * Decides if the pack is a general-purpose pack. + * If true, it should be unpacked through `unpackJSONs`. + * @param data Pack data. + * @returns Unpacked contents, that's, a list of serialized objects. 
+ */ +export function isGeneralPurposePack(data: unknown): data is GeneralPurposePack { + return Array.isArray(data) || data instanceof CCON; +} + export function unpackJSONs ( - data: IPackedFileData, classFinder?: ClassFinder, reportMissingClass?: deserialize.ReportMissingClass): IDeserializeInput[] { + input: GeneralPurposePack, + classFinder?: ClassFinder, + reportMissingClass?: deserialize.ReportMissingClass, +): IDeserializeInput[] { + let data: IPackedFileData; + let binaryChunk: Uint8Array | undefined = undefined; + if (input instanceof CCON) { + data = input.document as IPackedFileData; + assertIsTrue(input.chunks.length <= 1); + binaryChunk = input.chunks[0]; + } else { + data = input; + } + if (data[File.Version] < SUPPORT_MIN_FORMAT_VERSION) { throw new Error(getError(5304, data[File.Version])); } @@ -1022,7 +1104,23 @@ export function unpackJSONs ( const sections = data[PACKED_SECTIONS]; for (let i = 0; i < sections.length; ++i) { const section = sections[i]; + const binaryStorageSpan = section[PACKED_SECTION_BINARY_STORAGE_INDEX]; (section as any[]).unshift(version, sharedUuids, sharedStrings, sharedClasses, sharedMasks); + if (typeof binaryStorageSpan !== 'undefined') { + if (!binaryChunk) { + // Bad data: there's section requiring binary storage but the incoming data didn't provide one. + throw new Error(`Bad data: there's section requiring binary storage but the incoming data didn't provide one`); + } + + const [byteOffset, byteLength] = binaryStorageSpan; + + // Note: we do copy here. + // The reason is, if we don't copy instead of directly reference, + // the reference prevents the `binaryChunk` from being gc. 
+ const sliceStart = binaryChunk.byteOffset + byteOffset; + const copy = binaryChunk.buffer.slice(sliceStart, sliceStart + byteLength); + (section as unknown as IRuntimeFileData)[File.BinaryStorage_runtime] = new Uint8Array(copy); + } } return sections as unknown as IDeserializeInput[]; } @@ -1134,6 +1232,7 @@ if (TEST) { CustomizedClass: DataTypeID.CustomizedClass, Dict: DataTypeID.Dict, Array: DataTypeID.Array, + TypedArray: DataTypeID.TypedArray, }, unpackJSONs, }; diff --git a/editor/exports/serialization.ts b/editor/exports/serialization.ts index 00b297fa884..d85f950dc55 100644 --- a/editor/exports/serialization.ts +++ b/editor/exports/serialization.ts @@ -11,3 +11,11 @@ export { export { serializeBuiltinValueType, } from '../../cocos/serialization/compiled/builtin-value-type'; + +export { typedArrayTypeTable } from '../../cocos/serialization/compiled/typed-array'; + +export type { + TypedArrayData, + TypedArrayDataJson, + TypedArrayDataPtr, +} from '../../cocos/serialization/compiled/typed-array';