diff --git a/cocos/serialization/compiled/typed-array.ts b/cocos/serialization/compiled/typed-array.ts
index 42eb0e40074..efa5ac94d23 100644
--- a/cocos/serialization/compiled/typed-array.ts
+++ b/cocos/serialization/compiled/typed-array.ts
@@ -57,7 +57,7 @@ export type TypedArrayDataJson = [
  *
  * /// Automatically padding bytes to align the `arrayBufferBytes`.
  * /// See comments on `arrayBufferBytes`.
- * std::byte[] padding;
+ * std::byte[] _padding;
  *
  * /// Bytes of the underlying `ArrayBuffer` of this typed array.
  * /// Should be aligned to `typedArrayConstructor.BYTES_PER_ELEMENT`
@@ -85,16 +85,16 @@ function calculatePaddingToAlignAs (v: number, align: number): number {
     return 0;
 }
 
-export function decodeTypedArray (data: IRuntimeFileData, value: TypedArrayData): ArrayBufferView {
+function decodeTypedArray (data: IRuntimeFileData, value: TypedArrayData): ArrayBufferView {
     if (Array.isArray(value)) {
         const [typeIndex, elements] = value;
         const TypedArrayConstructor = getTypedArrayConstructor(typeIndex);
         return new TypedArrayConstructor(elements);
     } else {
         const context = data[0];
-        const attachedBinary = context.attachedBinary;
+        const attachedBinary = context._attachedBinary;
         assertIsTrue(attachedBinary, `Incorrect data: binary is expected.`);
-        const dataView = (context.attachedBinaryDataViewCache
+        const dataView = (context._attachedBinaryDataViewCache
             ??= new DataView(attachedBinary.buffer, attachedBinary.byteOffset, attachedBinary.byteLength));
 
         let p = value;
@@ -123,3 +123,7 @@ export function decodeTypedArray (data: IRuntimeFileData, value: TypedArrayData)
         return result;
     }
 }
+
+export function deserializeTypedArray (data: IRuntimeFileData, owner: any, key: string, value: TypedArrayData): void {
+    owner[key] = decodeTypedArray(data, value);
+}
diff --git a/cocos/serialization/deserialize.ts b/cocos/serialization/deserialize.ts
index 65d9724a00e..1d5d7e79177 100644
--- a/cocos/serialization/deserialize.ts
+++ b/cocos/serialization/deserialize.ts
@@ -30,7 +30,7 @@ import { Asset } from '../asset/assets/asset';
 import { CCON } from './ccon';
 
 import type { CompiledDeserializeFn } from './deserialize-dynamic';
-import { decodeTypedArray, TypedArrayData } from './compiled/typed-array';
+import { deserializeTypedArray, TypedArrayData } from './compiled/typed-array';
 
 import { reportMissingClass as defaultReportMissingClass } from './report-missing-class';
 
@@ -419,7 +419,7 @@ type IPackedFileSection = [
     binaryStorage: [byteOffset: number, byteLength: number] | Empty,
 ];
 
-const PACKED_FILE_SECTION_BINARY_STORAGE_INDEX = 6;
+const PACKED_SECTION_BINARY_STORAGE_INDEX = 6;
 
 const PACKED_SECTIONS = File.Instances;
 
@@ -448,8 +448,8 @@ type ClassFinder = deserialize.ClassFinder;
 
 interface DeserializeContext extends ICustomHandler {
     _version?: number;
-    attachedBinary?: Uint8Array;
-    attachedBinaryDataViewCache?: DataView;
+    _attachedBinary?: Uint8Array;
+    _attachedBinaryDataViewCache?: DataView;
 }
 
 interface IOptions extends Partial {
@@ -738,10 +738,6 @@ function parseArray (data: IRuntimeFileData, owner: any, key: string, value: IAr
     owner[key] = array;
 }
 
-function parseTypedArray (data: IRuntimeFileData, owner: any, key: string, value: TypedArrayData) {
-    owner[key] = decodeTypedArray(data, value);
-}
-
 const ASSIGNMENTS: {
     [K in keyof DataTypes]?: ParseFunction;
     // eslint-disable-next-line @typescript-eslint/ban-types
@@ -754,12 +750,12 @@ ASSIGNMENTS[DataTypeID.Class] = parseClass;
 ASSIGNMENTS[DataTypeID.ValueTypeCreated] = deserializeBuiltinValueTypeInto;
 ASSIGNMENTS[DataTypeID.AssetRefByInnerObj] = parseAssetRefByInnerObj;
 ASSIGNMENTS[DataTypeID.TRS] = parseTRS;
+ASSIGNMENTS[DataTypeID.TypedArray] = deserializeTypedArray;
 ASSIGNMENTS[DataTypeID.ValueType] = deserializeBuiltinValueType;
 ASSIGNMENTS[DataTypeID.Array_Class] = genArrayParser(parseClass);
 ASSIGNMENTS[DataTypeID.CustomizedClass] = parseCustomClass;
 ASSIGNMENTS[DataTypeID.Dict] = parseDict;
 ASSIGNMENTS[DataTypeID.Array] = parseArray;
-ASSIGNMENTS[DataTypeID.TypedArray] = parseTypedArray;
 
 function parseInstances (data: IRuntimeFileData): RootInstanceIndex {
     const instances = data[File.Instances];
@@ -957,7 +953,7 @@ function initializeDeserializationContext(
     const context = options as IRuntimeFileData[File.Context];
     context._version = version;
     context.result = details;
-    context.attachedBinary = attachedBinary;
+    context._attachedBinary = attachedBinary;
     data[File.Context] = context;
 
     if (!preprocessed) {
@@ -1002,10 +998,7 @@ export function deserialize (data: IDeserializeInput | string | CCON | any, deta
         binary = data.chunks[0];
     } else {
         input = data as IDeserializeInput;
-        const binaryStorage = input[File.BinaryStorage_runtime];
-        if (ArrayBuffer.isView(binaryStorage)) {
-            binary = binaryStorage;
-        }
+        binary = input[File.BinaryStorage_runtime];
     }
 
     initializeDeserializationContext(
@@ -1033,8 +1026,8 @@ export function deserialize (data: IDeserializeInput | string | CCON | any, deta
 
     // Clean up our injections.
     {
-        context.attachedBinary = undefined;
-        context.attachedBinaryDataViewCache = undefined;
+        context._attachedBinary = undefined;
+        context._attachedBinaryDataViewCache = undefined;
     }
 }
 
@@ -1110,7 +1103,7 @@ export function unpackJSONs (
     const sections = data[PACKED_SECTIONS];
     for (let i = 0; i < sections.length; ++i) {
         const section = sections[i];
-        const binaryStorageSpan = section[PACKED_FILE_SECTION_BINARY_STORAGE_INDEX];
+        const binaryStorageSpan = section[PACKED_SECTION_BINARY_STORAGE_INDEX];
         (section as any[]).unshift(version, sharedUuids, sharedStrings, sharedClasses, sharedMasks);
         if (binaryStorageSpan !== EMPTY_PLACEHOLDER) {
             if (!binaryChunk) {
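
For context on the typed-array half of the patch, the following is a minimal, self-contained TypeScript sketch of the dispatch pattern it settles on. The names used here (TypeId, Parser, PARSERS, decodeTypedArraySketch, deserializeTypedArraySketch) are illustrative stand-ins, not the engine's actual types: the point is only that every parser shares the (data, owner, key, value) shape, so the new deserializeTypedArray wrapper can be registered in the ASSIGNMENTS table the same way as parseArray or parseDict.

// Illustrative stand-ins; not the engine's real identifiers.
enum TypeId { TypedArray = 0 }

type FileData = unknown[];
type Parser = (data: FileData, owner: any, key: string, value: any) => void;

// Stand-in for decodeTypedArray: decode the serialized value and return a typed array.
function decodeTypedArraySketch (_data: FileData, value: number[]): ArrayBufferView {
    return new Float32Array(value);
}

// The wrapper added by the patch adapts "decode and return" into "assign onto owner",
// which is the contract every entry of the dispatch table follows.
function deserializeTypedArraySketch (data: FileData, owner: any, key: string, value: number[]): void {
    owner[key] = decodeTypedArraySketch(data, value);
}

const PARSERS: Partial<Record<TypeId, Parser>> = {};
PARSERS[TypeId.TypedArray] = deserializeTypedArraySketch;

// Usage: look the parser up by the serialized type id and let it write the property.
const target: Record<string, unknown> = {};
PARSERS[TypeId.TypedArray]!([], target, 'positions', [1, 2, 3]);
// target.positions is now a Float32Array containing [1, 2, 3].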