mirror of
https://github.com/yeicor-3d/yet-another-cad-viewer.git
synced 2025-12-19 22:24:17 +01:00
starting work on glb(s) management
This commit is contained in:
@@ -9,6 +9,7 @@
|
||||
"build": "parcel build src/index.html --reporter @parcel/reporter-bundle-analyzer --detailed-report"
|
||||
},
|
||||
"dependencies": {
|
||||
"@gltf-transform/core": "^3.10.0",
|
||||
"@google/model-viewer": "^3.4.0",
|
||||
"three": "^0.160.1",
|
||||
"three-orientation-gizmo": "https://github.com/jrj2211/three-orientation-gizmo",
|
||||
|
||||
83
src/models/glb/glbs.ts
Normal file
83
src/models/glb/glbs.ts
Normal file
@@ -0,0 +1,83 @@
|
||||
/**
 * Result of splitting a (possibly multi-chunk) GLBS stream.
 */
export type SplitGlbsResult = {
    // Number of GLB chunks; 0xFFFFFFFF is a sentinel meaning the count is unknown up front.
    numChunks: number;
    // Stream that emits one complete GLB file per chunk, each as a single Uint8Array.
    glbReader: ReadableStream<Uint8Array>;
}
|
||||
|
||||
/**
|
||||
* Given a stream of binary data (e.g. from a fetch response), splits a GLBS file into its component GLB files and
|
||||
* returns them as a stream of Uint8Arrays with known lengths. It also supports simple GLB files by returning itself.
|
||||
*/
|
||||
export async function splitGlbs(reader: ReadableStream<Uint8Array>): Promise<SplitGlbsResult> {
|
||||
// Create a transform stream that splits the GLBS file into its component GLB files by reading the length of each
|
||||
// chunk and then reading that many bytes from the input stream.
|
||||
let buffer4Bytes = new Uint8Array(4);
|
||||
let readerImpl = reader.getReader({mode: 'byob'});
|
||||
await readerImpl.read(buffer4Bytes);
|
||||
if (buffer4Bytes[0] === '{'.charCodeAt(0) || Array.from(buffer4Bytes) === "glTF".split('').map(c => c.charCodeAt(0))) {
|
||||
return {numChunks: 1, glbReader: await singleBlob(reader)}
|
||||
}
|
||||
let isGlbs = Array.from(buffer4Bytes) === "GLBS".split('').map(c => c.charCodeAt(0));
|
||||
if (!isGlbs) throw new Error('Invalid magic numbers for expected GLBS file: ' + buffer4Bytes);
|
||||
// Create a new readable stream that splits the GLBS file into its component GLB files by reading the length of each
|
||||
// chunk and then reading that many bytes from the input stream.
|
||||
// - But first, we read the number of chunks (can be 0xFFFFFFFF if the number of chunks is unknown).
|
||||
await readerImpl.read(buffer4Bytes);
|
||||
let numChunks = new DataView(buffer4Bytes.buffer).getUint32(0, true);
|
||||
return {
|
||||
numChunks,
|
||||
// - Then, we read the length of each chunk followed by the chunk itself.
|
||||
glbReader: new ReadableStream<Uint8Array>({
|
||||
async start(controller) {
|
||||
for (let i = 0; i < numChunks; i++) {
|
||||
// - Read length
|
||||
let {done} = await readerImpl.read(buffer4Bytes);
|
||||
if (done) {
|
||||
if (numChunks != 0xFFFFFFFF) throw new Error('Unexpected end of stream while reading chunk length');
|
||||
else break // We reached the end of the stream of unknown length, so we stop reading chunks.
|
||||
}
|
||||
let length = new DataView(buffer4Bytes.buffer).getUint32(0, true);
|
||||
// - Read chunk
|
||||
let chunkReader = await singleBlob(reader, length);
|
||||
let {value: fullChunk} = await chunkReader.getReader().read();
|
||||
controller.enqueue(fullChunk);
|
||||
}
|
||||
controller.close();
|
||||
}
|
||||
})
|
||||
};
|
||||
}
|
||||
|
||||
async function singleBlob(reader: ReadableStream<Uint8Array>, stopAfter: number | null = null): Promise<ReadableStream<Uint8Array>> {
|
||||
// Make sure the reader reads the entire stream at once.
|
||||
const readerImpl = reader.getReader();
|
||||
let bufferedChunks: Uint8Array = new Uint8Array();
|
||||
try {
|
||||
let done = false;
|
||||
let length = 0;
|
||||
while (!done) {
|
||||
let {value, done: d} = await readerImpl.read();
|
||||
if (value) {
|
||||
// TODO: This is inefficient. We should be able to avoid copying the buffer each time. byob?
|
||||
let oldBuffer = bufferedChunks;
|
||||
let newLength = bufferedChunks.length + value.length;
|
||||
if (stopAfter !== null && newLength > stopAfter) {
|
||||
newLength = stopAfter;
|
||||
value = value.slice(0, stopAfter - bufferedChunks.length);
|
||||
}
|
||||
bufferedChunks = new Uint8Array(newLength);
|
||||
bufferedChunks.set(oldBuffer);
|
||||
bufferedChunks.set(value, length);
|
||||
length += value.length;
|
||||
}
|
||||
done = d;
|
||||
}
|
||||
} finally {
|
||||
await readerImpl.cancel();
|
||||
}
|
||||
return new ReadableStream<Uint8Array>({
|
||||
start(controller) {
|
||||
controller.enqueue(bufferedChunks);
|
||||
controller.close();
|
||||
}
|
||||
});
|
||||
}
|
||||
38
src/models/glb/merge.ts
Normal file
38
src/models/glb/merge.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
import { WebIO } from "@gltf-transform/core";
|
||||
|
||||
// /**
|
||||
// * Given a stream of binary data (e.g. from a fetch response), load a GLBS file (or simply a GLB file) and automatically
|
||||
// * merge them into a single GLB file. progress is a callback that is called with the document after each step of the
|
||||
// * loading process.
|
||||
// */
|
||||
// export async function loadAndMerge(blob: Uint8Array, document: Document, progress: (doc: Document, pct: number) => Promise<void>): Promise<Document> {
|
||||
// // Identify the type of file by loading the first 4 bytes.
|
||||
// let magicNumbers = []
|
||||
// const [headerReader, mainReader] = reader.tee()
|
||||
// let headerReaderImpl = headerReader.getReader({mode: 'byob'});
|
||||
// try {
|
||||
// const header = new Uint8Array(4);
|
||||
// await headerReaderImpl.read(header)
|
||||
// magicNumbers = Array.from(header)
|
||||
// } catch (e) {
|
||||
// console.error(e);
|
||||
// } finally {
|
||||
// await headerReaderImpl.cancel()
|
||||
// }
|
||||
// // Depending on the file type, merge the GLB or GLBS files.
|
||||
// let finalDocument: Document;
|
||||
// if (magicNumbers[0] === '{'.charCodeAt(0)) { // GLTF
|
||||
// finalDocument = await mergeGltf(mainReader, document);
|
||||
// } else if (magicNumbers === "glTF".split('').map(c => c.charCodeAt(0))) { // GLB
|
||||
// finalDocument = await mergeGlb(mainReader, document);
|
||||
// } else if (magicNumbers === "GLBS".split('').map(c => c.charCodeAt(0))) { // GLBS
|
||||
// finalDocument = await mergeGlbs(mainReader, document);
|
||||
// } else {
|
||||
// throw new Error('Unknown file type (not GLTF, GLB, or GLBS, magic numbers: ' + magicNumbers + ')');
|
||||
// }
|
||||
// return finalDocument
|
||||
// }
|
||||
//
|
||||
// function mergeGlb(blob: Uint8Array, document: Document): Promise<Document> {
|
||||
// new WebIO().readAsJSON()
|
||||
// }
|
||||
12
yarn.lock
12
yarn.lock
@@ -29,6 +29,13 @@
|
||||
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.9.tgz#7b903b6149b0f8fa7ad564af646c4c38a77fc44b"
|
||||
integrity sha512-9tcKgqKbs3xGJ+NtKF2ndOBBLVwPjl1SHxPQkd36r3Dlirw3xWUeGaTbqr7uGZcTaxkVNwc+03SVP7aCdWrTlA==
|
||||
|
||||
"@gltf-transform/core@^3.10.0":
|
||||
version "3.10.0"
|
||||
resolved "https://registry.yarnpkg.com/@gltf-transform/core/-/core-3.10.0.tgz#854e7345f23971e4e7367a29183a2d1b62d45e46"
|
||||
integrity sha512-NxVKhSWvH0j1tjZE8Yl461HUMyZLmYmqcbqHw0TOcQd5Q1SV7Y5w6W68XMt9/amRfMAiJLLNREE7kbr+Z0Ydbw==
|
||||
dependencies:
|
||||
property-graph "^1.3.1"
|
||||
|
||||
"@google/model-viewer@^3.4.0":
|
||||
version "3.4.0"
|
||||
resolved "https://registry.yarnpkg.com/@google/model-viewer/-/model-viewer-3.4.0.tgz#dd3fd098b85ae5953a93f8eeef0e62434a0e7cc0"
|
||||
@@ -1878,6 +1885,11 @@ promise-worker-transferable@^1.0.4:
|
||||
is-promise "^2.1.0"
|
||||
lie "^3.0.2"
|
||||
|
||||
property-graph@^1.3.1:
|
||||
version "1.3.1"
|
||||
resolved "https://registry.yarnpkg.com/property-graph/-/property-graph-1.3.1.tgz#25fb2c8040ba9b15f04cda90a443f3f844505293"
|
||||
integrity sha512-gei3N/bHWJdCItJ4blnlGWd9iauEZI+JZYj/A0D177XSI01+QhiJGAVscYBhe3Yywow3A2QJzVtsO2P+UgrRRQ==
|
||||
|
||||
react-error-overlay@6.0.9:
|
||||
version "6.0.9"
|
||||
resolved "https://registry.yarnpkg.com/react-error-overlay/-/react-error-overlay-6.0.9.tgz#3c743010c9359608c375ecd6bc76f35d93995b0a"
|
||||
|
||||
Reference in New Issue
Block a user