Several tessellation fixes and a much faster (by comparison) initial load of CAD objects

This commit is contained in:
Yeicor
2024-02-19 20:53:10 +01:00
parent 656daf1bf3
commit c9e8bde9ca
9 changed files with 183 additions and 152 deletions

View File

@@ -3,9 +3,7 @@ import type {ModelScene} from "@google/model-viewer/lib/three-components/ModelSc
import {Ref, ref} from 'vue';
import {Document} from '@gltf-transform/core';
import {ModelViewerInfo} from "./viewer/ModelViewerWrapper.vue";
import {splitGlbs} from "../models/glb/glbs";
import {mergeFinalize, mergePartial, toBuffer} from "../models/glb/merge";
import {settings} from "./settings";
export type SceneMgrRefData = {
/** When updated, forces the viewer to load a new model replacing the current one */
@@ -44,33 +42,20 @@ export class SceneMgr {
/** Loads a GLB/GLBS model from a URL and adds it to the viewer or replaces it if the names match */
static async loadModel(refData: Ref<SceneMgrRefData>, data: SceneMgrData, name: string, url: string) {
let loadStart = performance.now();
// NOTE(review): this hunk (@@ -44,33 +42,20) is rendered without +/- markers, so the lines below mix
// the OLD streaming/GLBS-splitting path (removed by this commit) with the NEW whole-buffer path (added).
// Connect to the URL of the model
let response = await fetch(url);
if (!response.ok) throw new Error("Failed to fetch model: " + response.statusText);
// NOTE(review): removed by this commit — the old path split the response body into GLB chunks
// and merged them incrementally, periodically showing the partial model while loading.
// Split the stream into valid GLB chunks
let glbsSplitter = splitGlbs(response.body!);
let {value: numChunks} = await glbsSplitter.next();
console.log("Loading", name, "which has", numChunks, "GLB chunks");
// Start merging each chunk into the current document, replacing or adding as needed
let lastShow = performance.now();
while (true) {
let {value: glbData, done} = await glbsSplitter.next();
if (done) break;
data.document = await mergePartial(glbData, name, data.document);
await new Promise(r => setTimeout(r, 0)); // Yield to update the UI at 60fps
// TODO: Report load progress
// Show the partial model while loading every once in a while
if (performance.now() - lastShow > settings.displayLoadingEveryMs) {
await this.showCurrentDoc(refData, data);
lastShow = performance.now();
}
}
// NOTE(review): added by this commit — the new path buffers the whole response and merges it in one call,
// which is what the commit message credits for the faster initial load.
// Start merging into the current document, replacing or adding as needed
let glb = new Uint8Array(await response.arrayBuffer());
data.document = await mergePartial(glb, name, data.document);
// Display the final fully loaded model
await this.showCurrentDoc(refData, data);
console.log("Model", name, "loaded in", performance.now() - loadStart, "ms");
}
/** Serializes the current document into a GLB and updates the viewerSrc */

View File

@@ -2,9 +2,9 @@
export const settings = {
preloadModels: [
// @ts-ignore
// new URL('../../assets/fox.glb', import.meta.url).href,
new URL('../../assets/fox.glb', import.meta.url).href,
// @ts-ignore
new URL('../../assets/logo.glbs', import.meta.url).href,
new URL('../../assets/logo.glb', import.meta.url).href,
// Websocket URLs automatically listen for new models from the python backend
//"ws://localhost:8080/"
],

View File

@@ -1,69 +0,0 @@
const textDecoder = new TextDecoder();
/**
 * Given a stream of binary data (e.g. from a fetch response), splits a GLBS file into its component GLB files and
 * returns them as a generator of Uint8Arrays (that starts with the expected length).
 * It also supports simple GLB files (no splitting needed).
 *
 * Yields first the number of chunks (1 for plain GLB/glTF, or the GLBS chunk count which may be
 * 0xFFFFFFFF for "unknown"), then each chunk as a Uint8Array.
 * @throws Error on an invalid magic header or a truncated stream.
 */
export async function* splitGlbs(readerSrc: ReadableStream<Uint8Array>): AsyncGenerator<number | Uint8Array> {
    let reader = readerSrc.getReader();
    try {
        // Sniff the first 4 bytes to decide between GLB/glTF (single document) and a GLBS container.
        let [buffer4Bytes, buffered] = await readN(reader, new Uint8Array(), 4);
        console.assert(buffer4Bytes.length === 4, 'Expected 4 bytes for magic numbers');
        let magic = textDecoder.decode(buffer4Bytes);
        if (magic === 'glTF' /* GLB */ || magic[0] == '{' /* glTF */) {
            yield 1;
            let remaining = await readAll(reader, buffered);
            // Add back the header to the beginning of the document
            let finalBuffer = new Uint8Array(buffer4Bytes.length + remaining.length);
            finalBuffer.set(buffer4Bytes);
            finalBuffer.set(remaining, buffer4Bytes.length);
            yield finalBuffer;
        } else if (magic === "GLBS") {
            // First, we read the number of chunks (can be 0xFFFFFFFF if the number of chunks is unknown).
            [buffer4Bytes, buffered] = await readN(reader, buffered, 4);
            if (buffer4Bytes.length < 4) throw new Error('Unexpected end of stream while reading the chunk count');
            // Pass byteOffset/byteLength so the DataView is correct even if a view (not a copy) is returned.
            let numChunks = new DataView(buffer4Bytes.buffer, buffer4Bytes.byteOffset, buffer4Bytes.byteLength).getUint32(0, true);
            yield numChunks;
            // Then, we read the length of each chunk followed by the chunk itself.
            for (let i = 0; i < numChunks; i++) {
                // - Read length
                [buffer4Bytes, buffered] = await readN(reader, buffered, 4);
                if (buffer4Bytes.length === 0) {
                    if (numChunks != 0xFFFFFFFF) throw new Error('Unexpected end of stream while reading chunk length:' +
                        ' expected ' + (numChunks - i) + ' more chunks');
                    else break; // We reached the end of the stream of unknown length, so we stop reading chunks.
                } else if (buffer4Bytes.length < 4) {
                    // A partially-read length prefix is corruption even for unknown-length streams;
                    // previously this fell through to getUint32 and threw a confusing RangeError.
                    throw new Error('Unexpected end of stream while reading chunk length');
                }
                let length = new DataView(buffer4Bytes.buffer, buffer4Bytes.byteOffset, buffer4Bytes.byteLength).getUint32(0, true);
                // - Read chunk
                // Fix: the original `let chunk: Uint8Array` followed by a line starting with `[` was an
                // ASI hazard (the `[` binds to the preceding token); use a destructuring declaration instead.
                let [chunk, rest] = await readN(reader, buffered, length);
                buffered = rest;
                if (chunk.length < length) throw new Error('Unexpected end of stream while reading a chunk');
                yield chunk;
            }
        } else throw new Error('Invalid magic numbers for expected GLB/GLBS file: ' + magic);
    } finally {
        // Always release the lock, even when parsing throws (the original leaked it on error paths).
        reader.releaseLock();
    }
}
/**
 * Reads up to `n` bytes from the reader and returns them as a Uint8Array.
 * An over-read is possible, in which case the returned array will still have `n` bytes and the over-read bytes will be
 * returned. They should be provided to the next call to `readN` to avoid losing data.
 * If the stream ends early the first array may be shorter than `n`. Pass `n === null` to drain the stream.
 * @returns a tuple of [requested bytes, over-read leftover bytes].
 */
async function readN(reader: ReadableStreamDefaultReader<Uint8Array>, buffered: Uint8Array, n: number | null = null): Promise<[Uint8Array, Uint8Array]> {
    // Collect chunks and concatenate once at the end: the original reallocated and copied the whole
    // accumulated buffer on every read, which is O(total^2) for many small chunks.
    const parts: Uint8Array[] = [buffered];
    let total = buffered.length;
    while (n === null || total < n) {
        const {done, value} = await reader.read();
        if (done || !value) break;
        parts.push(value);
        total += value.length;
    }
    const buffer = new Uint8Array(total);
    let offset = 0;
    for (const part of parts) {
        buffer.set(part, offset);
        offset += part.length;
    }
    if (n !== null) {
        return [buffer.slice(0, n), buffer.slice(n)];
    } else {
        return [buffer, new Uint8Array()];
    }
}
/** Drains the reader (plus any already-buffered bytes) and returns everything as a single Uint8Array. */
async function readAll(reader: ReadableStreamDefaultReader<Uint8Array>, buffered: Uint8Array): Promise<Uint8Array> {
    const [everything] = await readN(reader, buffered, null);
    return everything;
}

View File

@@ -69,7 +69,7 @@ function dropByName(name: string): Transform {
function mergeScenes(): Transform {
return (doc: Document) => {
let root = doc.getRoot();
let scene = root.getDefaultScene();
let scene = root.getDefaultScene() ?? root.listScenes()[0];
for (let dropScene of root.listScenes()) {
if (dropScene === scene) continue;
for (let node of dropScene.listChildren()) {