better networked model loading

Yeicor
2024-02-17 18:14:27 +01:00
parent a0b8cfe2a8
commit f50efd3701
5 changed files with 89 additions and 85 deletions

View File

@@ -25,7 +25,7 @@ let sData: Ref<SceneManagerData> = SceneMgr.newData();
 // Set up the load model event listener
 let networkMgr = new NetworkManager();
 networkMgr.addEventListener('update', (model: NetworkUpdateEvent) => {
-    sData.value.viewerSrc = model.url;
+    SceneMgr.loadModel(sData.value, model.name, model.url);
 });
 // Start loading all configured models ASAP
 for (let model of settings.preloadModels) {
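
A quick, hypothetical smoke test of the rewired listener (the model name and URL are made up): since NetworkManager extends EventTarget, the update event can be dispatched by hand.

    let mgr = new NetworkManager();
    mgr.addEventListener('update', (model: NetworkUpdateEvent) => {
        // The real listener above hands these off to SceneMgr.loadModel
        console.log("update:", model.name, model.url);
    });
    mgr.dispatchEvent(new NetworkUpdateEvent("bench", "http://localhost:8080/bench.glb"));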

View File

@@ -1,6 +1,7 @@
 export class NetworkUpdateEvent extends Event {
     name: string;
     url: string;
+
     constructor(name: string, url: string) {
         super("update");
         this.name = name;
@@ -27,15 +28,11 @@ export class NetworkManager extends EventTarget {
             // Get the last part of the URL as the "name" of the model
             let name = url.split("/").pop();
             name = name?.split(".")[0] || `unknown-${Math.random()}`;
-            let prevHash = this.knownObjectHashes[name];
             // Use a head request to get the hash of the file
-            let response = await fetch(url, { method: "HEAD" });
+            let response = await fetch(url, {method: "HEAD"});
             let hash = response.headers.get("etag");
             // Only trigger an update if the hash has changed
-            if (hash !== prevHash) {
-                this.knownObjectHashes[name] = hash;
-                this.dispatchEvent(new NetworkUpdateEvent(name, url));
-            }
+            this.foundModel(name, hash, url);
         }
     }
@@ -44,13 +41,7 @@ export class NetworkManager extends EventTarget {
         ws.onmessage = (event) => {
             let data = JSON.parse(event.data);
             console.debug("WebSocket message", data);
-            let name = data.name;
-            let prevHash = this.knownObjectHashes[name];
-            let hash = data.hash;
-            if (hash !== prevHash) {
-                this.knownObjectHashes[name] = hash;
-                this.dispatchEvent(new NetworkUpdateEvent(name, data.url));
-            }
+            this.foundModel(data.name, data.hash, data.url);
         };
         ws.onerror = (event) => {
             console.error("WebSocket error", event);
@@ -60,4 +51,12 @@ export class NetworkManager extends EventTarget {
             setTimeout(() => this.monitorWebSocket(url), 500);
         }
     }
+
+    private foundModel(name: string, hash: string, url: string) {
+        let prevHash = this.knownObjectHashes[name];
+        if (hash !== prevHash) {
+            this.knownObjectHashes[name] = hash;
+            this.dispatchEvent(new NetworkUpdateEvent(name, url));
+        }
+    }
 }
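
A sketch of the deduplication contract that the extracted foundModel() now guarantees for both the polling and WebSocket paths. The names and hashes are invented, and the `as any` cast sidesteps `private` purely for illustration:

    let mgr = new NetworkManager();
    let fired = 0;
    mgr.addEventListener('update', () => fired++);
    (mgr as any).foundModel("part", "etag-1", "http://localhost/part.glb"); // new hash: dispatches
    (mgr as any).foundModel("part", "etag-1", "http://localhost/part.glb"); // unchanged hash: skipped
    (mgr as any).foundModel("part", "etag-2", "http://localhost/part.glb"); // changed hash: dispatches
    console.assert(fired === 2);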

View File

@@ -3,6 +3,7 @@ import type {ModelScene} from "@google/model-viewer/lib/three-components/ModelSc
 import {ref, Ref} from 'vue';
 import {Document} from '@gltf-transform/core';
 import {ModelViewerInfo} from "./viewer/ModelViewerWrapper.vue";
+import {splitGlbs} from "../models/glb/glbs";
 
 type SceneManagerData = {
     /** When updated, forces the viewer to load a new model replacing the current one */
@@ -32,6 +33,22 @@ export class SceneMgr {
         });
     }
 
+    /** Loads a GLB/GLBS model from a URL and adds it to the viewer or replaces it if the names match */
+    static async loadModel(data: SceneManagerData, name: string, url: string) {
+        let response = await fetch(url);
+        if (!response.ok) throw new Error("Failed to fetch model: " + response.statusText);
+        let glbsSplitter = splitGlbs(response.body!);
+        let {value: numChunks} = await glbsSplitter.next();
+        console.log("Loading model with", numChunks, "chunks");
+        while (true) {
+            let {value: chunk, done} = await glbsSplitter.next();
+            if (done) break;
+            console.log("Got chunk", chunk);
+            // Override the current model with the new one
+            data.viewerSrc = URL.createObjectURL(new Blob([chunk], {type: 'model/gltf-binary'}));
+        }
+    }
+
     /** Should be called when any model finishes loading successfully (after a viewerSrc update) */
     static onload(data: SceneManagerData, info: typeof ModelViewerInfo) {
         console.log("ModelViewer loaded", info);

View File

@@ -1,79 +1,67 @@
-export type SplitGlbsResult = {
-    numChunks: number;
-    glbReader: ReadableStream<Uint8Array>;
-}
+const textDecoder = new TextDecoder();
 
 /**
  * Given a stream of binary data (e.g. from a fetch response), splits a GLBS file into its component GLB files and
- * returns them as a stream of Uint8Arrays with known lengths. It also supports simple GLB files by returning itself.
+ * returns them as a generator of Uint8Arrays (that starts with the expected length).
+ * It also supports simple GLB files (no splitting needed).
  */
-export async function splitGlbs(reader: ReadableStream<Uint8Array>): Promise<SplitGlbsResult> {
-    // Create a transform stream that splits the GLBS file into its component GLB files by reading the length of each
-    // chunk and then reading that many bytes from the input stream.
-    let buffer4Bytes = new Uint8Array(4);
-    let readerImpl = reader.getReader({mode: 'byob'});
-    await readerImpl.read(buffer4Bytes);
-    if (buffer4Bytes[0] === '{'.charCodeAt(0) || Array.from(buffer4Bytes) === "glTF".split('').map(c => c.charCodeAt(0))) {
-        return {numChunks: 1, glbReader: await singleBlob(reader)}
-    }
-    let isGlbs = Array.from(buffer4Bytes) === "GLBS".split('').map(c => c.charCodeAt(0));
-    if (!isGlbs) throw new Error('Invalid magic numbers for expected GLBS file: ' + buffer4Bytes);
-    // Create a new readable stream that splits the GLBS file into its component GLB files by reading the length of each
-    // chunk and then reading that many bytes from the input stream.
-    // - But first, we read the number of chunks (can be 0xFFFFFFFF if the number of chunks is unknown).
-    await readerImpl.read(buffer4Bytes);
-    let numChunks = new DataView(buffer4Bytes.buffer).getUint32(0, true);
-    return {
-        numChunks,
-        // - Then, we read the length of each chunk followed by the chunk itself.
-        glbReader: new ReadableStream<Uint8Array>({
-            async start(controller) {
-                for (let i = 0; i < numChunks; i++) {
-                    // - Read length
-                    let {done} = await readerImpl.read(buffer4Bytes);
-                    if (done) {
-                        if (numChunks != 0xFFFFFFFF) throw new Error('Unexpected end of stream while reading chunk length');
-                        else break // We reached the end of the stream of unknown length, so we stop reading chunks.
-                    }
-                    let length = new DataView(buffer4Bytes.buffer).getUint32(0, true);
-                    // - Read chunk
-                    let chunkReader = await singleBlob(reader, length);
-                    let {value: fullChunk} = await chunkReader.getReader().read();
-                    controller.enqueue(fullChunk);
-                }
-                controller.close();
-            }
-        })
-    };
+export async function* splitGlbs(readerSrc: ReadableStream<Uint8Array>): AsyncGenerator<number | Uint8Array> {
+    let reader = readerSrc.getReader();
+    let [buffer4Bytes, buffered] = await readN(reader, new Uint8Array(), 4);
+    console.assert(buffer4Bytes.length === 4, 'Expected 4 bytes for magic numbers')
+    let magic = textDecoder.decode(buffer4Bytes)
+    if (magic === 'glTF' /* GLB */ || magic[0] == '{' /* glTF */) {
+        yield 1
+        let remaining = await readAll(reader, buffered);
+        // Add back the header to the beginning of the document
+        let finalBuffer = new Uint8Array(buffer4Bytes.length + remaining.length);
+        finalBuffer.set(buffer4Bytes);
+        finalBuffer.set(remaining, buffer4Bytes.length);
+        yield finalBuffer
+    } else if (magic === "GLBS") {
+        // First, we read the number of chunks (can be 0xFFFFFFFF if the number of chunks is unknown).
+        [buffer4Bytes, buffered] = await readN(reader, buffered, 4);
+        let numChunks = new DataView(buffer4Bytes.buffer).getUint32(0, true);
+        yield numChunks
+        // Then, we read the length of each chunk followed by the chunk itself.
+        for (let i = 0; i < numChunks; i++) {
+            // - Read length
+            [buffer4Bytes, buffered] = await readN(reader, buffered, 4);
+            if (buffer4Bytes.length === 0) {
+                if (numChunks != 0xFFFFFFFF) throw new Error('Unexpected end of stream while reading chunk length');
+                else break // We reached the end of the stream of unknown length, so we stop reading chunks.
+            }
+            let length = new DataView(buffer4Bytes.buffer).getUint32(0, true);
+            // - Read chunk
+            let chunk: Uint8Array;
+            [chunk, buffered] = await readN(reader, buffered, length);
+            yield chunk
+        }
+    } else throw new Error('Invalid magic numbers for expected GLB/GLBS file: ' + magic);
+    reader.releaseLock()
 }
 
-async function singleBlob(reader: ReadableStream<Uint8Array>, stopAfter: number | null = null): Promise<ReadableStream<Uint8Array>> {
-    // Make sure the reader reads the entire stream at once.
-    const readerImpl = reader.getReader();
-    let bufferedChunks: Uint8Array = new Uint8Array();
-    let done = false;
-    let length = 0;
-    while (!done) {
-        let {value, done: d} = await readerImpl.read();
-        if (value) {
-            // TODO: This is inefficient. We should be able to avoid copying the buffer each time. byob?
-            let oldBuffer = bufferedChunks;
-            let newLength = bufferedChunks.length + value.length;
-            if (stopAfter !== null && newLength > stopAfter) {
-                newLength = stopAfter;
-                value = value.slice(0, stopAfter - bufferedChunks.length);
-            }
-            bufferedChunks = new Uint8Array(newLength);
-            bufferedChunks.set(oldBuffer);
-            bufferedChunks.set(value, length);
-            length += value.length;
-        }
-        done = d;
-    }
-    return new ReadableStream<Uint8Array>({
-        start(controller) {
-            controller.enqueue(bufferedChunks);
-            controller.close();
-        }
-    });
+/** Reads exactly `n` bytes from the reader and returns them as a Uint8Array.
+ * An over-read is possible, in which case the returned array will still have `n` bytes and the over-read bytes will be
+ * returned. They should be provided to the next call to `readN` to avoid losing data.
+ */
+async function readN(reader: ReadableStreamDefaultReader<Uint8Array>, buffered: Uint8Array, n: number | null = null): Promise<[Uint8Array, Uint8Array]> {
+    let buffer = buffered;
+    while (n === null || buffer.length < n) {
+        let {done, value} = await reader.read();
+        if (done) break;
+        let newBuffer = new Uint8Array(buffer.length + value.length);
+        newBuffer.set(buffer);
+        newBuffer.set(value, buffer.length);
+        buffer = newBuffer;
+    }
+    if (n !== null) {
+        return [buffer.slice(0, n), buffer.slice(n)]
+    } else {
+        return [buffer, new Uint8Array()];
+    }
+}
+
+async function readAll(reader: ReadableStreamDefaultReader<Uint8Array>, buffered: Uint8Array): Promise<Uint8Array> {
+    return (await readN(reader, buffered, null))[0];
 }
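
To make the container format concrete, here is a small, hypothetical round-trip against splitGlbs as implemented above. A GLBS file is the ASCII magic "GLBS", a little-endian uint32 chunk count, and then, per chunk, a little-endian uint32 byte length followed by that many GLB bytes; the encoder below and its dummy payloads are made up for illustration:

    // Build an in-memory GLBS container around the given (dummy) chunk payloads.
    function encodeGlbs(chunks: Uint8Array[]): Uint8Array {
        let u32 = (v: number) => {
            let b = new Uint8Array(4);
            new DataView(b.buffer).setUint32(0, v, true); // little-endian
            return b;
        };
        let parts = [new TextEncoder().encode("GLBS"), u32(chunks.length)];
        for (let c of chunks) parts.push(u32(c.length), c);
        let out = new Uint8Array(parts.reduce((n, p) => n + p.length, 0));
        let offset = 0;
        for (let p of parts) { out.set(p, offset); offset += p.length; }
        return out;
    }

    async function demo() {
        let bytes = encodeGlbs([new Uint8Array([1, 2, 3]), new Uint8Array([4, 5])]);
        let stream = new Blob([bytes]).stream() as ReadableStream<Uint8Array>;
        for await (let item of splitGlbs(stream)) {
            console.log(item); // 2, then Uint8Array [1, 2, 3], then Uint8Array [4, 5]
        }
    }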

View File

@@ -1,5 +1,5 @@
 {
     "compilerOptions": {
-        "lib": ["ES2020", "DOM"]
+        "lib": ["es2015", "DOM"]
     }
 }