better networked model loading

This commit is contained in:
Yeicor
2024-02-17 18:14:27 +01:00
parent a0b8cfe2a8
commit f50efd3701
5 changed files with 89 additions and 85 deletions

View File

@@ -25,7 +25,7 @@ let sData: Ref<SceneManagerData> = SceneMgr.newData();
// Set up the load model event listener
let networkMgr = new NetworkManager();
networkMgr.addEventListener('update', (model: NetworkUpdateEvent) => {
sData.value.viewerSrc = model.url;
SceneMgr.loadModel(sData.value, model.name, model.url);
});
// Start loading all configured models ASAP
for (let model of settings.preloadModels) {

View File

@@ -1,6 +1,7 @@
export class NetworkUpdateEvent extends Event {
name: string;
url: string;
constructor(name: string, url: string) {
super("update");
this.name = name;
@@ -27,15 +28,11 @@ export class NetworkManager extends EventTarget {
// Get the last part of the URL as the "name" of the model
let name = url.split("/").pop();
name = name?.split(".")[0] || `unknown-${Math.random()}`;
let prevHash = this.knownObjectHashes[name];
// Use a head request to get the hash of the file
let response = await fetch(url, { method: "HEAD" });
let response = await fetch(url, {method: "HEAD"});
let hash = response.headers.get("etag");
// Only trigger an update if the hash has changed
if (hash !== prevHash) {
this.knownObjectHashes[name] = hash;
this.dispatchEvent(new NetworkUpdateEvent(name, url));
}
this.foundModel(name, hash, url);
}
}
@@ -44,13 +41,7 @@ export class NetworkManager extends EventTarget {
ws.onmessage = (event) => {
let data = JSON.parse(event.data);
console.debug("WebSocket message", data);
let name = data.name;
let prevHash = this.knownObjectHashes[name];
let hash = data.hash;
if (hash !== prevHash) {
this.knownObjectHashes[name] = hash;
this.dispatchEvent(new NetworkUpdateEvent(name, data.url));
}
this.foundModel(data.name, data.hash, data.url);
};
ws.onerror = (event) => {
console.error("WebSocket error", event);
@@ -60,4 +51,12 @@ export class NetworkManager extends EventTarget {
setTimeout(() => this.monitorWebSocket(url), 500);
}
}
/** Records the latest known hash for `name` and dispatches an "update" event when it changed. */
private foundModel(name: string, hash: string, url: string) {
    // No-op when the reported hash matches the one we already know about.
    if (this.knownObjectHashes[name] === hash) return;
    this.knownObjectHashes[name] = hash;
    this.dispatchEvent(new NetworkUpdateEvent(name, url));
}
}

View File

@@ -3,6 +3,7 @@ import type {ModelScene} from "@google/model-viewer/lib/three-components/ModelSc
import {ref, Ref} from 'vue';
import {Document} from '@gltf-transform/core';
import {ModelViewerInfo} from "./viewer/ModelViewerWrapper.vue";
import {splitGlbs} from "../models/glb/glbs";
type SceneManagerData = {
/** When updated, forces the viewer to load a new model replacing the current one */
@@ -32,6 +33,22 @@ export class SceneMgr {
});
}
/** Loads a GLB/GLBS model from a URL and adds it to the viewer or replaces it if the names match */
/** Loads a GLB/GLBS model from a URL and adds it to the viewer or replaces it if the names match.
 * @param data scene state; `viewerSrc` is overwritten with a blob URL for each received chunk
 * @param name model name — presumably used for replace-by-name matching; TODO confirm, currently unused here
 * @param url location of the GLB/GLBS document to stream
 * @throws Error when the HTTP response is not ok
 */
static async loadModel(data: SceneManagerData, name: string, url: string) {
    let response = await fetch(url);
    if (!response.ok) throw new Error("Failed to fetch model: " + response.statusText);
    let glbsSplitter = splitGlbs(response.body!);
    // The first yielded value is the chunk count (may be 0xFFFFFFFF for unknown length).
    let {value: numChunks} = await glbsSplitter.next();
    console.log("Loading model with", numChunks, "chunks");
    while (true) {
        let {value: chunk, done} = await glbsSplitter.next();
        if (done) break;
        console.log("Got chunk", chunk);
        // Release the blob URL being superseded — otherwise every streamed chunk
        // leaks an object URL (and its backing Blob) for the lifetime of the page.
        if (data.viewerSrc?.startsWith("blob:")) URL.revokeObjectURL(data.viewerSrc);
        // Override the current model with the new one
        data.viewerSrc = URL.createObjectURL(new Blob([chunk], {type: 'model/gltf-binary'}));
    }
}
/** Should be called when any model finishes loading successfully (after a viewerSrc update) */
static onload(data: SceneManagerData, info: typeof ModelViewerInfo) {
console.log("ModelViewer loaded", info);

View File

@@ -1,79 +1,67 @@
export type SplitGlbsResult = {
numChunks: number;
glbReader: ReadableStream<Uint8Array>;
}
const textDecoder = new TextDecoder();
/**
* Given a stream of binary data (e.g. from a fetch response), splits a GLBS file into its component GLB files and
* returns them as a stream of Uint8Arrays with known lengths. It also supports simple GLB files by returning itself.
* returns them as a generator of Uint8Arrays (that starts with the expected length).
* It also supports simple GLB files (no splitting needed).
*/
export async function splitGlbs(reader: ReadableStream<Uint8Array>): Promise<SplitGlbsResult> {
// Create a transform stream that splits the GLBS file into its component GLB files by reading the length of each
// chunk and then reading that many bytes from the input stream.
let buffer4Bytes = new Uint8Array(4);
let readerImpl = reader.getReader({mode: 'byob'});
await readerImpl.read(buffer4Bytes);
if (buffer4Bytes[0] === '{'.charCodeAt(0) || Array.from(buffer4Bytes) === "glTF".split('').map(c => c.charCodeAt(0))) {
return {numChunks: 1, glbReader: await singleBlob(reader)}
}
let isGlbs = Array.from(buffer4Bytes) === "GLBS".split('').map(c => c.charCodeAt(0));
if (!isGlbs) throw new Error('Invalid magic numbers for expected GLBS file: ' + buffer4Bytes);
// Create a new readable stream that splits the GLBS file into its component GLB files by reading the length of each
// chunk and then reading that many bytes from the input stream.
// - But first, we read the number of chunks (can be 0xFFFFFFFF if the number of chunks is unknown).
await readerImpl.read(buffer4Bytes);
let numChunks = new DataView(buffer4Bytes.buffer).getUint32(0, true);
return {
numChunks,
// - Then, we read the length of each chunk followed by the chunk itself.
glbReader: new ReadableStream<Uint8Array>({
async start(controller) {
for (let i = 0; i < numChunks; i++) {
// - Read length
let {done} = await readerImpl.read(buffer4Bytes);
if (done) {
if (numChunks != 0xFFFFFFFF) throw new Error('Unexpected end of stream while reading chunk length');
else break // We reached the end of the stream of unknown length, so we stop reading chunks.
}
let length = new DataView(buffer4Bytes.buffer).getUint32(0, true);
// - Read chunk
let chunkReader = await singleBlob(reader, length);
let {value: fullChunk} = await chunkReader.getReader().read();
controller.enqueue(fullChunk);
}
controller.close();
export async function* splitGlbs(readerSrc: ReadableStream<Uint8Array>): AsyncGenerator<number | Uint8Array> {
let reader = readerSrc.getReader();
let [buffer4Bytes, buffered] = await readN(reader, new Uint8Array(), 4);
console.assert(buffer4Bytes.length === 4, 'Expected 4 bytes for magic numbers')
let magic = textDecoder.decode(buffer4Bytes)
if (magic === 'glTF' /* GLB */ || magic[0] == '{' /* glTF */) {
yield 1
let remaining = await readAll(reader, buffered);
// Add back the header to the beginning of the document
let finalBuffer = new Uint8Array(buffer4Bytes.length + remaining.length);
finalBuffer.set(buffer4Bytes);
finalBuffer.set(remaining, buffer4Bytes.length);
yield finalBuffer
} else if (magic !== "GLBS") {
// First, we read the number of chunks (can be 0xFFFFFFFF if the number of chunks is unknown).
[buffer4Bytes, buffered] = await readN(reader, buffered, 4);
let numChunks = new DataView(buffer4Bytes.buffer).getUint32(0, true);
yield numChunks
// Then, we read the length of each chunk followed by the chunk itself.
for (let i = 0; i < numChunks; i++) {
// - Read length
[buffer4Bytes, buffered] = await readN(reader, buffered, 4);
if (buffer4Bytes.length === 0) {
if (numChunks != 0xFFFFFFFF) throw new Error('Unexpected end of stream while reading chunk length');
else break // We reached the end of the stream of unknown length, so we stop reading chunks.
}
})
};
let length = new DataView(buffer4Bytes.buffer).getUint32(0, true);
// - Read chunk
let chunk: Uint8Array
[chunk, buffered] = await readN(reader, buffered, length);
yield chunk
}
} else throw new Error('Invalid magic numbers for expected GLB/GLBS file: ' + magic);
reader.releaseLock()
}
async function singleBlob(reader: ReadableStream<Uint8Array>, stopAfter: number | null = null): Promise<ReadableStream<Uint8Array>> {
// Make sure the reader reads the entire stream at once.
const readerImpl = reader.getReader();
let bufferedChunks: Uint8Array = new Uint8Array();
let done = false;
let length = 0;
while (!done) {
let {value, done: d} = await readerImpl.read();
if (value) {
// TODO: This is inefficient. We should be able to avoid copying the buffer each time. byob?
let oldBuffer = bufferedChunks;
let newLength = bufferedChunks.length + value.length;
if (stopAfter !== null && newLength > stopAfter) {
newLength = stopAfter;
value = value.slice(0, stopAfter - bufferedChunks.length);
}
bufferedChunks = new Uint8Array(newLength);
bufferedChunks.set(oldBuffer);
bufferedChunks.set(value, length);
length += value.length;
}
done = d;
/** Reads up to exactly `n` bytes from the reader (or the whole stream when `n` is null)
 * and returns them as a Uint8Array. If the stream ends before `n` bytes are available,
 * fewer than `n` bytes are returned.
 * An over-read is possible, in which case the over-read bytes are returned as the second
 * element. They should be provided as `buffered` to the next call to `readN` to avoid
 * losing data.
 */
async function readN(reader: ReadableStreamDefaultReader<Uint8Array>, buffered: Uint8Array, n: number | null = null): Promise<[Uint8Array, Uint8Array]> {
    // Collect chunks and concatenate once at the end, instead of reallocating and
    // copying the whole accumulated buffer on every read (accidental O(n^2)).
    let chunks: Uint8Array[] = [buffered];
    let total = buffered.length;
    while (n === null || total < n) {
        let {done, value} = await reader.read();
        if (done) break;
        chunks.push(value!);
        total += value!.length;
    }
    let buffer = new Uint8Array(total);
    let offset = 0;
    for (let chunk of chunks) {
        buffer.set(chunk, offset);
        offset += chunk.length;
    }
    if (n !== null) {
        return [buffer.slice(0, n), buffer.slice(n)]
    } else {
        return [buffer, new Uint8Array()];
    }
}
/** Drains the rest of the stream (including any `buffered` leftover) into one Uint8Array. */
async function readAll(reader: ReadableStreamDefaultReader<Uint8Array>, buffered: Uint8Array): Promise<Uint8Array> {
    const [everything] = await readN(reader, buffered, null);
    return everything;
}

View File

@@ -1,5 +1,5 @@
{
"compilerOptions": {
"lib": ["ES2020", "DOM"]
"lib": ["es2015", "DOM"]
}
}