Mirror of https://github.com/yeicor-3d/yet-another-cad-viewer.git (synced 2025-12-19 22:24:17 +01:00)

Commit: export all server objects to a file for static deployments and demo for logo
.gitignore (vendored): 5 changed lines

@@ -8,10 +8,9 @@
 /.idea/
 /parcel-bundle-reports/
 
-# TODO: Figure out if we want to keep a big default skybox image in the repo
+# TODO: Figure out which assets to keep in the repo
-/assets/st_peters_square_night_8k.jpg
 /assets/fox.glb
-/yacv_server/logo/*.glb
+/assets/logo.glbs
 
 *.iml
 venv/
@@ -1,6 +1,5 @@
 {
   "compilerOptions": {
-    "lib": ["ES2020", "DOM"],
-    "allowSyntheticDefaultImports": true
+    "lib": ["ES2020", "DOM"]
   }
 }
@@ -15,12 +15,16 @@ if 'YACV_DISABLE_SERVER' not in os.environ:
     # the environment variable YACV_DISABLE_SERVER to a non-empty value
     server.start()
 
+# Expose some nice aliases using the default server instance
+show = server.show
+show_object = show
+
 
 def _get_app() -> web.Application:
     """Required by aiohttp-devtools"""
     logging.basicConfig(level=logging.DEBUG)
     from logo.logo import build_logo
-    server.show_object(build_logo(), 'logo')
+    server.show_cad(build_logo(), 'logo')
     return server.app
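For reference, a minimal usage sketch of the new module-level aliases from a user script. This assumes the package is importable as yacv_server and that the background server was started on import (YACV_DISABLE_SERVER unset); the part and the names are purely illustrative:

    from build123d import *

    # New aliases bound to the default Server instance (assumed import path).
    from yacv_server import show, show_object

    with BuildPart() as part:
        Box(10, 20, 30)

    show(part.part.wrapped, 'demo_box')                # forwarded to server.show
    show_object(part.part.wrapped, 'demo_box_alias')   # same callable as show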
yacv_server/logo.py (new file): 41 lines

@@ -0,0 +1,41 @@
+import asyncio
+import logging
+import os
+
+from OCP.TopoDS import TopoDS_Shape
+from build123d import *
+
+
+def build_logo() -> TopoDS_Shape:
+    """Builds the CAD part of the logo"""
+    with BuildPart() as logo_obj:
+        Box(1, 2, 3)
+    return logo_obj.part.wrapped
+
+
+if __name__ == "__main__":
+    logging.basicConfig(level=logging.DEBUG)
+
+    # Start an offline "server" to merge the CAD part of the logo with the animated GLTF part of the logo
+    os.environ['YACV_DISABLE_SERVER'] = '1'
+    from __init__ import show_object, server
+    ASSETS_DIR = os.getenv('ASSETS_DIR', os.path.join(os.path.dirname(__file__), '..', 'assets'))
+
+    # 1. Add the CAD part of the logo to the server
+    obj = build_logo()
+    show_object(obj, 'logo')
+
+    # 2. Load the GLTF part of the logo
+    with open(os.path.join(ASSETS_DIR, 'fox.glb'), 'rb') as f:
+        gltf = f.read()
+    show_object(gltf, 'fox.glb')
+
+    # 3. Save the complete logo to a GLBS file
+    with open(os.path.join(ASSETS_DIR, 'logo.glbs'), 'wb') as f:
+        async def writer():
+            async for chunk in server.export_all():
+                f.write(chunk)
+
+        asyncio.run(writer())
+
+    print('Logo saved to', os.path.join(ASSETS_DIR, 'logo.glbs'))
@@ -1,20 +0,0 @@
-from OCP.TopoDS import TopoDS_Shape
-from build123d import *
-from tqdm import tqdm
-
-from tessellate import tessellate, tessellate_count
-
-
-def build_logo() -> TopoDS_Shape:
-    """Builds the CAD part of the logo"""
-    with BuildPart() as logo_obj:
-        Box(1, 2, 3)
-    return logo_obj.part.wrapped
-
-
-if __name__ == "__main__":
-    obj = build_logo()
-
-    for update in tqdm(tessellate(obj.wrapped), total=tessellate_count(obj.wrapped)):
-        # print(update.gltf)
-        update.gltf.save(f'logo_{update.kind}.glb')  # Will overwrite the file for each update
@@ -1,13 +1,14 @@
 import asyncio
 import atexit
 import hashlib
+import logging
 import os
 import signal
 import sys
 import time
 from dataclasses import dataclass, field
 from threading import Thread
-from typing import Optional, Dict, Union, AsyncGenerator
+from typing import Optional, Dict, Union, AsyncGenerator, List
 
 import tqdm.asyncio
 from OCP.TopoDS import TopoDS_Shape
@@ -64,7 +65,7 @@ class Server:
 
     def start(self):
         """Starts the web server in the background"""
-        assert self.thread is None, "Server already started"
+        assert self.thread is None, "Server currently running, cannot start another one"
        # Start the server in a separate daemon thread
         self.thread = Thread(target=self._run_server, name='yacv_server', daemon=True)
         signal.signal(signal.SIGINT | signal.SIGTERM, self.stop)
@@ -141,17 +142,25 @@ class Server:
         logger.info('show_object(%s, %s) took %.3f seconds', name, hash, time.time() - start)
         return precomputed_info
 
+    def show(self, any_object: Union[bytes, TopoDS_Shape, any], name: Optional[str] = None, **kwargs):
+        """Publishes "any" object to the server"""
+        if isinstance(any_object, bytes):
+            self.show_gltf(any_object, name, **kwargs)
+        else:
+            self.show_cad(any_object, name, **kwargs)
+
     def show_gltf(self, gltf: bytes, name: Optional[str] = None, **kwargs):
         """Publishes any single-file GLTF object to the server (GLB format recommended)."""
         start = time.time()
         # Precompute the info and send it to the client as if it was a CAD object
-        precomputed_info = self._show_common(hashlib.md5(gltf).hexdigest(), name, start)
+        precomputed_info = self._show_common(name, hashlib.md5(gltf).hexdigest(), start)
         # Also pre-populate the GLTF data for the object API
         publish_to = BufferedPubSub[bytes]()
         publish_to.publish_nowait(gltf)
+        publish_to.publish_nowait(b'')  # Signal the end of the stream
         self.object_events[precomputed_info.name] = publish_to
 
-    def show_object(self, obj: Union[TopoDS_Shape, any], name: Optional[str] = None, **kwargs):
+    def show_cad(self, obj: Union[TopoDS_Shape, any], name: Optional[str] = None, **kwargs):
         """Publishes a CAD object to the server"""
         start = time.time()
         # Try to grab a shape if a different type of object was passed
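A minimal sketch of how the new dispatching show() method behaves, assuming an already-constructed Server instance named server; the GLB path is illustrative:

    # Raw bytes are treated as a single-file GLTF (GLB) and routed to show_gltf;
    # any other object (e.g. a TopoDS_Shape) falls through to show_cad.
    with open('assets/fox.glb', 'rb') as f:  # illustrative GLB file
        server.show(f.read(), 'fox.glb')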
@@ -175,7 +184,7 @@
         """Returns the object file with the matching name, building it if necessary."""
 
         # Start exporting the object (or fail if not found)
-        export_data = self.export(request.match_info['name'])
+        export_data = self._export(request.match_info['name'])
         response = web.StreamResponse()
         try:
             # First exported element is the object itself, grab it to collect data
@@ -190,9 +199,10 @@
 
             # Convert the GLB sequence to a GLBS sequence and write it to the response
             with logging_redirect_tqdm(tqdm_class=tqdm.asyncio.tqdm):
-                # noinspection PyTypeChecker
-                glb_parts = tqdm.asyncio.tqdm(export_data, total=total_parts)
-                async for chunk in glb_sequence_to_glbs(glb_parts):
+                if logger.isEnabledFor(logging.INFO):
+                    # noinspection PyTypeChecker
+                    export_data = tqdm.asyncio.tqdm(export_data, total=total_parts)
+                async for chunk in glb_sequence_to_glbs(export_data):
                     await response.write(chunk)
         finally:
             # Close the export data subscription
@@ -202,19 +212,21 @@ class Server:
         await response.write_eof()
         return response
 
-    async def export(self, name: str) -> AsyncGenerator[Union[TopoDS_Shape, bytes], None]:
+    async def _export(self, name: str) -> AsyncGenerator[Union[TopoDS_Shape, bytes], None]:
         """Export the given previously-shown object to a sequence of GLB files, building it if necessary."""
         start = time.time()
         # Check that the object to build exists and grab it if it does
-        subscription = self.show_events.subscribe(include_future=False)
-        obj: Optional[TopoDS_Shape] = None
         found = False
-        async for data in subscription:
-            if data.name == name:
-                obj = data.obj
-                found = True  # Required because obj could be None
-                break
-        await subscription.aclose()
+        obj: Optional[TopoDS_Shape] = None
+        subscription = self.show_events.subscribe(include_future=False)
+        try:
+            async for data in subscription:
+                if data.name == name:
+                    obj = data.obj
+                    found = True  # Required because obj could be None
+                    break
+        finally:
+            await subscription.aclose()
         if not found:
             raise web.HTTPNotFound(text=f'No object named {name} was previously shown')
@@ -253,3 +265,51 @@ class Server:
                 yield chunk
         finally:
             await subscription.aclose()
+
+    async def export_all(self) -> AsyncGenerator[bytes, None]:
+        """Export all previously shown objects to a single GLBS file, returned as an async generator.
+
+        This is useful for fully-static deployments where the frontend handles everything."""
+        # Check that the object to build exists and grab it if it does
+        all_object_names: List[str] = []
+        total_export_size = 0
+        subscription = self.show_events.subscribe(include_future=False)
+        try:
+            async for data in subscription:
+                all_object_names.append(data.name)
+                if data.obj is not None:
+                    total_export_size += tessellate_count(data.obj)
+                else:
+                    total_export_size += 1
+        finally:
+            await subscription.aclose()
+
+        # Create a generator that merges the export of all objects
+        async def _merge_exports() -> AsyncGenerator[bytes, None]:
+            for i, name in enumerate(all_object_names):
+                obj_subscription = self._export(name)
+                try:
+                    obj = await anext(obj_subscription)
+                    glb_parts = obj_subscription
+                    if logger.isEnabledFor(logging.INFO):
+                        total = tessellate_count(obj) if obj is not None else 1
+                        # noinspection PyTypeChecker
+                        glb_parts = tqdm.asyncio.tqdm(obj_subscription, total=total)
+                    async for glb_part in glb_parts:
+                        yield glb_part
+                finally:
+                    await obj_subscription.aclose()
+
+        # Need to have a single subscription to all objects to write a valid GLBS file
+        subscription = _merge_exports()
+        try:
+            with logging_redirect_tqdm(tqdm_class=tqdm.asyncio.tqdm):
+                glbs_parts = subscription
+                if logger.isEnabledFor(logging.INFO):
+                    # noinspection PyTypeChecker
+                    glbs_parts = tqdm.asyncio.tqdm(glbs_parts, total=total_export_size, position=0)
+                glbs_parts = glb_sequence_to_glbs(glbs_parts)
+                async for glbs_part in glbs_parts:
+                    yield glbs_part
+        finally:
+            await subscription.aclose()
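For reference, a minimal consumer sketch of export_all(), mirroring the yacv_server/logo.py demo added in this commit; the output path is illustrative and server stands for the module-level Server instance:

    import asyncio

    async def dump_all(path: str) -> None:
        """Drain export_all() into a single GLBS file on disk."""
        with open(path, 'wb') as f:
            async for chunk in server.export_all():
                f.write(chunk)

    asyncio.run(dump_all('assets/logo.glbs'))  # illustrative output path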