mirror of
https://github.com/yeicor-3d/yet-another-cad-viewer.git
synced 2025-12-19 14:14:13 +01:00
big rewrite focusing on faster performance and selection improvements
This commit is contained in:
@@ -1,42 +1,21 @@
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
|
||||
from aiohttp import web
|
||||
from yacv_server.yacv import YACV
|
||||
|
||||
from yacv_server.server import Server
|
||||
|
||||
server = Server()
|
||||
yacv = YACV()
|
||||
"""The server instance. This is the main entry point to serve CAD objects and other data to the frontend."""
|
||||
|
||||
if 'YACV_DISABLE_SERVER' not in os.environ:
|
||||
# Start a new server ASAP to let the polling client connect while still building CAD objects
|
||||
# This is a bit of a hack, but it is seamless to the user. This behavior can be disabled by setting
|
||||
# the environment variable YACV_DISABLE_SERVER to a non-empty value
|
||||
server.start()
|
||||
yacv.start()
|
||||
|
||||
# Expose some nice aliases using the default server instance
|
||||
show = server.show
|
||||
show = yacv.show
|
||||
show_object = show
|
||||
show_image = server.show_image
|
||||
show_all = server.show_cad_all
|
||||
export_all = server.export_all
|
||||
|
||||
|
||||
def _get_app() -> web.Application:
|
||||
"""Required by aiohttp-devtools"""
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
from logo import build_logo, ASSETS_DIR
|
||||
logo, img_location, img_path = build_logo()
|
||||
server.show_cad(logo, 'logo')
|
||||
server.show_cad(img_location, 'location')
|
||||
server.show_image(img_path, img_location, 20)
|
||||
server.show_gltf(open(os.path.join(ASSETS_DIR, 'fox.glb'), 'rb').read(), 'fox')
|
||||
return server.app
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Publish the logo to the server (reusing code from the aiohttp-devtools)
|
||||
_get_app()
|
||||
# Keep the server running for testing
|
||||
time.sleep(60)
|
||||
show_image = yacv.show_image
|
||||
show_all = yacv.show_cad_all
|
||||
export_all = yacv.export_all
|
||||
remove = yacv.remove
|
||||
clear = yacv.clear
|
||||
|
||||
@@ -58,8 +58,8 @@ def grab_all_cad() -> List[Tuple[str, CADLike]]:
|
||||
return shapes
|
||||
|
||||
|
||||
def image_to_gltf(source: str | bytes, center: any, ppmm: int, name: Optional[str] = None,
|
||||
save_mime: str = 'image/jpeg') -> Tuple[bytes, str]:
|
||||
def image_to_gltf(source: str | bytes, center: any, width: Optional[float] = None, height: Optional[float] = None,
|
||||
name: Optional[str] = None, save_mime: str = 'image/jpeg') -> Tuple[bytes, str]:
|
||||
"""Convert an image to a GLTF CAD object, indicating the center location and pixels per millimeter."""
|
||||
from PIL import Image
|
||||
import io
|
||||
@@ -105,11 +105,17 @@ def image_to_gltf(source: str | bytes, center: any, ppmm: int, name: Optional[st
|
||||
|
||||
# Build the gltf
|
||||
mgr = GLTFMgr(image=(img_buf, save_mime))
|
||||
if width is None and height is None:
|
||||
raise ValueError('At least one of width or height must be specified') # Fallback to pixels == mm?
|
||||
elif width is None:
|
||||
width = img.width / img.height * height
|
||||
elif height is None:
|
||||
height = height or img.height / img.width * width # Apply default aspect ratio if unspecified
|
||||
mgr.add_face([
|
||||
vert(plane.origin - plane.x_dir * img.width / (2 * ppmm) - plane.y_dir * img.height / (2 * ppmm)),
|
||||
vert(plane.origin + plane.x_dir * img.width / (2 * ppmm) - plane.y_dir * img.height / (2 * ppmm)),
|
||||
vert(plane.origin + plane.x_dir * img.width / (2 * ppmm) + plane.y_dir * img.height / (2 * ppmm)),
|
||||
vert(plane.origin - plane.x_dir * img.width / (2 * ppmm) + plane.y_dir * img.height / (2 * ppmm)),
|
||||
vert(plane.origin - plane.x_dir * width / 2 - plane.y_dir * height / 2),
|
||||
vert(plane.origin + plane.x_dir * width / 2 - plane.y_dir * height / 2),
|
||||
vert(plane.origin + plane.x_dir * width / 2 + plane.y_dir * height / 2),
|
||||
vert(plane.origin - plane.x_dir * width / 2 + plane.y_dir * height / 2),
|
||||
], [
|
||||
(0, 2, 1),
|
||||
(0, 3, 2),
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
from typing import Tuple
|
||||
from typing import Union, Dict
|
||||
|
||||
from build123d import *
|
||||
|
||||
ASSETS_DIR = os.getenv('ASSETS_DIR', os.path.join(os.path.dirname(__file__), '..', 'assets'))
|
||||
|
||||
|
||||
def build_logo(text: bool = True) -> Tuple[Part, Location, str]:
|
||||
def build_logo(text: bool = True) -> Dict[str, Union[Part, Location, str]]:
|
||||
"""Builds the CAD part of the logo"""
|
||||
with BuildPart(Plane.XY.offset(50)) as logo_obj:
|
||||
Box(22, 40, 30)
|
||||
@@ -25,34 +23,44 @@ def build_logo(text: bool = True) -> Tuple[Part, Location, str]:
|
||||
logo_img_location.position = Vector(logo_img_location.position.X - 4e-2, logo_img_location.position.Y,
|
||||
logo_img_location.position.Z)
|
||||
logo_img_path = os.path.join(ASSETS_DIR, 'img.jpg')
|
||||
return logo_obj.part, logo_img_location, logo_img_path
|
||||
|
||||
fox_glb_bytes = open(os.path.join(ASSETS_DIR, 'fox.glb'), 'rb').read()
|
||||
|
||||
return {'fox': fox_glb_bytes, 'logo': logo_obj, 'location': logo_img_location, 'img_path': logo_img_path}
|
||||
|
||||
|
||||
def show_logo(parts: Dict[str, Union[Part, Location, str]]) -> None:
|
||||
"""Shows the prebuilt logo parts"""
|
||||
from yacv_server import show_image, show_object
|
||||
for name, part in parts.items():
|
||||
if isinstance(part, str):
|
||||
show_image(source=part, center=parts['location'], height=18, auto_clear=False)
|
||||
else:
|
||||
show_object(part, name, auto_clear=False)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
from yacv_server import export_all, remove
|
||||
import logging
|
||||
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
|
||||
# Start an offline server to export the CAD part of the logo in a way compatible with the frontend
|
||||
# If this is not set, the server will auto-start on import and show_* calls will provide live updates
|
||||
os.environ['YACV_DISABLE_SERVER'] = '1'
|
||||
from yacv_server import show, show_image
|
||||
testing_server = bool(os.getenv('TESTING_SERVER', 'False'))
|
||||
|
||||
if not testing_server:
|
||||
# Start an offline server to export the CAD part of the logo in a way compatible with the frontend
|
||||
# If this is not set, the server will auto-start on import and show_* calls will provide live updates
|
||||
os.environ['YACV_DISABLE_SERVER'] = 'True'
|
||||
|
||||
# Build the CAD part of the logo
|
||||
logo = build_logo()
|
||||
|
||||
# Add the CAD part of the logo to the server
|
||||
logo, img_location, img_path = build_logo()
|
||||
show(logo, 'base')
|
||||
show(img_location, 'location')
|
||||
show_image(img_path, img_location, 20)
|
||||
show_logo(logo)
|
||||
|
||||
|
||||
async def exporter():
|
||||
# We need access to the actual server object for advanced features like exporting to file
|
||||
from yacv_server import server
|
||||
for name in server.shown_object_names():
|
||||
print(f'Exporting {name} to GLB...')
|
||||
with open(os.path.join(ASSETS_DIR, 'logo_build', f'{name}.glb'), 'wb') as f:
|
||||
f.write(await server.export(name))
|
||||
|
||||
|
||||
# Save the complete logo to multiple GLB files (async required)
|
||||
asyncio.run(exporter())
|
||||
|
||||
print('Logo saved!')
|
||||
if testing_server:
|
||||
remove('location') # Test removing a part
|
||||
else:
|
||||
# Save the complete logo to multiple GLB files
|
||||
export_all(os.path.join(ASSETS_DIR, 'logo_build'))
|
||||
print('Logo saved!')
|
||||
|
||||
143
yacv_server/myhttp.py
Normal file
143
yacv_server/myhttp.py
Normal file
@@ -0,0 +1,143 @@
|
||||
import io
|
||||
import os
|
||||
import threading
|
||||
import urllib.parse
|
||||
from http import HTTPStatus
|
||||
from http.server import SimpleHTTPRequestHandler
|
||||
|
||||
from iterators import TimeoutIterator
|
||||
|
||||
from mylogger import logger
|
||||
|
||||
# Find the frontend folder (optional, but recommended)
|
||||
FILE_DIR = os.path.dirname(__file__)
|
||||
FRONTEND_BASE_PATH = os.getenv('FRONTEND_BASE_PATH', os.path.join(FILE_DIR, 'frontend'))
|
||||
if not os.path.exists(FRONTEND_BASE_PATH):
|
||||
if os.path.exists(os.path.join(FILE_DIR, '..', 'dist')): # Fallback to dev build
|
||||
FRONTEND_BASE_PATH = os.path.join(FILE_DIR, '..', 'dist')
|
||||
else:
|
||||
logger.warning('Frontend not found at %s', FRONTEND_BASE_PATH)
|
||||
FRONTEND_BASE_PATH = None
|
||||
|
||||
# Define the API paths (also available at the root path for simplicity)
|
||||
UPDATES_API_PATH = '/api/updates'
|
||||
OBJECTS_API_PATH = '/api/object' # /{name}
|
||||
|
||||
|
||||
class HTTPHandler(SimpleHTTPRequestHandler):
|
||||
yacv: 'yacv.YACV'
|
||||
frontend_lock: threading.Lock # To avoid exiting too early while frontend makes requests
|
||||
at_least_one_client: threading.Event
|
||||
|
||||
def __init__(self, *args, yacv: 'yacv.YACV', **kwargs):
|
||||
self.yacv = yacv
|
||||
self.frontend_lock = threading.Lock()
|
||||
self.at_least_one_client = threading.Event()
|
||||
super().__init__(*args, **kwargs, directory=FRONTEND_BASE_PATH)
|
||||
|
||||
def log_message(self, fmt, *args):
|
||||
logger.debug(fmt, *args)
|
||||
|
||||
def end_headers(self):
|
||||
# Add CORS headers to the response
|
||||
self.send_header('Access-Control-Allow-Origin', '*')
|
||||
self.send_header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
|
||||
super().end_headers()
|
||||
|
||||
def translate_path(self, path: str) -> str:
|
||||
"""Translate a path to the local filesystem, adds some basic security checks"""
|
||||
path = super().translate_path(path)
|
||||
path = os.path.realpath(path) # Avoid symlink hacks
|
||||
if self.directory: # Ensure proper subdirectory
|
||||
base = os.path.abspath(self.directory)
|
||||
if not os.path.abspath(path).startswith(base):
|
||||
self.send_error(HTTPStatus.FORBIDDEN, "Path is not in the frontend directory")
|
||||
return ''
|
||||
return path
|
||||
|
||||
def send_head(self):
|
||||
path_parts = self.path.split('?', 1)
|
||||
if len(path_parts) == 1:
|
||||
path_parts.append('')
|
||||
[path, query_str] = path_parts
|
||||
query = urllib.parse.parse_qs(query_str)
|
||||
if path == UPDATES_API_PATH or path == '/' and query.get('api_updates') is not None:
|
||||
return self._api_updates()
|
||||
elif path.startswith(OBJECTS_API_PATH) or path == '/' and query.get('api_object') is not None:
|
||||
if path.startswith(OBJECTS_API_PATH):
|
||||
obj_name = self.path[len(OBJECTS_API_PATH) + 1:]
|
||||
else:
|
||||
obj_name = query.get('api_object').pop()
|
||||
return self._api_object(obj_name)
|
||||
elif path.endswith('/'): # Frontend index.html
|
||||
self.path += 'index.html'
|
||||
return super().send_head()
|
||||
else: # Normal frontend file
|
||||
return super().send_head()
|
||||
|
||||
def _api_updates(self):
|
||||
"""Handles a publish-only websocket connection that send show_object events along with their hashes and URLs"""
|
||||
self.send_response(HTTPStatus.OK)
|
||||
self.send_header("Content-Type", "text/event-stream")
|
||||
self.send_header("Cache-Control", "no-cache")
|
||||
# Chunked transfer encoding!
|
||||
self.send_header("Transfer-Encoding", "chunked")
|
||||
self.end_headers()
|
||||
self.at_least_one_client.set()
|
||||
logger.debug('Updates client connected')
|
||||
|
||||
def write_chunk(_chunk_data: str):
|
||||
self.wfile.write(hex(len(_chunk_data))[2:].encode('utf-8'))
|
||||
self.wfile.write(b'\r\n')
|
||||
self.wfile.write(_chunk_data.encode('utf-8'))
|
||||
self.wfile.write(b'\r\n')
|
||||
self.wfile.flush()
|
||||
|
||||
write_chunk('retry: 100\n\n')
|
||||
|
||||
# Send buffered events first, while keeping a lock
|
||||
with self.frontend_lock:
|
||||
for data in self.yacv.show_events.buffer():
|
||||
logger.debug('Sending info about %s: %s', data.name, data)
|
||||
# noinspection PyUnresolvedReferences
|
||||
to_send = data.to_json()
|
||||
write_chunk(f'data: {to_send}\n\n')
|
||||
|
||||
# Send future events over the same connection
|
||||
# Also send keep-alive to know if the client is still connected
|
||||
subscription = self.yacv.show_events.subscribe(include_buffered=False)
|
||||
it = TimeoutIterator(subscription, sentinel=None, reset_on_next=True, timeout=5.0) # Keep-alive interval
|
||||
try:
|
||||
for data in it:
|
||||
if data is None:
|
||||
write_chunk(':keep-alive\n\n')
|
||||
else:
|
||||
logger.debug('Sending info about %s: %s', data.name, data)
|
||||
# noinspection PyUnresolvedReferences
|
||||
to_send = data.to_json()
|
||||
write_chunk(f'data: {to_send}\n\n')
|
||||
for i in range(200): # Need to fill browser buffers for instant updates!
|
||||
write_chunk(':flush\n\n')
|
||||
except BrokenPipeError: # Client disconnected normally
|
||||
pass
|
||||
finally:
|
||||
it.interrupt()
|
||||
subscription.close()
|
||||
logger.debug('Updates client disconnected')
|
||||
|
||||
def _api_object(self, obj_name: str):
|
||||
"""Returns the object file with the matching name, building it if necessary."""
|
||||
with self.frontend_lock:
|
||||
# Export the object (or fail if not found)
|
||||
exported_glb = self.yacv.export(obj_name)
|
||||
if exported_glb is None:
|
||||
self.send_error(HTTPStatus.NOT_FOUND, f'Object {obj_name} not found')
|
||||
return io.BytesIO()
|
||||
|
||||
# Wrap the GLB in a response and return it
|
||||
self.send_response(HTTPStatus.OK)
|
||||
self.send_header('Content-Type', 'model/gltf-binary')
|
||||
self.send_header('Content-Length', str(len(exported_glb)))
|
||||
self.send_header('Content-Disposition', f'attachment; filename="{obj_name}.glb"')
|
||||
self.end_headers()
|
||||
self.wfile.write(exported_glb)
|
||||
@@ -1 +1 @@
|
||||
# TODO: Plugins that can freely modify the GLTF file as it is being built
|
||||
# TODO(if there is interest): Plugins that can freely modify the GLTF file as it is being built
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import asyncio
|
||||
import threading
|
||||
import queue
|
||||
import threading
|
||||
from typing import List, TypeVar, \
|
||||
Generic, AsyncGenerator
|
||||
Generic, Generator
|
||||
|
||||
from yacv_server.mylogger import logger
|
||||
|
||||
@@ -8,61 +10,74 @@ T = TypeVar('T')
|
||||
|
||||
|
||||
class BufferedPubSub(Generic[T]):
|
||||
"""A simple implementation of publish-subscribe pattern using asyncio and buffering all previous events"""
|
||||
"""A simple implementation of publish-subscribe pattern using threading and buffering all previous events"""
|
||||
|
||||
_buffer: List[T]
|
||||
_subscribers: List[asyncio.Queue[T]]
|
||||
_lock = asyncio.Lock()
|
||||
max_buffer_size = 1000
|
||||
_buffer_lock: threading.Lock
|
||||
_subscribers: List[queue.Queue[T]]
|
||||
_subscribers_lock: threading.Lock
|
||||
max_buffer_size: int
|
||||
|
||||
def __init__(self):
|
||||
def __init__(self, max_buffer_size: int = 100):
|
||||
self._buffer = []
|
||||
self._buffer_lock = threading.Lock()
|
||||
self._subscribers = []
|
||||
self._subscribers_lock = threading.Lock()
|
||||
self.max_buffer_size = max_buffer_size
|
||||
|
||||
def publish_nowait(self, event: T):
|
||||
def publish(self, event: T):
|
||||
"""Publishes an event without blocking (synchronous API does not require locking)"""
|
||||
self._buffer.append(event)
|
||||
if len(self._buffer) > self.max_buffer_size:
|
||||
self._buffer.pop(0)
|
||||
for q in self._subscribers:
|
||||
q.put_nowait(event)
|
||||
with self._buffer_lock:
|
||||
self._buffer.append(event)
|
||||
if len(self._buffer) > self.max_buffer_size:
|
||||
self._buffer.pop(0)
|
||||
for q in self._subscribers:
|
||||
q.put(event)
|
||||
|
||||
async def _subscribe(self, include_buffered: bool = True, include_future: bool = True) -> asyncio.Queue[T]:
|
||||
def _subscribe(self, include_buffered: bool = True, include_future: bool = True) -> queue.Queue[T]:
|
||||
"""Subscribes to events"""
|
||||
q = asyncio.Queue()
|
||||
async with self._lock:
|
||||
q = queue.Queue()
|
||||
with self._subscribers_lock:
|
||||
self._subscribers.append(q)
|
||||
logger.debug(f"Subscribed to %s (%d subscribers)", self, len(self._subscribers))
|
||||
if include_buffered:
|
||||
for event in self._buffer:
|
||||
await q.put(event)
|
||||
with self._buffer_lock:
|
||||
for event in self._buffer:
|
||||
q.put(event)
|
||||
if not include_future:
|
||||
await q.put(None)
|
||||
q.put(None)
|
||||
return q
|
||||
|
||||
async def _unsubscribe(self, q: asyncio.Queue[T]):
|
||||
def _unsubscribe(self, q: queue.Queue[T]):
|
||||
"""Unsubscribes from events"""
|
||||
async with self._lock:
|
||||
with self._subscribers_lock:
|
||||
self._subscribers.remove(q)
|
||||
logger.debug(f"Unsubscribed from %s (%d subscribers)", self, len(self._subscribers))
|
||||
|
||||
async def subscribe(self, include_buffered: bool = True, include_future: bool = True) -> AsyncGenerator[T, None]:
|
||||
"""Subscribes to events as an async generator that yields events and automatically unsubscribes"""
|
||||
q = await self._subscribe(include_buffered, include_future)
|
||||
def subscribe(self, include_buffered: bool = True, include_future: bool = True) -> Generator[T, None, None]:
|
||||
"""Subscribes to events as an generator that yields events and automatically unsubscribes"""
|
||||
q = self._subscribe(include_buffered, include_future)
|
||||
try:
|
||||
while True:
|
||||
v = await q.get()
|
||||
v = q.get()
|
||||
# include_future is incompatible with None values as they are used to signal the end of the stream
|
||||
if v is None and not include_future:
|
||||
break
|
||||
yield v
|
||||
finally: # When aclose() is called
|
||||
await self._unsubscribe(q)
|
||||
self._unsubscribe(q)
|
||||
|
||||
def buffer(self) -> List[T]:
|
||||
"""Returns a shallow copy of the list of buffered events"""
|
||||
return self._buffer[:]
|
||||
with self._buffer_lock:
|
||||
return self._buffer[:]
|
||||
|
||||
def delete(self, event: T):
|
||||
"""Deletes an event from the buffer"""
|
||||
self._buffer.remove(event)
|
||||
with self._buffer_lock:
|
||||
self._buffer.remove(event)
|
||||
|
||||
def clear(self):
|
||||
"""Clears the buffer"""
|
||||
with self._buffer_lock:
|
||||
self._buffer.clear()
|
||||
@@ -1,361 +0,0 @@
|
||||
import asyncio
|
||||
import atexit
|
||||
import os
|
||||
import signal
|
||||
import sys
|
||||
import time
|
||||
from dataclasses import dataclass
|
||||
from threading import Thread
|
||||
from typing import Optional, Dict, Union, Callable
|
||||
|
||||
import aiohttp_cors
|
||||
from OCP.TopLoc import TopLoc_Location
|
||||
from OCP.TopoDS import TopoDS_Shape
|
||||
from aiohttp import web
|
||||
from aiohttp_sse import sse_response
|
||||
from build123d import Shape, Axis, Location, Vector
|
||||
from dataclasses_json import dataclass_json
|
||||
|
||||
from yacv_server.cad import get_shape, grab_all_cad, image_to_gltf, CADLike
|
||||
from yacv_server.mylogger import logger
|
||||
from yacv_server.pubsub import BufferedPubSub
|
||||
from yacv_server.tessellate import _hashcode, tessellate
|
||||
|
||||
# Find the frontend folder (optional, but recommended)
|
||||
FILE_DIR = os.path.dirname(__file__)
|
||||
FRONTEND_BASE_PATH = os.getenv('FRONTEND_BASE_PATH', os.path.join(FILE_DIR, 'frontend'))
|
||||
if not os.path.exists(FRONTEND_BASE_PATH):
|
||||
if os.path.exists(os.path.join(FILE_DIR, '..', 'dist')): # Fallback to dev build
|
||||
FRONTEND_BASE_PATH = os.path.join(FILE_DIR, '..', 'dist')
|
||||
else:
|
||||
logger.warning('Frontend not found at %s', FRONTEND_BASE_PATH)
|
||||
FRONTEND_BASE_PATH = None
|
||||
|
||||
# Define the API paths (also available at the root path for simplicity)
|
||||
UPDATES_API_PATH = '/api/updates'
|
||||
OBJECTS_API_PATH = '/api/object' # /{name}
|
||||
|
||||
|
||||
@dataclass_json
|
||||
@dataclass
|
||||
class UpdatesApiData:
|
||||
"""Data sent to the client through the updates API"""
|
||||
name: str
|
||||
"""Name of the object. Should be unique unless you want to overwrite the previous object"""
|
||||
hash: str
|
||||
"""Hash of the object, to detect changes without rebuilding the object"""
|
||||
|
||||
|
||||
class UpdatesApiFullData(UpdatesApiData):
|
||||
obj: Optional[CADLike]
|
||||
"""The OCCT object, if any (not serialized)"""
|
||||
kwargs: Optional[Dict[str, any]]
|
||||
"""The show_object options, if any (not serialized)"""
|
||||
|
||||
def __init__(self, name: str, hash: str, obj: Optional[CADLike] = None,
|
||||
kwargs: Optional[Dict[str, any]] = None):
|
||||
self.name = name
|
||||
self.hash = hash
|
||||
self.obj = obj
|
||||
self.kwargs = kwargs
|
||||
|
||||
def to_json(self) -> str:
|
||||
# noinspection PyUnresolvedReferences
|
||||
return super().to_json()
|
||||
|
||||
|
||||
# noinspection PyUnusedLocal
|
||||
async def _index_handler(request: web.Request) -> web.Response:
|
||||
return web.HTTPTemporaryRedirect(location='index.html')
|
||||
|
||||
|
||||
class Server:
|
||||
app = web.Application()
|
||||
runner: web.AppRunner
|
||||
thread: Optional[Thread] = None
|
||||
startup_complete = asyncio.Event()
|
||||
do_shutdown = asyncio.Event()
|
||||
at_least_one_client = asyncio.Event()
|
||||
show_events = BufferedPubSub[UpdatesApiFullData]()
|
||||
object_events: Dict[str, BufferedPubSub[bytes]] = {}
|
||||
object_events_lock = asyncio.Lock()
|
||||
frontend_lock = asyncio.Lock() # To avoid exiting too early while frontend makes requests
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
# --- Routes ---
|
||||
# - APIs
|
||||
self.app.router.add_route('GET', f'{UPDATES_API_PATH}', self._api_updates)
|
||||
self.app.router.add_route('GET', f'{OBJECTS_API_PATH}/{{name}}', self._api_object)
|
||||
# - Single websocket/objects/frontend entrypoint to ease client configuration
|
||||
self.app.router.add_get('/', self._entrypoint)
|
||||
# - Static files from the frontend
|
||||
self.app.router.add_get('/{path:(.*/|)}', _index_handler) # Any folder -> index.html
|
||||
if FRONTEND_BASE_PATH is not None:
|
||||
self.app.router.add_static('/', path=FRONTEND_BASE_PATH, name='static_frontend')
|
||||
# --- CORS ---
|
||||
cors = aiohttp_cors.setup(self.app, defaults={
|
||||
"*": aiohttp_cors.ResourceOptions(
|
||||
allow_credentials=True,
|
||||
expose_headers="*",
|
||||
allow_headers="*",
|
||||
)
|
||||
})
|
||||
for route in list(self.app.router.routes()):
|
||||
cors.add(route)
|
||||
# --- Misc ---
|
||||
self.loop = asyncio.new_event_loop()
|
||||
|
||||
def start(self):
|
||||
"""Starts the web server in the background"""
|
||||
assert self.thread is None, "Server currently running, cannot start another one"
|
||||
# Start the server in a separate daemon thread
|
||||
self.thread = Thread(target=self._run_server, name='yacv_server', daemon=True)
|
||||
signal.signal(signal.SIGINT | signal.SIGTERM, self.stop)
|
||||
atexit.register(self.stop)
|
||||
self.thread.start()
|
||||
logger.info('Server started (requested)...')
|
||||
# Wait for the server to be ready before returning
|
||||
while not self.startup_complete.is_set():
|
||||
time.sleep(0.01)
|
||||
logger.info('Server started (received)...')
|
||||
|
||||
# noinspection PyUnusedLocal
|
||||
def stop(self, *args):
|
||||
"""Stops the web server"""
|
||||
if self.thread is None:
|
||||
print('Cannot stop server because it is not running')
|
||||
return
|
||||
|
||||
graceful_secs_connect = float(os.getenv('YACV_GRACEFUL_SECS_CONNECT', 12.0))
|
||||
graceful_secs_request = float(os.getenv('YACV_GRACEFUL_SECS_REQUEST', 5.0))
|
||||
# Make sure we can hold the lock for more than 100ms (to avoid exiting too early)
|
||||
logger.info('Stopping server (waiting for at least one frontend request first, cancel with CTRL+C)...')
|
||||
start = time.time()
|
||||
try:
|
||||
while not self.at_least_one_client.is_set() and time.time() - start < graceful_secs_connect:
|
||||
time.sleep(0.01)
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
|
||||
logger.info('Stopping server (waiting for no more frontend requests)...')
|
||||
start = time.time()
|
||||
while time.time() - start < graceful_secs_request:
|
||||
if self.frontend_lock.locked():
|
||||
start = time.time()
|
||||
time.sleep(0.01)
|
||||
|
||||
# Stop the server in the background
|
||||
self.loop.call_soon_threadsafe(lambda *a: self.do_shutdown.set())
|
||||
logger.info('Stopping server (sent)...')
|
||||
|
||||
# Wait for the server to stop gracefully
|
||||
self.thread.join(timeout=30)
|
||||
self.thread = None
|
||||
logger.info('Stopping server (confirmed)...')
|
||||
if len(args) >= 1 and args[0] in (signal.SIGINT, signal.SIGTERM):
|
||||
sys.exit(0) # Exit with success
|
||||
|
||||
def _run_server(self):
|
||||
"""Runs the web server"""
|
||||
asyncio.set_event_loop(self.loop)
|
||||
self.loop.run_until_complete(self._run_server_async())
|
||||
self.loop.stop()
|
||||
self.loop.close()
|
||||
|
||||
async def _run_server_async(self):
|
||||
"""Runs the web server (async)"""
|
||||
runner = web.AppRunner(self.app)
|
||||
await runner.setup()
|
||||
site = web.TCPSite(runner, os.getenv('YACV_HOST', 'localhost'), int(os.getenv('YACV_PORT', 32323)))
|
||||
await site.start()
|
||||
logger.info('Server started (sent)...')
|
||||
self.startup_complete.set()
|
||||
# Wait for a signal to stop the server while running
|
||||
await self.do_shutdown.wait()
|
||||
logger.info('Stopping server (received)...')
|
||||
await runner.shutdown()
|
||||
# await runner.cleanup() # Gets stuck?
|
||||
logger.info('Stopping server (done)...')
|
||||
|
||||
async def _entrypoint(self, request: web.Request) -> web.StreamResponse:
|
||||
"""Main entrypoint to the server, which automatically serves the frontend/updates/objects"""
|
||||
if request.query.get('api_updates', '') != '': # ?api_updates -> updates API
|
||||
return await self._api_updates(request)
|
||||
elif request.query.get('api_object', '') != '': # ?api_object={name} -> object API
|
||||
request.match_info['name'] = request.query['api_object']
|
||||
return await self._api_object(request)
|
||||
else: # Anything else -> frontend index.html
|
||||
return await _index_handler(request)
|
||||
|
||||
async def _api_updates(self, request: web.Request) -> web.StreamResponse:
|
||||
"""Handles a publish-only websocket connection that send show_object events along with their hashes and URLs"""
|
||||
self.at_least_one_client.set()
|
||||
async with sse_response(request) as resp:
|
||||
resp.ping_interval = 0.1 # HACK: Browsers don't receive instant updates without this
|
||||
logger.debug('Client connected: %s', request.remote)
|
||||
|
||||
# Send buffered events first, while keeping a lock
|
||||
async with self.frontend_lock:
|
||||
for data in self.show_events.buffer():
|
||||
logger.debug('Sending info about %s to %s: %s', data.name, request.remote, data)
|
||||
# noinspection PyUnresolvedReferences
|
||||
await resp.send(data.to_json())
|
||||
|
||||
# Send future events over the same connection
|
||||
subscription = self.show_events.subscribe(include_buffered=False)
|
||||
try:
|
||||
async for data in subscription:
|
||||
logger.debug('Sending info about %s to %s: %s', data.name, request.remote, data)
|
||||
# noinspection PyUnresolvedReferences
|
||||
await resp.send(data.to_json())
|
||||
finally:
|
||||
await subscription.aclose()
|
||||
logger.debug('Client disconnected: %s', request.remote)
|
||||
|
||||
return resp
|
||||
|
||||
obj_counter = 0
|
||||
|
||||
def _show_common(self, name: Optional[str], hash: str, start: float, obj: Optional[CADLike] = None,
|
||||
kwargs=None):
|
||||
name = name or f'object_{self.obj_counter}'
|
||||
self.obj_counter += 1
|
||||
# Remove a previous object with the same name
|
||||
for old_event in self.show_events.buffer():
|
||||
if old_event.name == name:
|
||||
self.show_events.delete(old_event)
|
||||
if name in self.object_events:
|
||||
del self.object_events[name]
|
||||
break
|
||||
precomputed_info = UpdatesApiFullData(name=name, hash=hash, obj=obj, kwargs=kwargs or {})
|
||||
self.show_events.publish_nowait(precomputed_info)
|
||||
logger.info('show_object(%s, %s) took %.3f seconds', name, hash, time.time() - start)
|
||||
return precomputed_info
|
||||
|
||||
def show(self, any_object: Union[bytes, CADLike, any], name: Optional[str] = None, **kwargs):
|
||||
"""Publishes "any" object to the server"""
|
||||
if isinstance(any_object, bytes):
|
||||
self.show_gltf(any_object, name, **kwargs)
|
||||
else:
|
||||
self.show_cad(any_object, name, **kwargs)
|
||||
|
||||
def show_gltf(self, gltf: bytes, name: Optional[str] = None, **kwargs):
|
||||
"""Publishes any single-file GLTF object to the server."""
|
||||
start = time.time()
|
||||
# Precompute the info and send it to the client as if it was a CAD object
|
||||
precomputed_info = self._show_common(name, _hashcode(gltf, **kwargs), start, kwargs=kwargs)
|
||||
# Also pre-populate the GLTF data for the object API
|
||||
publish_to = BufferedPubSub[bytes]()
|
||||
publish_to.publish_nowait(gltf)
|
||||
publish_to.publish_nowait(b'') # Signal the end of the stream
|
||||
self.object_events[precomputed_info.name] = publish_to
|
||||
|
||||
def show_image(self, source: str | bytes, center: any, ppmm: int, name: Optional[str] = None,
|
||||
save_mime: str = 'image/jpeg', **kwargs):
|
||||
"""Publishes an image as a quad GLTF object, indicating the center location and pixels per millimeter."""
|
||||
# Convert the image to a GLTF CAD object
|
||||
gltf, name = image_to_gltf(source, center, ppmm, name, save_mime)
|
||||
# Publish it like any other GLTF object
|
||||
self.show_gltf(gltf, name, **kwargs)
|
||||
|
||||
def show_cad(self, obj: Union[CADLike, any], name: Optional[str] = None, **kwargs):
|
||||
"""Publishes a CAD object to the server"""
|
||||
start = time.time()
|
||||
|
||||
# Get the shape of a CAD-like object
|
||||
obj = get_shape(obj)
|
||||
|
||||
# Convert Z-up (OCCT convention) to Y-up (GLTF convention)
|
||||
if isinstance(obj, TopoDS_Shape):
|
||||
obj = Shape(obj).rotate(Axis.X, -90).wrapped
|
||||
elif isinstance(obj, TopLoc_Location):
|
||||
tmp_location = Location(obj)
|
||||
tmp_location.position = Vector(tmp_location.position.X, tmp_location.position.Z, -tmp_location.position.Y)
|
||||
tmp_location.orientation = Vector(tmp_location.orientation.X - 90, tmp_location.orientation.Y,
|
||||
tmp_location.orientation.Z)
|
||||
obj = tmp_location.wrapped
|
||||
|
||||
self._show_common(name, _hashcode(obj, **kwargs), start, obj, kwargs)
|
||||
|
||||
def show_cad_all(self, **kwargs):
|
||||
"""Publishes all CAD objects to the server"""
|
||||
for name, obj in grab_all_cad():
|
||||
self.show_cad(obj, name, **kwargs)
|
||||
|
||||
async def _api_object(self, request: web.Request) -> web.Response:
|
||||
"""Returns the object file with the matching name, building it if necessary."""
|
||||
async with self.frontend_lock:
|
||||
# Export the object (or fail if not found)
|
||||
exported_glb = await self.export(request.match_info['name'])
|
||||
|
||||
# Wrap the GLB in a response and return it
|
||||
response = web.Response(body=exported_glb)
|
||||
response.content_type = 'model/gltf-binary'
|
||||
response.headers['Content-Disposition'] = f'attachment; filename="{request.match_info["name"]}.glb"'
|
||||
return response
|
||||
|
||||
def shown_object_names(self) -> list[str]:
|
||||
"""Returns the names of all objects that have been shown"""
|
||||
return list([obj.name for obj in self.show_events.buffer()])
|
||||
|
||||
def _shown_object(self, name: str) -> Optional[UpdatesApiFullData]:
|
||||
"""Returns the object with the given name, if it exists"""
|
||||
for obj in self.show_events.buffer():
|
||||
if obj.name == name:
|
||||
return obj
|
||||
return None
|
||||
|
||||
async def export(self, name: str) -> bytes:
    """Export the given previously-shown object to a single GLB file, building it if necessary.

    The tessellation runs at most once per shown object: its result is cached
    in ``self.object_events[name]`` (a buffered pub/sub), and later calls only
    subscribe and read back the buffered GLB bytes.

    Raises:
        web.HTTPNotFound: if no object with this name was previously shown.
    """
    start = time.time()

    # Check that the object to build exists and grab it if it does
    event = self._shown_object(name)
    if not event:
        raise web.HTTPNotFound(text=f'No object named {name} was previously shown')

    # Use the lock to ensure that we don't build the object twice
    async with self.object_events_lock:
        # If there are no object events for this name, we need to build the object
        if name not in self.object_events:
            # Prepare the pubsub for the object (registered before building so
            # concurrent callers see it and wait instead of rebuilding)
            publish_to = BufferedPubSub[bytes]()
            self.object_events[name] = publish_to

            def _build_object():
                # Build and publish the object (once); tessellation parameters
                # come from the kwargs given to show()/show_cad()
                gltf = tessellate(event.obj, tolerance=event.kwargs.get('tolerance', 0.1),
                                  angular_tolerance=event.kwargs.get('angular_tolerance', 0.1),
                                  faces=event.kwargs.get('faces', True),
                                  edges=event.kwargs.get('edges', True),
                                  vertices=event.kwargs.get('vertices', True))
                glb_list_of_bytes = gltf.save_to_bytes()
                publish_to.publish_nowait(b''.join(glb_list_of_bytes))
                logger.info('export(%s) took %.3f seconds, %d parts', name, time.time() - start,
                            len(gltf.meshes[0].primitives))

            # await asyncio.get_running_loop().run_in_executor(None, _build_object)
            # The previous line has problems with auto-closed loop on script exit
            # and is cancellable, so instead run blocking code in async context :(
            # NOTE(review): this blocks the event loop for the whole tessellation.
            logger.debug('Building object %s... %s', name, event.obj)
            _build_object()

    # In either case return the elements of a subscription to the async generator
    # (the first buffered element is the complete GLB blob)
    subscription = self.object_events[name].subscribe()
    try:
        return await anext(subscription)
    finally:
        await subscription.aclose()
def export_all(self, folder: str, export_filter: Callable[[str, Optional[CADLike]], bool] = lambda name, obj: True):
    """Export all previously-shown objects to GLB files in the given folder"""
    import asyncio

    async def _run() -> None:
        # Make sure the destination exists, then write one .glb per shown object
        os.makedirs(folder, exist_ok=True)
        for obj_name in self.shown_object_names():
            if not export_filter(obj_name, self._shown_object(obj_name).obj):
                continue
            target = os.path.join(folder, f'{obj_name}.glb')
            with open(target, 'wb') as out_file:
                out_file.write(await self.export(obj_name))

    # The export API is async, so drive it to completion with a private loop
    asyncio.run(_run())
286
yacv_server/yacv.py
Normal file
286
yacv_server/yacv.py
Normal file
@@ -0,0 +1,286 @@
|
||||
import atexit
|
||||
import inspect
|
||||
import os
|
||||
import signal
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
from dataclasses import dataclass
|
||||
from http.server import ThreadingHTTPServer
|
||||
from threading import Thread
|
||||
from typing import Optional, Dict, Union, Callable
|
||||
|
||||
from OCP.TopLoc import TopLoc_Location
|
||||
from OCP.TopoDS import TopoDS_Shape
|
||||
# noinspection PyProtectedMember
|
||||
from build123d import Shape, Axis, Location, Vector
|
||||
from dataclasses_json import dataclass_json
|
||||
|
||||
from myhttp import HTTPHandler
|
||||
from yacv_server.cad import get_shape, grab_all_cad, image_to_gltf, CADLike
|
||||
from yacv_server.mylogger import logger
|
||||
from yacv_server.pubsub import BufferedPubSub
|
||||
from yacv_server.tessellate import _hashcode, tessellate
|
||||
|
||||
|
||||
@dataclass_json
@dataclass
class UpdatesApiData:
    """Data sent to the client through the updates API"""
    # Name of the object; should be unique unless you want to overwrite the previous object
    name: str
    # Hash of the object, used to detect changes without rebuilding the object
    hash: str
    # Whether to remove the object from the scene
    is_remove: bool
class UpdatesApiFullData(UpdatesApiData):
    """Server-side extension of UpdatesApiData that also keeps the live CAD object.

    Only the fields inherited from UpdatesApiData (name/hash/is_remove) are
    serialized by to_json(); ``obj`` and ``kwargs`` stay on the server.
    """
    obj: Optional[CADLike]
    """The OCCT object, if any (not serialized)"""
    kwargs: Optional[Dict[str, any]]
    """The show_object options, if any (not serialized)"""

    # NOTE(review): `Dict[str, any]` uses the builtin `any`; typing.Any was
    # presumably intended — confirm before changing, as it only affects hints.
    def __init__(self, name: str, _hash: str, is_remove: bool = False, obj: Optional[CADLike] = None,
                 kwargs: Optional[Dict[str, any]] = None):
        self.name = name
        self.hash = _hash  # stored under the public field name expected by the updates API
        self.is_remove = is_remove
        self.obj = obj
        self.kwargs = kwargs

    def to_json(self) -> str:
        """Serialize the client-visible fields (provided by @dataclass_json on the parent)."""
        # noinspection PyUnresolvedReferences
        return super().to_json()
class YACV:
    """Main entry point: publishes CAD objects and serves them to the frontend.

    Objects are announced on the ``show_events`` bus immediately; the expensive
    tessellation to GLB happens lazily (and only once per object) when the
    object is first exported via :meth:`export` or the HTTP object API.

    Fixes over the previous revision:
    - ``signal.signal(signal.SIGINT | signal.SIGTERM, ...)`` ORed the signal
      numbers into a single (wrong) signal; each signal is now registered
      separately.
    - ``_show_common`` crashed with AttributeError when ``kwargs`` was None.
    - The busy-loop around ``startup_complete.wait()`` was dead code
      (``wait()`` with no timeout blocks until set).
    - Removed leftover debug ``print()`` calls (one invoked the expensive
      ``inspect.stack()`` on every server start).
    - ``stop()`` now waits for startup to complete before calling
      ``self.server.shutdown()`` (``self.server`` is assigned by the thread).
    - ``export_all`` no longer dereferences a possibly-None shown object.
    """

    server_thread: Optional[Thread]
    server: Optional[ThreadingHTTPServer]
    startup_complete: threading.Event
    show_events: BufferedPubSub[UpdatesApiFullData]
    object_events: Dict[str, BufferedPubSub[bytes]]
    object_events_lock: threading.Lock

    def __init__(self):
        self.server_thread = None
        self.server = None
        self.startup_complete = threading.Event()
        # Presumably set by the HTTP handler once any frontend connects;
        # consumed by stop() for graceful shutdown — confirm against HTTPHandler
        self.at_least_one_client = threading.Event()
        self.show_events = BufferedPubSub()
        self.object_events = {}
        self.object_events_lock = threading.Lock()
        # Held while a frontend request is in flight (checked by stop())
        self.frontend_lock = threading.Lock()

    def start(self):
        """Starts the web server in the background (daemon thread)."""
        assert self.server_thread is None, "Server currently running, cannot start another one"
        assert self.startup_complete.is_set() is False, "Server already started"
        # Start the server in a separate daemon thread
        self.server_thread = Thread(target=self._run_server, name='yacv_server', daemon=True)
        # Register the graceful-shutdown handler for each signal separately:
        # `signal.SIGINT | signal.SIGTERM` ORs the numbers into one wrong signal.
        for sig in (signal.SIGINT, signal.SIGTERM):
            signal.signal(sig, self.stop)
        atexit.register(self.stop)
        self.server_thread.start()
        logger.info('Server started (requested)...')
        # Wait for the server to be ready before returning
        # (Event.wait() with no timeout blocks until set — no polling needed)
        self.startup_complete.wait()
        logger.info('Server started (received)...')

    # noinspection PyUnusedLocal
    def stop(self, *args):
        """Stops the web server, waiting for the frontend to grab the data first.

        May be invoked as a signal handler (args = (signum, frame)) or from
        atexit (no args).
        """
        if self.server_thread is None:
            logger.warning('Cannot stop server because it is not running')
            return

        graceful_secs_connect = float(os.getenv('YACV_GRACEFUL_SECS_CONNECT', 12.0))
        graceful_secs_request = float(os.getenv('YACV_GRACEFUL_SECS_REQUEST', 5.0))
        # Phase 1: give a frontend some time to connect at all
        logger.info('Stopping server (waiting for at least one frontend request first, cancel with CTRL+C)...')
        start = time.time()
        try:
            while not self.at_least_one_client.wait(
                    graceful_secs_connect / 10) and time.time() - start < graceful_secs_connect:
                time.sleep(0.01)
        except KeyboardInterrupt:
            pass  # user explicitly skipped the grace period

        # Phase 2: wait for a quiet period with no in-flight frontend requests
        logger.info('Stopping server (waiting for no more frontend requests)...')
        start = time.time()
        try:
            while time.time() - start < graceful_secs_request:
                if self.frontend_lock.locked():
                    start = time.time()  # a request is in flight: restart the quiet-period timer
                time.sleep(0.01)
        except KeyboardInterrupt:
            pass

        # self.server is assigned by the server thread; make sure startup
        # finished before asking it to shut down
        self.startup_complete.wait()
        # Stop the server in the background
        self.server.shutdown()
        logger.info('Stopping server (sent)...')

        # Wait for the server to stop gracefully
        self.server_thread.join(timeout=30)
        self.server_thread = None
        logger.info('Stopping server (confirmed)...')
        if len(args) >= 1 and args[0] in (signal.SIGINT, signal.SIGTERM):
            sys.exit(0)  # Exit with success

    def _run_server(self):
        """Runs the web server (blocking; executed on the daemon thread)."""
        logger.info('Starting server...')
        self.server = ThreadingHTTPServer(
            (os.getenv('YACV_HOST', 'localhost'), int(os.getenv('YACV_PORT', 32323))),
            lambda a, b, c: HTTPHandler(a, b, c, yacv=self))
        # noinspection HttpUrlsUsage
        logger.info(f'Serving at http://{self.server.server_name}:{self.server.server_port}')
        self.startup_complete.set()
        self.server.serve_forever()

    def _show_common(self, name: Optional[str], _hash: str, start: float, obj: Optional[CADLike] = None,
                     kwargs=None):
        """Shared publishing logic: auto-clear, replace same-named object, announce."""
        kwargs = kwargs or {}  # guard: callers may pass None
        if kwargs.get('auto_clear', True):
            self.clear()
        name = name or f'object_{len(self.show_events.buffer())}'
        # Remove a previous object with the same name (and its cached GLB)
        for old_event in self.show_events.buffer():
            if old_event.name == name:
                self.show_events.delete(old_event)
                if name in self.object_events:
                    del self.object_events[name]
                break
        precomputed_info = UpdatesApiFullData(name=name, _hash=_hash, obj=obj, kwargs=kwargs)
        self.show_events.publish(precomputed_info)
        logger.info('show_object(%s, %s) took %.3f seconds', name, _hash, time.time() - start)
        return precomputed_info

    def show(self, any_object: Union[bytes, CADLike, any], name: Optional[str] = None, **kwargs):
        """Publishes "any" object to the server (GLB bytes or a CAD-like object)."""
        if isinstance(any_object, bytes):
            self.show_gltf(any_object, name, **kwargs)
        else:
            self.show_cad(any_object, name, **kwargs)

    def show_gltf(self, gltf: bytes, name: Optional[str] = None, **kwargs):
        """Publishes any single-file GLTF object to the server."""
        start = time.time()
        # Precompute the info and send it to the client as if it was a CAD object
        precomputed_info = self._show_common(name, _hashcode(gltf, **kwargs), start, kwargs=kwargs)
        # Also pre-populate the GLTF data for the object API (no tessellation needed)
        publish_to = BufferedPubSub[bytes]()
        publish_to.publish(gltf)
        publish_to.publish(b'')  # Signal the end of the stream
        self.object_events[precomputed_info.name] = publish_to

    def show_image(self, source: str | bytes, center: any, width: Optional[float] = None,
                   height: Optional[float] = None, name: Optional[str] = None, save_mime: str = 'image/jpeg',
                   **kwargs):
        """Publishes an image as a quad GLTF object, indicating the center location and pixels per millimeter."""
        # Convert the image to a GLTF CAD object
        gltf, name = image_to_gltf(source, center, width, height, name, save_mime)
        # Publish it like any other GLTF object
        self.show_gltf(gltf, name, **kwargs)

    def show_cad(self, obj: Union[CADLike, any], name: Optional[str] = None, **kwargs):
        """Publishes a CAD object to the server"""
        start = time.time()

        # Get the shape of a CAD-like object
        obj = get_shape(obj)

        # Convert Z-up (OCCT convention) to Y-up (GLTF convention)
        if isinstance(obj, TopoDS_Shape):
            obj = Shape(obj).rotate(Axis.X, -90).wrapped
        elif isinstance(obj, TopLoc_Location):
            tmp_location = Location(obj)
            tmp_location.position = Vector(tmp_location.position.X, tmp_location.position.Z,
                                           -tmp_location.position.Y)
            tmp_location.orientation = Vector(tmp_location.orientation.X - 90, tmp_location.orientation.Y,
                                              tmp_location.orientation.Z)
            obj = tmp_location.wrapped

        self._show_common(name, _hashcode(obj, **kwargs), start, obj, kwargs)

    def show_cad_all(self, **kwargs):
        """Publishes all CAD objects in the current scope to the server"""
        for name, obj in grab_all_cad():
            self.show_cad(obj, name, **kwargs)

    def remove(self, name: str):
        """Removes a previously-shown object from the scene"""
        shown_object = self._shown_object(name)
        if shown_object:
            shown_object.is_remove = True
            with self.object_events_lock:
                if name in self.object_events:
                    del self.object_events[name]
            # Re-publish the (now is_remove=True) event so clients drop the object
            self.show_events.publish(shown_object)

    def clear(self):
        """Clears all previously-shown objects from the scene"""
        for event in self.show_events.buffer():
            self.remove(event.name)

    def shown_object_names(self) -> list[str]:
        """Returns the names of all objects that have been shown"""
        return [obj.name for obj in self.show_events.buffer()]

    def _shown_object(self, name: str) -> Optional[UpdatesApiFullData]:
        """Returns the object with the given name, if it exists"""
        for obj in self.show_events.buffer():
            if obj.name == name:
                return obj
        return None

    def export(self, name: str) -> Optional[bytes]:
        """Export the given previously-shown object to a single GLB file, building it if necessary.

        Returns None if no object with this name was previously shown.
        """
        start = time.time()

        # Check that the object to build exists and grab it if it does
        event = self._shown_object(name)
        if event is None:
            return None

        # Use the lock to ensure that we don't build the object twice
        with self.object_events_lock:
            # If there are no object events for this name, we need to build the object
            if name not in self.object_events:
                # Prepare the pubsub for the object (registered before building so
                # concurrent callers wait instead of rebuilding)
                publish_to = BufferedPubSub[bytes]()
                self.object_events[name] = publish_to

                def _build_object():
                    # Build and publish the object (once); tessellation parameters
                    # come from the kwargs given to show()/show_cad()
                    gltf = tessellate(event.obj, tolerance=event.kwargs.get('tolerance', 0.1),
                                      angular_tolerance=event.kwargs.get('angular_tolerance', 0.1),
                                      faces=event.kwargs.get('faces', True),
                                      edges=event.kwargs.get('edges', True),
                                      vertices=event.kwargs.get('vertices', True))
                    glb_list_of_bytes = gltf.save_to_bytes()
                    publish_to.publish(b''.join(glb_list_of_bytes))
                    logger.info('export(%s) took %.3f seconds, %d parts', name, time.time() - start,
                                len(gltf.meshes[0].primitives))

                logger.debug('Building object %s... %s', name, event.obj)
                _build_object()

        # In either case return the first element of a subscription (the full GLB blob)
        subscription = self.object_events[name].subscribe()
        try:
            return next(subscription)
        finally:
            subscription.close()

    def export_all(self, folder: str,
                   export_filter: Callable[[str, Optional[CADLike]], bool] = lambda name, obj: True):
        """Export all previously-shown objects to GLB files in the given folder"""
        os.makedirs(folder, exist_ok=True)
        for name in self.shown_object_names():
            event = self._shown_object(name)
            if event is None:
                continue  # removed since the name list was taken
            if export_filter(name, event.obj):
                with open(os.path.join(folder, f'{name}.glb'), 'wb') as f:
                    f.write(self.export(name))
|
||||
Reference in New Issue
Block a user