Fully working example and many fixes

Yeicor
2024-03-05 20:58:14 +01:00
parent 37a1c5de1f
commit 1cbd1987b3
17 changed files with 270 additions and 89 deletions

View File

@@ -12,6 +12,12 @@ updates:
interval: "weekly"
day: "saturday"
time: "09:00"
- package-ecosystem: "pip"
directory: "/example"
schedule:
interval: "weekly"
day: "saturday"
time: "09:00"
- package-ecosystem: "github-actions"
directory: "/.github/workflows/"
schedule:

View File

@@ -21,7 +21,7 @@ jobs:
- uses: "actions/upload-artifact@v4"
with:
name: "frontend"
path: "./dist"
path: "dist"
retention-days: 5
build-backend:
@@ -29,9 +29,6 @@ jobs:
runs-on: "ubuntu-latest"
steps:
- uses: "actions/checkout@v4"
- uses: "actions/setup-node@v4"
with:
cache: "yarn"
- run: "pipx install poetry"
- uses: "actions/setup-python@v5"
with:
@@ -45,9 +42,6 @@ jobs:
runs-on: "ubuntu-latest"
steps:
- uses: "actions/checkout@v4"
- uses: "actions/setup-node@v4"
with:
cache: "yarn"
- run: "pipx install poetry"
- uses: "actions/setup-python@v5"
with:
@@ -59,5 +53,23 @@ jobs:
- uses: "actions/upload-artifact@v4"
with:
name: "logo"
path: "./assets/logo_build"
path: "assets/logo_build"
retention-days: 5
build-example:
name: "Build example"
runs-on: "ubuntu-latest"
steps:
- uses: "actions/checkout@v4"
- uses: "actions/setup-python@v5"
with:
python-version: "3.11"
cache: "pip"
- run: "pip install -r example/requirements.txt"
- run: "cd example && python object.py"
- run: "mv example/export/object.glb example/export/example.glb"
- uses: "actions/upload-artifact@v4"
with:
name: "example"
path: "example/export"
retention-days: 5

View File

@@ -17,6 +17,8 @@ concurrency:
cancel-in-progress: false
jobs:
# TODO: Update versions automatically
deploy-frontend:
runs-on: "ubuntu-latest"
environment:
@@ -35,6 +37,12 @@ jobs:
name: "logo"
path: "./public"
allow_forks: false
- uses: "dawidd6/action-download-artifact@v3"
with:
workflow: "build.yml"
name: "example"
path: "./public"
allow_forks: false
- uses: "actions/configure-pages@v4"
- uses: "actions/upload-pages-artifact@v3"
with:

View File

@@ -13,14 +13,14 @@ in a web browser.
- View and interact with topological entities: faces, edges, vertices and locations.
- Control clipping planes and transparency of each model.
- Select any entity and measure bounding box size and distances.
- Fully-featured [static deployment](#static-deployment): just upload the viewer and models to your server.
- [Live lazy updates](#live-updates) while editing the CAD model (using the `yacv-server` package).
- Fully-featured static deployment: just upload the viewer and models to your server.
- Hot reloading while editing the CAD model (using the `yacv-server` package).
## Usage & demo
## Usage
The [logo](yacv_server/logo.py) also works as an example of how to use the viewer.
The [example](example) is a fully working project that demonstrates how to use the viewer.
### Live updates
### Hot reloading
To see the live updates you will need to run the [yacv_server](yacv_server) and
open [the viewer](https://yeicor-3d.github.io/yet-another-cad-viewer/) with

2
example/.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
/venv/
/export/

41
example/README.md Normal file
View File

@@ -0,0 +1,41 @@
# Quickstart of Yet Another CAD Viewer
## Installation
1. Download the contents of this folder.
2. Assuming you have a recent version of Python installed, install the required packages:
```bash
python -m venv venv
# Activate the virtual environment first (repeat this in every new terminal):
. venv/bin/activate
pip install -r requirements.txt
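# On Windows, the activation command is "venv\Scripts\activate" instead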
```
## Usage
### Development with hot-reloading
To start the viewer, open the [GitHub Pages link](https://yeicor-3d.github.io/yet-another-cad-viewer/) of the frontend.
It will try to connect to the server at `127.0.0.1:32323` by default (this can be changed with the `preload` query
parameter).
Running `python object.py` is enough to push the model to the viewer. However, the recommended low-latency workflow while
developing is to run the file in cell mode (`# %%` cells, as used in `object.py` and supported by e.g. VS Code and other
Jupyter-compatible editors). This way, the slow imports run only once and the server keeps running; after editing the file,
just re-run the cell containing the `show_object` call to push the changes to the viewer.
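If the default address does not suit your setup, `yacv_server/server.py` reads the bind host and port from the
`YACV_HOST` and `YACV_PORT` environment variables; a minimal sketch, assuming a POSIX shell:
```bash
# Defaults are localhost:32323 (see os.getenv('YACV_HOST' / 'YACV_PORT') in server.py).
# Remember to point the viewer's preload parameter at the new address.
YACV_HOST=0.0.0.0 YACV_PORT=32323 python object.py
```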
### Static final deployment
Once your model is complete, you may want to share it with others using the same viewer.
You can do so by exporting the model as a .glb file as a last step of your script.
This is already done in `object.py` if the environment variable `CI` is set.
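To produce the file locally without a CI run, you can set that variable yourself; a minimal sketch, assuming a POSIX shell:
```bash
# Setting CI makes object.py call export_all('export'),
# which writes one .glb per shown object into ./export/
CI=1 python object.py
```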
Once you have the `object.glb` file, you can host it on any static file server and share the following link with others:
`https://yeicor-3d.github.io/yet-another-cad-viewer/?preload=<link-to-object.glb>`
For the example model, the build process is set up in [build.yml](../.github/workflows/build.yml), the upload process
is set up in [deploy.yml](../.github/workflows/deploy.yml), and the final link is:
https://yeicor-3d.github.io/yet-another-cad-viewer/?preload=example.glb

25
example/object.py Normal file
View File

@@ -0,0 +1,25 @@
import os
from build123d import * # Also works with cadquery objects!
# Optional: enable logging to see what's happening
import logging
logging.basicConfig(level=logging.DEBUG)
from yacv_server import show_object, export_all # Check out all show_* methods for more features!
# %%
# Create a simple object
with BuildPart() as obj:
Box(10, 10, 5)
Cylinder(4, 5, mode=Mode.SUBTRACT)
# Show it in the frontend
show_object(obj, 'object')
# %%
# If running on CI, export the object to a .glb file compatible with the frontend
if 'CI' in os.environ:
export_all('export')

3
example/requirements.txt Normal file
View File

@@ -0,0 +1,3 @@
build123d==0.4.0
yacv-server==0.1.0
# asyncio is part of the Python standard library; no separate pin is needed

View File

@@ -6,7 +6,8 @@ import {VContainer, VRow, VCol, VProgressCircular} from "vuetify/lib/components/
<v-container>
<v-row justify="center" style="height: 100%">
<v-col align-self="center">
<v-progress-circular indeterminate style="display: block; margin: 0 auto;"/>
<v-progress-circular indeterminate style="display: block; margin: 0 auto;"></v-progress-circular>
<slot/>
</v-col>
</v-row>
</v-container>

View File

@@ -24,8 +24,10 @@ export class NetworkManager extends EventTarget {
* Updates will be emitted as "update" events, including the download URL and the model name.
*/
async load(url: string) {
if (url.startsWith("ws://") || url.startsWith("wss://")) {
this.monitorWebSocket(url);
if (url.startsWith("dev+")) {
let baseUrl = new URL(url.slice(4));
baseUrl.searchParams.set("api_updates", "true");
this.monitorDevServer(baseUrl);
} else {
// Get the last part of the URL as the "name" of the model
let name = url.split("/").pop();
@@ -38,21 +40,20 @@ export class NetworkManager extends EventTarget {
}
}
private monitorWebSocket(url: string) {
private monitorDevServer(url: string) {
// WARNING: This will spam the console logs with failed requests when the server is down
let ws = new WebSocket(url);
ws.onmessage = (event) => {
let eventSource = new EventSource(url);
eventSource.onmessage = (event) => {
let data = JSON.parse(event.data);
console.debug("WebSocket message", data);
let urlObj = new URL(url);
urlObj.protocol = urlObj.protocol === "ws:" ? "http:" : "https:";
urlObj.searchParams.delete("api_updates");
urlObj.searchParams.set("api_object", data.name);
this.foundModel(data.name, data.hash, urlObj.toString());
};
ws.onerror = () => ws.close();
ws.onclose = () => setTimeout(() => this.monitorWebSocket(url), settings.monitorEveryMs);
let timeoutFaster = setTimeout(() => ws.close(), settings.monitorOpenTimeoutMs);
ws.onopen = () => clearTimeout(timeoutFaster);
eventSource.onerror = () => { // Retry after a very short delay
setTimeout(() => this.monitorDevServer(url), settings.monitorEveryMs);
}
}
private foundModel(name: string, hash: string | null, url: string) {

View File

@@ -10,11 +10,11 @@ export const settings = {
// @ts-ignore
// new URL('../../assets/logo_build/img.jpg.glb', import.meta.url).href,
// dev+ URLs automatically listen for new models from the python backend (via server-sent events)
"ws://127.0.0.1:32323/"
"dev+http://127.0.0.1:32323/"
],
displayLoadingEveryMs: 1000, /* How often to display partially loaded models */
monitorEveryMs: 100,
monitorOpenTimeoutMs: 100,
monitorOpenTimeoutMs: 1000,
// ModelViewer settings
autoplay: true,
arModes: 'webxr scene-viewer quick-look',

View File

@@ -1,12 +1,9 @@
<script lang="ts">
</script>
<script setup lang="ts">
import {settings} from "../misc/settings";
import {onMounted, inject, type Ref} from "vue";
import {$scene, $renderer} from "@google/model-viewer/lib/model-viewer-base";
import {inject, onMounted, type Ref, ref, watch} from "vue";
import {VList, VListItem} from "vuetify/lib/components/index.mjs";
import {$renderer, $scene} from "@google/model-viewer/lib/model-viewer-base";
import Loading from "../misc/Loading.vue";
import {ref, watch} from "vue";
import {ModelViewerElement} from '@google/model-viewer';
import type {ModelScene} from "@google/model-viewer/lib/three-components/ModelScene";
import {Hotspot} from "@google/model-viewer/lib/three-components/Hotspot";
@@ -152,7 +149,13 @@ watch(disableTap, (value) => {
:ar="settings.arModes.length > 0" :ar-modes="settings.arModes" :skybox-image="settings.background"
:environment-image="settings.background">
<slot></slot> <!-- Controls, annotations, etc. -->
<loading class="annotation initial-load-banner"></loading>
<div class="annotation initial-load-banner">
Trying to load models from...
<v-list v-for="src in settings.preload" :key="src">
<v-list-item>{{ src }}</v-list-item>
</v-list>
<loading></loading>
</div>
</model-viewer>
<!-- The SVG overlay for fake 3D lines attached to the model -->
@@ -202,4 +205,15 @@ watch(disableTap, (value) => {
height: 100dvh;
pointer-events: none;
}
.initial-load-banner {
width: 300px;
margin: auto;
margin-top: 3em;
overflow: hidden;
}
.initial-load-banner .v-list-item {
overflow: hidden;
}
</style>

16
poetry.lock generated
View File

@@ -128,6 +128,20 @@ devtools = ">=0.6"
Pygments = ">=2.2.0"
watchfiles = ">=0.10"
[[package]]
name = "aiohttp-sse"
version = "2.2.0"
description = "Server-sent events support for aiohttp."
optional = false
python-versions = ">=3.8"
files = [
{file = "aiohttp-sse-2.2.0.tar.gz", hash = "sha256:a48dd5774031d3f41a29e159ebdbb93e89c8f37c1e9e83e196296be51885a5c2"},
{file = "aiohttp_sse-2.2.0-py3-none-any.whl", hash = "sha256:339f9803bcf4682a2060e75548760d86abe4424a0d92ba66ff4985de3bd743dc"},
]
[package.dependencies]
aiohttp = ">=3.0"
[[package]]
name = "aiosignal"
version = "1.3.1"
@@ -1685,4 +1699,4 @@ multidict = ">=4.0"
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
content-hash = "b319fd1a01b16c61a1e5a3050a22e4898d8faea9ab85de6ae4bc82849b1a4025"
content-hash = "734e26c5b174a1b5d0942e9b67d6fff679e1e7e06c5f2fd9978911befc9aec3c"

View File

@@ -19,6 +19,7 @@ ocp-tessellate = "^2.0.6"
# Web
aiohttp = "^3.9.3"
aiohttp-sse = "^2.2.0"
aiohttp-cors = "^0.7.0"
aiohttp-devtools = "^1.1.2"

View File

@@ -3,7 +3,6 @@ import os
import time
from aiohttp import web
from build123d import Vector
from server import Server
@@ -21,6 +20,7 @@ show = server.show
show_object = show
show_image = server.show_image
show_all = server.show_cad_all
export_all = server.export_all
def _get_app() -> web.Application:

View File

@@ -62,3 +62,7 @@ class BufferedPubSub(Generic[T]):
def buffer(self) -> List[T]:
"""Returns a shallow copy of the list of buffered events"""
return self._buffer[:]
def delete(self, event: T):
"""Deletes an event from the buffer"""
self._buffer.remove(event)

View File

@@ -12,6 +12,7 @@ import aiohttp_cors
from OCP.TopLoc import TopLoc_Location
from OCP.TopoDS import TopoDS_Shape
from aiohttp import web
from aiohttp_sse import sse_response
from build123d import Shape, Axis, Location, Vector
from dataclasses_json import dataclass_json
@@ -59,6 +60,7 @@ class UpdatesApiFullData(UpdatesApiData):
self.kwargs = kwargs
def to_json(self) -> str:
# noinspection PyUnresolvedReferences
return super().to_json()
@@ -71,10 +73,13 @@ class Server:
app = web.Application()
runner: web.AppRunner
thread: Optional[Thread] = None
startup_complete = asyncio.Event()
do_shutdown = asyncio.Event()
at_least_one_client = asyncio.Event()
show_events = BufferedPubSub[UpdatesApiFullData]()
object_events: Dict[str, BufferedPubSub[bytes]] = {}
object_events_lock = asyncio.Lock()
frontend_lock = asyncio.Lock() # To avoid exiting too early while frontend makes requests
def __init__(self, *args, **kwargs):
# --- Routes ---
@@ -108,6 +113,11 @@ class Server:
signal.signal(signal.SIGINT | signal.SIGTERM, self.stop)
atexit.register(self.stop)
self.thread.start()
logger.info('Server started (requested)...')
# Wait for the server to be ready before returning
while not self.startup_complete.is_set():
time.sleep(0.01)
logger.info('Server started (received)...')
# noinspection PyUnusedLocal
def stop(self, *args):
@@ -115,10 +125,30 @@ class Server:
if self.thread is None:
print('Cannot stop server because it is not running')
return
# FIXME: Wait for at least one client to confirm ready before stopping in case we are too fast?
# Make sure we can hold the lock for more than 100ms (to avoid exiting too early)
logger.info('Stopping server (waiting for at least one frontend request first, cancel with CTRL+C)...')
try:
while not self.at_least_one_client.is_set():
time.sleep(0.01)
except KeyboardInterrupt:
pass
logger.info('Stopping server (waiting for no more frontend requests)...')
acquired = time.time()
while time.time() - acquired < 1.0:
if self.frontend_lock.locked():
acquired = time.time()
time.sleep(0.01)
# Stop the server in the background
self.loop.call_soon_threadsafe(lambda *a: self.do_shutdown.set())
self.thread.join(timeout=12)
logger.info('Stopping server (sent)...')
# Wait for the server to stop gracefully
self.thread.join(timeout=30)
self.thread = None
logger.info('Stopping server (confirmed)...')
if len(args) >= 1 and args[0] in (signal.SIGINT, signal.SIGTERM):
sys.exit(0) # Exit with success
@@ -135,15 +165,18 @@ class Server:
await runner.setup()
site = web.TCPSite(runner, os.getenv('YACV_HOST', 'localhost'), int(os.getenv('YACV_PORT', 32323)))
await site.start()
# print(f'Server started at {site.name}')
logger.info('Server started (sent)...')
self.startup_complete.set()
# Wait for a signal to stop the server while running
await self.do_shutdown.wait()
# print('Shutting down server...')
await runner.cleanup()
logger.info('Stopping server (received)...')
await runner.shutdown()
# await runner.cleanup() # Gets stuck?
logger.info('Stopping server (done)...')
async def _entrypoint(self, request: web.Request) -> web.StreamResponse:
"""Main entrypoint to the server, which automatically serves the frontend/updates/objects"""
if request.headers.get('Upgrade', '').lower() == 'websocket': # WebSocket -> updates API
if request.query.get('api_updates', '') != '': # ?api_updates -> updates API
return await self._api_updates(request)
elif request.query.get('api_object', '') != '': # ?api_object={name} -> object API
request.match_info['name'] = request.query['api_object']
@@ -151,36 +184,32 @@ class Server:
else: # Anything else -> frontend index.html
return await _index_handler(request)
async def _api_updates(self, request: web.Request) -> web.WebSocketResponse:
async def _api_updates(self, request: web.Request) -> web.StreamResponse:
"""Handles a publish-only websocket connection that send show_object events along with their hashes and URLs"""
ws = web.WebSocketResponse()
await ws.prepare(request)
self.at_least_one_client.set()
async with sse_response(request) as resp:
logger.debug('Client connected: %s', request.remote)
resp.ping_interval = 0.1 # HACK: forces flushing of the buffer
async def _send_api_updates():
subscription = self.show_events.subscribe()
# Send buffered events first, while keeping a lock
async with self.frontend_lock:
for data in self.show_events.buffer():
logger.debug('Sending info about %s to %s: %s', data.name, request.remote, data)
# noinspection PyUnresolvedReferences
await resp.send(data.to_json())
# Send future events over the same connection
subscription = self.show_events.subscribe(include_buffered=False)
try:
async for data in subscription:
logger.debug('Sending info about %s to %s: %s', data.name, request.remote, data)
# noinspection PyUnresolvedReferences
await ws.send_str(data.to_json())
await resp.send(data.to_json())
finally:
await subscription.aclose()
# Start sending updates to the client automatically
send_task = asyncio.create_task(_send_api_updates())
receive_task = asyncio.create_task(ws.receive())
try:
logger.debug('Client connected: %s', request.remote)
# Wait for the client to close the connection (or send a message)
done, pending = await asyncio.wait([send_task, receive_task], return_when=asyncio.FIRST_COMPLETED)
# Make sure to stop sending updates to the client and close the connection
for task in pending:
task.cancel()
finally:
await ws.close()
logger.debug('Client disconnected: %s', request.remote)
return ws
return resp
obj_counter = 0
@@ -188,6 +217,13 @@ class Server:
kwargs=None):
name = name or f'object_{self.obj_counter}'
self.obj_counter += 1
# Remove a previous object with the same name
for old_event in self.show_events.buffer():
if old_event.name == name:
self.show_events.delete(old_event)
if name in self.object_events:
del self.object_events[name]
break
precomputed_info = UpdatesApiFullData(name=name, hash=hash, obj=obj, kwargs=kwargs or {})
self.show_events.publish_nowait(precomputed_info)
logger.info('show_object(%s, %s) took %.3f seconds', name, hash, time.time() - start)
@@ -245,6 +281,7 @@ class Server:
async def _api_object(self, request: web.Request) -> web.Response:
"""Returns the object file with the matching name, building it if necessary."""
async with self.frontend_lock:
# Export the object (or fail if not found)
exported_glb = await self.export(request.match_info['name'])
@@ -258,22 +295,20 @@ class Server:
"""Returns the names of all objects that have been shown"""
return list([obj.name for obj in self.show_events.buffer()])
def _shown_object(self, name: str) -> Optional[UpdatesApiFullData]:
"""Returns the object with the given name, if it exists"""
for obj in self.show_events.buffer():
if obj.name == name:
return obj
return None
async def export(self, name: str) -> bytes:
"""Export the given previously-shown object to a single GLB file, building it if necessary."""
start = time.time()
# Check that the object to build exists and grab it if it does
found = False
obj: Optional[TopoDS_Shape] = None
kwargs: Optional[Dict[str, any]] = None
subscription = self.show_events.buffer()
for data in subscription:
if data.name == name:
obj = data.obj
kwargs = data.kwargs
found = True # Required because obj could be None
break
if not found:
event = self._shown_object(name)
if not event:
raise web.HTTPNotFound(text=f'No object named {name} was previously shown')
# Use the lock to ensure that we don't build the object twice
@@ -286,19 +321,21 @@ class Server:
def _build_object():
# Build and publish the object (once)
gltf = tessellate(obj, tolerance=kwargs.get('tolerance', 0.1),
angular_tolerance=kwargs.get('angular_tolerance', 0.1),
faces=kwargs.get('faces', True),
edges=kwargs.get('edges', True),
vertices=kwargs.get('vertices', True))
gltf = tessellate(event.obj, tolerance=event.kwargs.get('tolerance', 0.1),
angular_tolerance=event.kwargs.get('angular_tolerance', 0.1),
faces=event.kwargs.get('faces', True),
edges=event.kwargs.get('edges', True),
vertices=event.kwargs.get('vertices', True))
glb_list_of_bytes = gltf.save_to_bytes()
publish_to.publish_nowait(b''.join(glb_list_of_bytes))
logger.info('export(%s) took %.3f seconds, %d parts', name, time.time() - start,
len(gltf.meshes[0].primitives))
# We should build it fully even if we are cancelled, so we use a separate task
# Furthermore, building is CPU-bound, so we use the default executor
await asyncio.get_running_loop().run_in_executor(None, _build_object)
# await asyncio.get_running_loop().run_in_executor(None, _build_object)
# The previous line has problems with auto-closed loop on script exit
# and is cancellable, so instead run blocking code in async context :(
logger.debug('Building object %s... %s', name, event.obj)
_build_object()
# In either case return the elements of a subscription to the async generator
subscription = self.object_events[name].subscribe()
@@ -306,3 +343,15 @@ class Server:
return await anext(subscription)
finally:
await subscription.aclose()
def export_all(self, folder: str) -> None:
"""Export all previously-shown objects to GLB files in the given folder"""
import asyncio
async def _export_all():
os.makedirs(folder, exist_ok=True)
for name in self.shown_object_names():
with open(os.path.join(folder, f'{name}.glb'), 'wb') as f:
f.write(await self.export(name))
asyncio.run(_export_all())