From 05fd770f2e3882f2c5bcba4b01e1db67d66afaba Mon Sep 17 00:00:00 2001
From: James Ketrenos
Date: Wed, 1 Oct 2025 09:27:26 -0700
Subject: [PATCH] Continuing with improved test coverage

---
 .gitignore                                   |   1 +
 client/src/App.tsx                           | 212 ++++++++++++------
 server/routes/games.js                       | 219 ++++++++++++-------
 tools/puppeteer-test/run_with_server_logs.sh |  37 +++-
 tools/puppeteer-test/test.js                 |  97 ++++++--
 5 files changed, 397 insertions(+), 169 deletions(-)

diff --git a/.gitignore b/.gitignore
index 5929911..cf86175 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
+test-output/
 certs/
 /.ssh/
 /client/node_modules/
diff --git a/client/src/App.tsx b/client/src/App.tsx
index c34c7ca..7478f53 100755
--- a/client/src/App.tsx
+++ b/client/src/App.tsx
@@ -81,7 +81,7 @@ const Table: React.FC = ({ session }) => {
   const [volume, setVolume] = useState(
     localStorage.getItem("volume") ? parseFloat(localStorage.getItem("volume") as string) : 0.5
   );
-  const fields = ["id", "state", "color", "name", "private", "dice", "turn"];
+  const fields = useMemo(() => ["id", "state", "color", "name", "private", "dice", "turn"], []);

   const loc = window.location;
   const protocol = loc.protocol === "https:" ? "wss" : "ws";
@@ -109,31 +109,84 @@ const Table: React.FC = ({ session }) => {
     },
   }, !!socketUrl);

-  console.log("readyState:", readyState, "socketUrl:", socketUrl, "ws instance:", getWebSocket());
+  // Avoid calling getWebSocket() directly here because it may return a new
+  // wrapper object on each render and cause downstream consumers to re-run
+  // effects. Use a ref to expose a stable websocket instance instead.
+  const wsRef = useRef(null);
+  useEffect(() => {
+    if (readyState === ReadyState.OPEN) {
+      try {
+        wsRef.current = getWebSocket() as WebSocket | null;
+      } catch (e) {
+        wsRef.current = null;
+      }
+    } else {
+      wsRef.current = null;
+    }
+  }, [readyState, getWebSocket]);
+
+  console.log("readyState:", readyState, "socketUrl:", socketUrl, "ws instance:", wsRef.current);
+
+  // Stabilize sendJsonMessage identity: keep the latest send function in a ref
+  // so that `normalizedSend` remains stable and doesn't trigger re-running
+  // effects in children that depend on the send function.
+  const sendJsonMessageRef = useRef(sendJsonMessage);
+  useEffect(() => {
+    sendJsonMessageRef.current = sendJsonMessage;
+  }, [sendJsonMessage]);

   // Normalized send: ensure payloads follow { type, data: { ... } }
-  const normalizedSend = useCallback(
-    (message: any) => {
-      if (!sendJsonMessage) return;
-      // If message is not an object, forward as-is
-      if (typeof message !== "object" || message === null) {
-        sendJsonMessage(message);
+  // Uses the ref so the callback identity is stable across renders.
+  // Short-window batching for outgoing 'get' messages: merge fields requested
+  // by multiple rapid calls into a single message to avoid chattiness.
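// Illustrative sketch (editorial aside, not part of this patch): a generic
// "latest function in a ref" hook of the kind the hunk above hand-rolls for
// sendJsonMessage. The names useLatest and useStableSend are hypothetical and
// do not exist in App.tsx; this only shows the stabilization idiom.
import { useCallback, useEffect, useRef } from "react";

function useLatest<T>(value: T) {
  const ref = useRef(value);
  useEffect(() => {
    ref.current = value; // always holds the newest value after each render
  }, [value]);
  return ref;
}

// Wrapping a frequently recreated send function this way yields a callback
// whose identity never changes, so child effects depending on it don't re-run.
function useStableSend(send: (message: unknown) => void) {
  const latest = useLatest(send);
  return useCallback((message: unknown) => latest.current(message), [latest]);
}
// End of illustrative sketch.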
+ const OUTGOING_GET_BATCH_MS = 20; + const getBatchRef = useRef<{ fields: Set; timer?: number | null }>({ fields: new Set(), timer: null }); + + const normalizedSend = useCallback((message: any) => { + const fn = sendJsonMessageRef.current; + if (!fn) return; + + if (typeof message !== "object" || message === null) { + fn(message); + return; + } + + if ("data" in message || "config" in message) { + // Special-case: if this is a 'get' request, batch field requests + const t = message.type; + const d = message.data || message.config || {}; + if (t === 'get' && Array.isArray(d.fields)) { + d.fields.forEach((f: string) => getBatchRef.current.fields.add(f)); + if (getBatchRef.current.timer) return; + getBatchRef.current.timer = window.setTimeout(() => { + const fields = Array.from(getBatchRef.current.fields); + getBatchRef.current.fields.clear(); + getBatchRef.current.timer = null; + fn({ type: 'get', data: { fields } }); + }, OUTGOING_GET_BATCH_MS); return; } + fn(message); + return; + } - // If already in the new shape or legacy config shape, forward as-is - if ("data" in message || "config" in message) { - sendJsonMessage(message); - return; - } + const { type, ...rest } = message; + const payload = rest && Object.keys(rest).length ? rest : {}; + // If this is a shorthand 'get' message like { type: 'get', fields: [...] } + if (type === 'get' && Array.isArray(payload.fields)) { + payload.fields.forEach((f: string) => getBatchRef.current.fields.add(f)); + if (getBatchRef.current.timer) return; + getBatchRef.current.timer = window.setTimeout(() => { + const fields = Array.from(getBatchRef.current.fields); + getBatchRef.current.fields.clear(); + getBatchRef.current.timer = null; + fn({ type: 'get', data: { fields } }); + }, OUTGOING_GET_BATCH_MS); + return; + } - // Normalize: pull type and place remaining fields under `data` - const { type, ...rest } = message; - const payload = rest && Object.keys(rest).length ? rest : {}; - sendJsonMessage({ type, data: payload }); - }, - [sendJsonMessage] - ); + fn({ type, data: payload }); + }, []); const sendUpdate = (update: unknown) => { // Use the normalized send wrapper @@ -141,10 +194,16 @@ const Table: React.FC = ({ session }) => { }; // When the socket opens, request initial game state + // Only send the initial pair of messages once per WebSocket connection + // instance. React renders and wrapper object identity changes can cause + // this effect to run multiple times; guard by remembering the last + // WebSocket we sent messages on. + const prevWsRef = useRef(null); useEffect(() => { - if (readyState === ReadyState.OPEN) { + if (readyState === ReadyState.OPEN && wsRef.current && wsRef.current !== prevWsRef.current) { normalizedSend({ type: "game-update" }); normalizedSend({ type: "get", fields }); + prevWsRef.current = wsRef.current; } }, [readyState, normalizedSend, fields]); @@ -232,16 +291,38 @@ const Table: React.FC = ({ session }) => { }, [lastJsonMessage]); const globalValue = useMemo(() => ({ - ws: readyState === ReadyState.OPEN ? getWebSocket() : null, + // Provide a stable reference to the underlying WebSocket so child + // components that depend on `ws` don't see a new object identity on + // each render. 
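// Illustrative sketch (editorial aside, not part of this patch): the outgoing
// 'get' batching above is duplicated in two branches of normalizedSend. The
// same 20 ms coalescing window could live in one small helper; makeGetBatcher
// and queueGetFields are hypothetical names used only to show the technique.
type GetSender = (msg: { type: "get"; data: { fields: string[] } }) => void;

function makeGetBatcher(send: GetSender, windowMs = 20) {
  const pending = new Set<string>();
  let timer: ReturnType<typeof setTimeout> | null = null;
  return function queueGetFields(fields: string[]) {
    fields.forEach((f) => pending.add(f));
    if (timer) return; // a flush is already scheduled for this window
    timer = setTimeout(() => {
      const merged = Array.from(pending);
      pending.clear();
      timer = null;
      send({ type: "get", data: { fields: merged } });
    }, windowMs);
  };
}

// Usage sketch: rapid calls inside the window collapse into one message.
// const queueGet = makeGetBatcher((m) => sendJsonMessageRef.current(m));
// queueGet(["state"]); queueGet(["dice", "turn"]); // -> one 'get' with 3 fields
// End of illustrative sketch.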
+ ws: wsRef.current, name, gameId, sendJsonMessage: normalizedSend, - }), [readyState, name, gameId, normalizedSend]); + }), [name, gameId, normalizedSend]); useEffect(() => { setGlobal(globalValue); }, [globalValue, setGlobal]); + // Send the player's name to the server once per connection when the + // client learns the name (either from user input or from an initial + // snapshot). This helps the test harness which expects a `player-name` + // send. We track via a ref so we don't repeatedly resend on renders. + const sentPlayerNameRef = useRef(false); + useEffect(() => { + // Reset the sent flag when the websocket instance changes + if (wsRef.current && sentPlayerNameRef.current && prevWsRef.current !== wsRef.current) { + sentPlayerNameRef.current = false; + } + if (!sentPlayerNameRef.current && name && name !== "") { + // Only send when connection is open + if (readyState === ReadyState.OPEN) { + normalizedSend({ type: "player-name", name }); + sentPlayerNameRef.current = true; + } + } + }, [name, readyState, normalizedSend]); + useEffect(() => { console.log("Table useEffect for POST running, gameId =", gameId); if (gameId) { @@ -527,50 +608,59 @@ const App: React.FC = () => { const [session, setSession] = useState(null); useEffect(() => { - if (playerId) { - return; - } - console.log("API base used by client:", base); - window - .fetch(`${base}/api/v1/games/`, { - method: "GET", - cache: "no-cache", - credentials: "same-origin" /* include cookies */, - headers: { - "Content-Type": "application/json", - }, - }) - .then(async (res) => { + // Keep retrying the initial GET until the backend becomes available. + // This makes the dev/test harness more resilient to startup races where + // the Vite client loads before the backend HTTP server is ready. + if (playerId) return; + + let mounted = true; + let attempt = 0; + + const tryFetch = async () => { + attempt++; + try { + console.log(`GET fetch attempt ${attempt} base=${base}`); + const res = await window.fetch(`${base}/api/v1/games/`, { + method: "GET", + cache: "no-cache", + credentials: "same-origin" /* include cookies */, + headers: { + "Content-Type": "application/json", + }, + }); + console.log("GET fetch response status:", res.status); if (res.status >= 400) { - const error = - `Unable to connect to Ketr Ketran game server! ` + `Try refreshing your browser in a few seconds.`; - setError(error); - return null; + throw new Error(`Bad response ${res.status}`); } - // Be defensive: only call res.json() when the response looks like JSON + const ct = res.headers.get("content-type") || ""; if (ct.indexOf("application/json") !== -1) { - try { - const data = await res.json(); - console.log("GET fetch response data:", data); - setPlayerId(data.player); - } catch (e) { - console.error("GET fetch JSON parse error:", e); - const text = await res.text(); - console.error("GET fetch response text:", text); - setError("Server returned unexpected content. See console for details."); - } - } else { - const text = await res.text(); - console.error("GET fetch expected JSON but got:", text.slice(0, 200)); - setError("Server returned unexpected content. 
See console for details."); + const data = await res.json(); + console.log("GET fetch response data:", data); + if (mounted) setPlayerId(data.player); + return; } - }) - .catch((error) => { - console.error("GET fetch error:", error); - }); - }, [playerId, setPlayerId]); + + const text = await res.text(); + console.error("GET fetch expected JSON but got:", text.slice(0, 200)); + throw new Error("Server returned unexpected content"); + } catch (e) { + console.error("GET fetch error (will retry):", e); + if (!mounted) return; + // back off a bit and retry + setTimeout(() => { + if (mounted && !playerId) tryFetch(); + }, 500); + } + }; + + tryFetch(); + + return () => { + mounted = false; + }; + }, [playerId]); if (!playerId) { return <>{error}; diff --git a/server/routes/games.js b/server/routes/games.js index a6881ee..3a85693 100755 --- a/server/routes/games.js +++ b/server/routes/games.js @@ -3565,25 +3565,52 @@ const todo = `[ todo ]`; * enqueued within the throttle window, the latest one replaces prior pending * updates so the client receives a single consolidated message. */ const SEND_THROTTLE_MS = 50; +// Batch incoming 'get' requests from a single websocket session so multiple +// rapid get requests (often caused by render churn) are combined into one +// response. This helps avoid processing and responding to many near-duplicate +// get messages during connection startup. Window in ms. +const INCOMING_GET_BATCH_MS = 20; const queueSend = (session, message) => { if (!session || !session.ws) return; try { + // Ensure we compare a stable serialization: if message is JSON text, + // parse it and re-serialize with sorted keys so semantically-equal + // objects compare equal even when property order differs. + const stableStringify = (msg) => { + try { + const obj = typeof msg === 'string' ? JSON.parse(msg) : msg; + const ordered = (v) => { + if (v === null || typeof v !== 'object') return v; + if (Array.isArray(v)) return v.map(ordered); + const keys = Object.keys(v).sort(); + const out = {}; + for (const k of keys) out[k] = ordered(v[k]); + return out; + }; + return JSON.stringify(ordered(obj)); + } catch (e) { + // If parsing fails, fall back to original string representation + return typeof msg === 'string' ? msg : JSON.stringify(msg); + } + }; + const stableMessage = stableStringify(message); const now = Date.now(); if (!session._lastSent) session._lastSent = 0; const elapsed = now - session._lastSent; - // If the exact same message was sent last time and nothing is pending, - // skip sending to avoid pointless duplicate traffic. - if (!session._pendingTimeout && session._lastMessage === message) { + // If the exact same message (in stable form) was sent last time and + // nothing is pending, skip sending to avoid pointless duplicate + // traffic. + if (!session._pendingTimeout && session._lastMessage === stableMessage) { return; } // If we haven't sent recently and there's no pending timer, send now if (elapsed >= SEND_THROTTLE_MS && !session._pendingTimeout) { try { - session.ws.send(message); + session.ws.send(typeof message === 'string' ? message : JSON.stringify(message)); session._lastSent = Date.now(); - session._lastMessage = message; + session._lastMessage = stableMessage; } catch (e) { console.warn(`${session.id}: queueSend immediate send failed:`, e); } @@ -3593,10 +3620,10 @@ const queueSend = (session, message) => { // Otherwise, store latest message and schedule a send // If the pending message would equal the last-sent message, don't bother // storing/scheduling it. 
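// Illustrative sketch (editorial aside, not part of this patch): the
// stableStringify helper above compares messages by a canonical, key-sorted
// serialization. A standalone version of the same idea, with a quick check
// that key order no longer defeats duplicate suppression. Assumes plain JSON
// data without cycles, as in the server's update payloads.
const stableStringify = (value: unknown): string => {
  const ordered = (v: any): any => {
    if (v === null || typeof v !== "object") return v;
    if (Array.isArray(v)) return v.map(ordered);
    const out: Record<string, unknown> = {};
    for (const k of Object.keys(v).sort()) out[k] = ordered(v[k]);
    return out;
  };
  return JSON.stringify(ordered(value));
};

// Semantically equal objects serialize identically regardless of key order,
// so the throttle's "same as last message" check can safely skip the resend.
const a = stableStringify({ type: "game-update", data: { turn: 2, dice: [3, 4] } });
const b = stableStringify({ data: { dice: [3, 4], turn: 2 }, type: "game-update" });
console.log(a === b); // true
// End of illustrative sketch.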
- if (session._lastMessage === message) { + if (session._lastMessage === stableMessage) { return; } - session._pendingMessage = message; + session._pendingMessage = typeof message === 'string' ? message : JSON.stringify(message); if (session._pendingTimeout) { // already scheduled; newest message will be sent when timer fires return; @@ -3607,7 +3634,12 @@ const queueSend = (session, message) => { if (session.ws && session._pendingMessage) { session.ws.send(session._pendingMessage); session._lastSent = Date.now(); - session._lastMessage = session._pendingMessage; + // compute stable form of what we actually sent + try { + session._lastMessage = stableStringify(session._pendingMessage); + } catch (e) { + session._lastMessage = session._pendingMessage; + } } } catch (e) { console.warn(`${session.id}: queueSend delayed send failed:`, e); @@ -4275,88 +4307,111 @@ router.ws("/ws/:id", async (ws, req) => { break; case 'get': - // Guard against clients that send a 'get' without fields. - // Support both legacy shape: { type: 'get', fields: [...] } - // and normalized shape: { type: 'get', data: { fields: [...] } } + // Batch 'get' requests per-session for a short window so multiple + // near-simultaneous requests are merged into one response. This + // reduces CPU and network churn during client startup. const requestedFields = Array.isArray(data.fields) ? data.fields : (data.data && Array.isArray(data.data.fields)) ? data.data.fields : []; console.log(`${short}: <- get:${getName(session)} ${requestedFields.length ? requestedFields.join(',') : ''}`); - update = {}; - requestedFields.forEach((field) => { - switch (field) { - case 'player': - sendWarning(session, `'player' is not a valid item. use 'private' instead`); - update.player = undefined; - break; - case 'id': - case 'chat': - case 'startTime': - case 'state': - case 'turn': - case 'turns': - case 'winner': - case 'placements': - case 'longestRoadLength': - case 'robber': - case 'robberName': - case 'pips': - case 'pipsOrder': - case 'borders': - case 'tileOrder': - case 'active': - case 'largestArmy': - case 'mostDeveloped': - case 'mostPorts': - case 'longestRoad': - case 'tiles': - case 'pipOrder': - case 'signature': - case 'borderOrder': - case 'dice': - case 'activities': - update[field] = game[field]; - break; - case 'rules': - update[field] = game.rules ? 
game.rules : {}; - break; - case 'name': - update.name = session.name; - break; - case 'unselected': - update.unselected = getFilteredUnselected(game); - break; - case 'private': - update.private = session.player; - break; - case 'players': - update.players = getFilteredPlayers(game); - break; - case 'color': - console.log(`${session.id}: -> Returning color as ${session.color} for ${getName(session)}`); - update.color = session.color; - break; - case 'timestamp': - update.timestamp = Date.now(); - break; - default: - if (field in game) { - console.warn(`${short}: WARNING: Requested GET not-privatized/sanitized field: ${field}`); - update[field] = game[field]; - } else { - if (field in session) { - console.warn(`${short}: WARNING: Requested GET not-sanitized session field: ${field}`); - update[field] = session[field]; - } else { - console.warn(`${short}: WARNING: Requested GET unsupported field: ${field}`); + + // Ensure a batch structure exists on the session + if (!session._getBatch) { + session._getBatch = { fields: new Set(), timer: undefined }; + } + // Merge requested fields into the batch set + requestedFields.forEach(f => session._getBatch.fields.add(f)); + + // If a timer is already scheduled, we will respond when it fires. + if (session._getBatch.timer) { + break; + } + + // Schedule a single reply after the batching window + session._getBatch.timer = setTimeout(() => { + try { + const fieldsArray = Array.from(session._getBatch.fields); + const batchedUpdate = {}; + fieldsArray.forEach((field) => { + switch (field) { + case 'player': + sendWarning(session, `'player' is not a valid item. use 'private' instead`); + batchedUpdate.player = undefined; + break; + case 'id': + case 'chat': + case 'startTime': + case 'state': + case 'turn': + case 'turns': + case 'winner': + case 'placements': + case 'longestRoadLength': + case 'robber': + case 'robberName': + case 'pips': + case 'pipsOrder': + case 'borders': + case 'tileOrder': + case 'active': + case 'largestArmy': + case 'mostDeveloped': + case 'mostPorts': + case 'longestRoad': + case 'tiles': + case 'pipOrder': + case 'signature': + case 'borderOrder': + case 'dice': + case 'activities': + batchedUpdate[field] = game[field]; + break; + case 'rules': + batchedUpdate[field] = game.rules ? 
game.rules : {}; + break; + case 'name': + batchedUpdate.name = session.name; + break; + case 'unselected': + batchedUpdate.unselected = getFilteredUnselected(game); + break; + case 'private': + batchedUpdate.private = session.player; + break; + case 'players': + batchedUpdate.players = getFilteredPlayers(game); + break; + case 'color': + console.log(`${session.id}: -> Returning color as ${session.color} for ${getName(session)}`); + batchedUpdate.color = session.color; + break; + case 'timestamp': + batchedUpdate.timestamp = Date.now(); + break; + default: + if (field in game) { + console.warn(`${short}: WARNING: Requested GET not-privatized/sanitized field: ${field}`); + batchedUpdate[field] = game[field]; + } else if (field in session) { + console.warn(`${short}: WARNING: Requested GET not-sanitized session field: ${field}`); + batchedUpdate[field] = session[field]; + } else { + console.warn(`${short}: WARNING: Requested GET unsupported field: ${field}`); + } + break; } - } - break; + }); + sendUpdateToPlayer(game, session, batchedUpdate); + } catch (e) { + console.warn(`${session.id}: get batch handler failed:`, e); } - }); - sendUpdateToPlayer(game, session, update); + // clear batch + session._getBatch.fields.clear(); + clearTimeout(session._getBatch.timer); + session._getBatch.timer = undefined; + }, INCOMING_GET_BATCH_MS); break; case 'chat': diff --git a/tools/puppeteer-test/run_with_server_logs.sh b/tools/puppeteer-test/run_with_server_logs.sh index 58cf2b9..37b118c 100755 --- a/tools/puppeteer-test/run_with_server_logs.sh +++ b/tools/puppeteer-test/run_with_server_logs.sh @@ -5,10 +5,16 @@ set -euo pipefail # a timestamped file in the repo root. This helps correlate client-side # intercepted WS sends with server handling (e.g., setPlayerName lines). +# Use a YYMMDD-HHMMSS style run dir name so it lines up with the harness. +RUN_DIR_NAME=$(date +"%y%m%d-%H%M%S") TS=$(date +%s) OUT_DIR=$(pwd) -LOGFILE="$OUT_DIR/tmp-server-logs-$TS.server.log" -echo "ts=$TS" +# Per-run directories live under test-output/ +mkdir -p "$OUT_DIR/test-output" +LOGDIR="$OUT_DIR/test-output/${RUN_DIR_NAME}" +mkdir -p "$LOGDIR" +LOGFILE="$LOGDIR/server-logs-$TS.server.log" +echo "ts=$TS run_dir=$RUN_DIR_NAME" # Find the server container by common names used in docker-compose CID=$(docker ps -q --filter "name=ketr.ketran.dev" || true) @@ -20,7 +26,7 @@ fi echo "server container id: ${CID:-}" LOG_PID="" -if [ -n "$CID" ]; then + if [ -n "$CID" ]; then echo "Starting log follower: docker logs -f $CID -> $LOGFILE" # Stream logs (since 0s to include recent lines) in background docker logs --since 0s -f "$CID" > "$LOGFILE" 2>&1 & @@ -37,15 +43,30 @@ cleanup() { kill "$LOG_PID" 2>/dev/null || true wait "$LOG_PID" 2>/dev/null || true fi - echo "Artifacts in repo root (tmp-*)" - ls -la tmp-* 2>/dev/null || true + echo "Artifacts in test-output/" + # List timestamped per-run artifact directories under test-output + echo "Artifact run directories (format YYMMDD-HHMMSS):" + ls -1d test-output/??????-?????? 
2>/dev/null || echo " " + # Show contents of each run directory (if any) + for d in test-output/??????-??????; do + if [ -d "$d" ]; then + echo "\nContents of $d:"; + ls -la "$d" || true; + fi + done exit $rc } trap cleanup EXIT INT TERM -echo "Running harness (docker compose run --rm -e TEST_MAX_MS=20000 peddlers-test)" -# Run the test harness inside compose; let it run with the env override for a short test -docker compose -f docker-compose.yml run --rm -e TEST_MAX_MS=20000 peddlers-test +echo "Running harness (docker compose run --rm -e TEST_MAX_MS=20000 -e TEST_RUN_DIR_NAME=$RUN_DIR_NAME peddlers-test)" +# Run the test harness inside compose; pass TEST_RUN_DIR_NAME so the harness +# writes artifacts into the same per-run directory under /workspace. +docker compose -f docker-compose.yml run --rm -e TEST_MAX_MS=20000 -e TEST_RUN_DIR_NAME=$RUN_DIR_NAME peddlers-test + +# Update the latest symlink to point to this run directory so callers can +# quickly find the newest artifacts at test-output/latest +ln -sfn "$LOGDIR" "$OUT_DIR/test-output/latest" +echo "Updated test-output/latest -> $LOGDIR" echo "Harness completed; cleanup will run via trap" diff --git a/tools/puppeteer-test/test.js b/tools/puppeteer-test/test.js index 15992d4..1ce9eba 100644 --- a/tools/puppeteer-test/test.js +++ b/tools/puppeteer-test/test.js @@ -1,11 +1,36 @@ const puppeteer = require("puppeteer-core"); const fs = require("fs"); +const path = require('path'); (async () => { const t0 = Date.now(); const ts = () => new Date().toISOString() + " +" + (Date.now() - t0) + "ms"; const log = (...args) => console.log(ts(), ...args); log("Puppeteer test starting"); + // Create a per-run output directory under /workspace named YYMMDD-HHMMSS. + const WORKSPACE_DIR = '/workspace'; + const makeRunDirName = (d) => { + const yy = String(d.getFullYear()).slice(-2); + const mm = String(d.getMonth() + 1).padStart(2, '0'); + const dd = String(d.getDate()).padStart(2, '0'); + const hh = String(d.getHours()).padStart(2, '0'); + const min = String(d.getMinutes()).padStart(2, '0'); + const ss = String(d.getSeconds()).padStart(2, '0'); + return `${yy}${mm}${dd}-${hh}${min}${ss}`; + }; + // Allow the caller to provide a run directory name so external wrappers + // (e.g., run_with_server_logs.sh) can place related artifacts together. + const runDirName = process.env.TEST_RUN_DIR_NAME || makeRunDirName(new Date()); + // Place per-run outputs under /workspace/test-output/ so the + // host-side wrapper (which creates ./test-output/) and the + // container-mounted workspace line up exactly. + const OUT_DIR = path.join(WORKSPACE_DIR, 'test-output', runDirName); + try { + fs.mkdirSync(OUT_DIR, { recursive: true }); + log('Using output directory', OUT_DIR); + } catch (e) { + log('Failed to create output directory', OUT_DIR, e && e.message ? e.message : e); + } let browser; // Global timeout (ms) for the whole test run to avoid infinite log floods. const MAX_TEST_MS = parseInt(process.env.TEST_MAX_MS || "60000", 10); @@ -174,21 +199,22 @@ const fs = require("fs"); // Diagnostic snapshot: save the full HTML and a screenshot immediately // after domcontentloaded so we can inspect the initial page state even // when the inputs appear much later. 
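// Illustrative sketch (editorial aside, not part of this patch): the wrapper
// script above publishes each run under test-output/<YYMMDD-HHMMSS> and
// repoints test-output/latest at it. This shows how a follow-up tool might
// resolve the newest run; resolveLatestRunDir is a hypothetical helper, not
// code from the repo.
import * as fs from "fs";
import * as path from "path";

function resolveLatestRunDir(repoRoot: string): string | null {
  const outRoot = path.join(repoRoot, "test-output");
  const latest = path.join(outRoot, "latest");
  if (fs.existsSync(latest)) return latest; // symlink maintained by the wrapper
  if (!fs.existsSync(outRoot)) return null;
  const runs = fs
    .readdirSync(outRoot)
    .filter((d) => /^\d{6}-\d{6}$/.test(d)) // matches the YYMMDD-HHMMSS scheme
    .sort(); // lexicographic order is chronological for this format
  return runs.length ? path.join(outRoot, runs[runs.length - 1]) : null;
}

// Usage sketch:
// const runDir = resolveLatestRunDir(process.cwd());
// if (runDir) console.log(fs.readdirSync(runDir));
// End of illustrative sketch.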
- try { - const domContent = await page.content(); try { - fs.writeFileSync('/workspace/tmp-domcontent.html', domContent); - log('Saved domcontent HTML to /workspace/tmp-domcontent.html (length', domContent.length + ')'); + const domContent = await page.content(); + try { + const outPath = path.join(OUT_DIR, 'domcontent.html'); + fs.writeFileSync(outPath, domContent); + log('Saved domcontent HTML to', outPath, '(length', domContent.length + ')'); + } catch (e) { + log('Failed to write domcontent HTML:', e && e.message ? e.message : e); + } } catch (e) { - log('Failed to write domcontent HTML:', e && e.message ? e.message : e); + log('Could not read page content at domcontentloaded:', e && e.message ? e.message : e); } - } catch (e) { - log('Could not read page content at domcontentloaded:', e && e.message ? e.message : e); - } try { - const shotNow = '/workspace/tmp-domcontent.png'; - await page.screenshot({ path: shotNow, fullPage: true }); - log('Saved domcontent screenshot to', shotNow); + const shotNow = path.join(OUT_DIR, 'domcontent.png'); + await page.screenshot({ path: shotNow, fullPage: true }); + log('Saved domcontent screenshot to', shotNow); } catch (e) { log('Failed to save domcontent screenshot:', e && e.message ? e.message : e); } @@ -387,7 +413,7 @@ const fs = require("fs"); const delta = Date.now() - firstSeen; log('Probe: inputs first seen at', new Date(firstSeen).toISOString(), `(ago ${delta}ms)`); // Save a screenshot named with the first-seen timestamp to make it easy to correlate - const shotPath = `/workspace/tmp-house-rules-${firstSeen}.png`; + const shotPath = path.join(OUT_DIR, 'house-rules.png'); try { await page.screenshot({ path: shotPath, fullPage: true }); log('Saved input-appearance screenshot to', shotPath); @@ -398,7 +424,7 @@ const fs = require("fs"); // inspect the DOM (some UI frameworks render inputs late). try { const html = await page.content(); - const outPath = `/workspace/tmp-domcontent-after-inputs-${firstSeen}.html`; + const outPath = path.join(OUT_DIR, 'domcontent-after-inputs.html'); try { fs.writeFileSync(outPath, html); log('Saved DOM HTML at input-appearance to', outPath, '(length', html.length + ')'); @@ -410,7 +436,7 @@ const fs = require("fs"); // verification. try { const sends = await page.evaluate(() => window.__wsSends || []); - const wsOut = `/workspace/tmp-ws-sends-${firstSeen}.json`; + const wsOut = path.join(OUT_DIR, 'ws-sends.json'); try { fs.writeFileSync(wsOut, JSON.stringify(sends, null, 2)); log('Saved intercepted WS sends to', wsOut, '(count', (sends && sends.length) + ')'); @@ -423,7 +449,7 @@ const fs = require("fs"); if (!hasPlayerName) { log('ASSERTION FAILED: No player-name send with name=Automaton found in intercepted WS sends'); // Write a failure marker file for CI to pick up - try { fs.writeFileSync('/workspace/tmp-assert-failed-player-name.txt', 'Missing player-name Automaton'); } catch (e) {} + try { fs.writeFileSync(path.join(OUT_DIR, 'assert-failed-player-name.txt'), 'Missing player-name Automaton'); } catch (e) {} process.exit(3); } else { log('Assertion passed: player-name Automaton was sent by client'); @@ -452,13 +478,48 @@ const fs = require("fs"); // same assertion so CI will fail fast when player-name is missing. 
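// Illustrative sketch (editorial aside, not part of this patch): both
// assertion sites parse window.__wsSends and look for a player-name message in
// either the legacy shape { type, name } or the normalized { type, data: { name } }.
// The check could be factored into one helper; hasPlayerNameSend and
// WsSendRecord are hypothetical names, not identifiers in test.js.
interface WsSendRecord {
  ts?: number;  // capture timestamp, when recorded by the interceptor
  data: string; // raw JSON text captured from WebSocket.send
}

function hasPlayerNameSend(sends: WsSendRecord[], expected: string): boolean {
  return sends.some((s) => {
    try {
      const msg = JSON.parse(s.data);
      if (msg.type !== "player-name") return false;
      const name = msg?.data?.name ?? msg?.name;
      return name === expected;
    } catch {
      return false; // ignore frames that are not JSON
    }
  });
}

// Usage sketch: if (!hasPlayerNameSend(sends, "Automaton")) process.exit(3);
// End of illustrative sketch.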
try { const sends = await page.evaluate(() => window.__wsSends || []); - const outLatest = '/workspace/tmp-ws-sends-latest.json'; + const outLatest = path.join(OUT_DIR, 'ws-sends-latest.json'); try { fs.writeFileSync(outLatest, JSON.stringify(sends, null, 2)); log('Saved intercepted WS sends (latest) to', outLatest, '(count', (sends && sends.length) + ')'); } catch (e) { log('Failed to write latest intercepted WS sends file:', e && e.message ? e.message : e); } + // Produce a small summary JSON for quick CI checks: total count, time range, + // counts per message type. + try { + const parsed = (sends || []).map(s => { + try { return JSON.parse(s.data); } catch (e) { return null; } + }).filter(Boolean); + const typesCount = {}; + let minTs = null, maxTs = null; + (sends || []).forEach(s => { + if (s && s.ts) { + if (minTs === null || s.ts < minTs) minTs = s.ts; + if (maxTs === null || s.ts > maxTs) maxTs = s.ts; + } + }); + parsed.forEach(p => { + const t = p.type || 'unknown'; + typesCount[t] = (typesCount[t] || 0) + 1; + }); + const summary = { + runDir: runDirName, + totalMessages: (sends || []).length, + types: typesCount, + tsRange: minTs !== null ? { min: minTs, max: maxTs } : null, + generatedAt: Date.now() + }; + const summaryPath = path.join(OUT_DIR, 'summary.json'); + try { + fs.writeFileSync(summaryPath, JSON.stringify(summary, null, 2)); + log('Wrote summary JSON to', summaryPath); + } catch (e) { + log('Failed to write summary JSON:', e && e.message ? e.message : e); + } + } catch (e) { + log('Could not generate summary.json:', e && e.message ? e.message : e); + } // Run the same assertion against the latest sends if a per-timestamp // file was not written earlier. try { @@ -468,7 +529,7 @@ const fs = require("fs"); const hasPlayerName = parsed.some(p => p.type === 'player-name' && ((p.data && p.data.name) === 'Automaton' || (p.name === 'Automaton'))); if (!hasPlayerName) { log('ASSERTION FAILED: No player-name send with name=Automaton found in intercepted WS sends (latest)'); - try { fs.writeFileSync('/workspace/tmp-assert-failed-player-name.txt', 'Missing player-name Automaton'); } catch (e) {} + try { fs.writeFileSync(path.join(OUT_DIR, 'assert-failed-player-name.txt'), 'Missing player-name Automaton'); } catch (e) {} process.exit(3); } else { log('Assertion passed (latest): player-name Automaton was sent by client'); @@ -600,7 +661,7 @@ const fs = require("fs"); } // Save a screenshot for inspection (workspace is mounted when running container) - const out = "/workspace/tmp-house-rules.png"; + const out = path.join(OUT_DIR, 'house-rules.png'); try { const tShot = Date.now(); await page.screenshot({ path: out, fullPage: true });
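// Illustrative sketch (editorial aside, not part of this patch): the shape of
// the summary.json written above, expressed as a TypeScript interface plus an
// instance. Field names mirror the object built in test.js; the sample values
// are invented purely for illustration.
interface WsSendSummary {
  runDir: string;                                // YYMMDD-HHMMSS per-run directory name
  totalMessages: number;                         // count of intercepted WebSocket sends
  types: Record<string, number>;                 // message count per parsed `type`
  tsRange: { min: number; max: number } | null;  // epoch-ms bounds, if timestamps exist
  generatedAt: number;                           // epoch ms when the summary was written
}

const exampleSummary: WsSendSummary = {
  runDir: "251001-092726",
  totalMessages: 12,
  types: { "game-update": 1, get: 2, "player-name": 1, chat: 8 },
  tsRange: { min: 1759336046000, max: 1759336051000 },
  generatedAt: 1759336052000,
};

// A CI step could parse this file and fail fast when an expected message type
// (e.g. "player-name") is missing, without re-reading the full ws-sends dump.
// End of illustrative sketch.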