Refactoring into a Conversation element

This commit is contained in:
James Ketr 2025-04-21 19:57:07 -07:00
parent a141a1f954
commit 1d1bcb3e13
16 changed files with 1424 additions and 969 deletions

View File

@ -287,6 +287,10 @@ RUN { \
echo ' sleep 3'; \
echo ' done &' ; \
echo ' fi' ; \
echo ' if [[ ! -e src/cert.pem ]]; then' ; \
echo ' echo "Generating self-signed certificate for HTTPS"'; \
echo ' openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout src/key.pem -out src/cert.pem -subj "/C=US/ST=OR/L=Portland/O=Development/CN=localhost"'; \
echo ' fi' ; \
echo ' while true; do'; \
echo ' echo "Launching Backstory server..."'; \
echo ' python src/server.py "${@}" || echo "Backstory server died. Restarting in 3 seconds."'; \

12
frontend/craco.config.js Normal file
View File

@ -0,0 +1,12 @@
// CRACO configuration: make the CRA development server serve over HTTPS.
const devServerConfig = {
  server: {
    type: 'https',
    // Custom certificates may be supplied if needed:
    //   options: { cert: '/path/to/cert.pem', key: '/path/to/key.pem' }
  },
};

module.exports = { devServer: devServerConfig };

View File

@ -39,6 +39,7 @@
"web-vitals": "^2.1.4"
},
"devDependencies": {
"@craco/craco": "^7.1.0",
"@types/plotly.js": "^2.35.5"
}
},
@ -1975,6 +1976,52 @@
"integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==",
"peer": true
},
"node_modules/@craco/craco": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/@craco/craco/-/craco-7.1.0.tgz",
"integrity": "sha512-oRAcPIKYrfPXp9rSzlsDNeOaVtDiKhoyqSXUoqiK24jCkHr4T8m/a2f74yXIzCbIheoUWDOIfWZyRgFgT+cpqA==",
"dev": true,
"dependencies": {
"autoprefixer": "^10.4.12",
"cosmiconfig": "^7.0.1",
"cosmiconfig-typescript-loader": "^1.0.0",
"cross-spawn": "^7.0.3",
"lodash": "^4.17.21",
"semver": "^7.3.7",
"webpack-merge": "^5.8.0"
},
"bin": {
"craco": "dist/bin/craco.js"
},
"engines": {
"node": ">=6"
},
"peerDependencies": {
"react-scripts": "^5.0.0"
}
},
"node_modules/@cspotcode/source-map-support": {
"version": "0.8.1",
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
"integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==",
"devOptional": true,
"dependencies": {
"@jridgewell/trace-mapping": "0.3.9"
},
"engines": {
"node": ">=12"
}
},
"node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": {
"version": "0.3.9",
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz",
"integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==",
"devOptional": true,
"dependencies": {
"@jridgewell/resolve-uri": "^3.0.3",
"@jridgewell/sourcemap-codec": "^1.4.10"
}
},
"node_modules/@csstools/normalize.css": {
"version": "12.1.1",
"resolved": "https://registry.npmjs.org/@csstools/normalize.css/-/normalize.css-12.1.1.tgz",
@ -4379,6 +4426,30 @@
"node": ">=10.13.0"
}
},
"node_modules/@tsconfig/node10": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz",
"integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==",
"devOptional": true
},
"node_modules/@tsconfig/node12": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz",
"integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==",
"devOptional": true
},
"node_modules/@tsconfig/node14": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz",
"integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==",
"devOptional": true
},
"node_modules/@tsconfig/node16": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz",
"integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==",
"devOptional": true
},
"node_modules/@turf/area": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/@turf/area/-/area-7.2.0.tgz",
@ -6780,6 +6851,20 @@
"wrap-ansi": "^7.0.0"
}
},
"node_modules/clone-deep": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz",
"integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==",
"dev": true,
"dependencies": {
"is-plain-object": "^2.0.4",
"kind-of": "^6.0.2",
"shallow-clone": "^3.0.0"
},
"engines": {
"node": ">=6"
}
},
"node_modules/clsx": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz",
@ -7222,12 +7307,37 @@
"node": ">=10"
}
},
"node_modules/cosmiconfig-typescript-loader": {
"version": "1.0.9",
"resolved": "https://registry.npmjs.org/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-1.0.9.tgz",
"integrity": "sha512-tRuMRhxN4m1Y8hP9SNYfz7jRwt8lZdWxdjg/ohg5esKmsndJIn4yT96oJVcf5x0eA11taXl+sIp+ielu529k6g==",
"dev": true,
"dependencies": {
"cosmiconfig": "^7",
"ts-node": "^10.7.0"
},
"engines": {
"node": ">=12",
"npm": ">=6"
},
"peerDependencies": {
"@types/node": "*",
"cosmiconfig": ">=7",
"typescript": ">=3"
}
},
"node_modules/country-regex": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/country-regex/-/country-regex-1.1.0.tgz",
"integrity": "sha512-iSPlClZP8vX7MC3/u6s3lrDuoQyhQukh5LyABJ3hvfzbQ3Yyayd4fp04zjLnfi267B/B2FkumcWWgrbban7sSA==",
"peer": true
},
"node_modules/create-require": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz",
"integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
"devOptional": true
},
"node_modules/cross-spawn": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
@ -8151,6 +8261,15 @@
"resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz",
"integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw=="
},
"node_modules/diff": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
"integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
"devOptional": true,
"engines": {
"node": ">=0.3.1"
}
},
"node_modules/diff-sequences": {
"version": "27.5.1",
"resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz",
@ -9919,6 +10038,15 @@
"node": ">=8"
}
},
"node_modules/flat": {
"version": "5.0.2",
"resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz",
"integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==",
"dev": true,
"bin": {
"flat": "cli.js"
}
},
"node_modules/flat-cache": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz",
@ -11958,6 +12086,18 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/is-plain-object": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz",
"integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==",
"dev": true,
"dependencies": {
"isobject": "^3.0.1"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/is-potential-custom-element-name": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz",
@ -12155,6 +12295,15 @@
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
"integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="
},
"node_modules/isobject": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz",
"integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==",
"dev": true,
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/istanbul-lib-coverage": {
"version": "3.2.2",
"resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz",
@ -13590,6 +13739,12 @@
"semver": "bin/semver.js"
}
},
"node_modules/make-error": {
"version": "1.3.6",
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz",
"integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==",
"devOptional": true
},
"node_modules/makeerror": {
"version": "1.0.12",
"resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz",
@ -18877,6 +19032,18 @@
"resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz",
"integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="
},
"node_modules/shallow-clone": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz",
"integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==",
"dev": true,
"dependencies": {
"kind-of": "^6.0.2"
},
"engines": {
"node": ">=8"
}
},
"node_modules/shallow-copy": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/shallow-copy/-/shallow-copy-0.0.1.tgz",
@ -20501,6 +20668,67 @@
"resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz",
"integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA=="
},
"node_modules/ts-node": {
"version": "10.9.2",
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz",
"integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==",
"devOptional": true,
"dependencies": {
"@cspotcode/source-map-support": "^0.8.0",
"@tsconfig/node10": "^1.0.7",
"@tsconfig/node12": "^1.0.7",
"@tsconfig/node14": "^1.0.0",
"@tsconfig/node16": "^1.0.2",
"acorn": "^8.4.1",
"acorn-walk": "^8.1.1",
"arg": "^4.1.0",
"create-require": "^1.1.0",
"diff": "^4.0.1",
"make-error": "^1.1.1",
"v8-compile-cache-lib": "^3.0.1",
"yn": "3.1.1"
},
"bin": {
"ts-node": "dist/bin.js",
"ts-node-cwd": "dist/bin-cwd.js",
"ts-node-esm": "dist/bin-esm.js",
"ts-node-script": "dist/bin-script.js",
"ts-node-transpile-only": "dist/bin-transpile.js",
"ts-script": "dist/bin-script-deprecated.js"
},
"peerDependencies": {
"@swc/core": ">=1.2.50",
"@swc/wasm": ">=1.2.50",
"@types/node": "*",
"typescript": ">=2.7"
},
"peerDependenciesMeta": {
"@swc/core": {
"optional": true
},
"@swc/wasm": {
"optional": true
}
}
},
"node_modules/ts-node/node_modules/acorn-walk": {
"version": "8.3.4",
"resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz",
"integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==",
"devOptional": true,
"dependencies": {
"acorn": "^8.11.0"
},
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/ts-node/node_modules/arg": {
"version": "4.1.3",
"resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz",
"integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==",
"devOptional": true
},
"node_modules/tsconfig-paths": {
"version": "3.15.0",
"resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz",
@ -21095,6 +21323,12 @@
"uuid": "dist/bin/uuid"
}
},
"node_modules/v8-compile-cache-lib": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz",
"integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==",
"devOptional": true
},
"node_modules/v8-to-istanbul": {
"version": "8.1.1",
"resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz",
@ -21436,6 +21670,20 @@
"node": ">=10.13.0"
}
},
"node_modules/webpack-merge": {
"version": "5.10.0",
"resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.10.0.tgz",
"integrity": "sha512-+4zXKdx7UnO+1jaN4l2lHVD+mFvnlZQP/6ljaJVb4SZiwIKeUnrT5l0gkT8z+n4hKpC+jpOv6O9R+gLtag7pSA==",
"dev": true,
"dependencies": {
"clone-deep": "^4.0.1",
"flat": "^5.0.2",
"wildcard": "^2.0.0"
},
"engines": {
"node": ">=10.0.0"
}
},
"node_modules/webpack-sources": {
"version": "3.2.3",
"resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz",
@ -21622,6 +21870,12 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/wildcard": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.1.tgz",
"integrity": "sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ==",
"dev": true
},
"node_modules/window-size": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz",
@ -22085,6 +22339,15 @@
"node": ">=10"
}
},
"node_modules/yn": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
"integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==",
"devOptional": true,
"engines": {
"node": ">=6"
}
},
"node_modules/yocto-queue": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",

View File

@ -9,7 +9,6 @@
"@mui/icons-material": "^7.0.1",
"@mui/material": "^7.0.1",
"@tensorflow/tfjs": "^4.22.0",
"@tensorflow/tfjs-backend-webgl": "^4.22.0",
"@tensorflow/tfjs-tsne": "^0.2.0",
"@testing-library/dom": "^10.4.0",
"@testing-library/jest-dom": "^6.6.3",
@ -29,15 +28,13 @@
"rehype-katex": "^7.0.1",
"remark-gfm": "^4.0.1",
"remark-math": "^6.0.0",
"tsne-js": "^1.0.3",
"typescript": "^4.9.5",
"web-vitals": "^2.1.4"
},
"scripts": {
"start": "react-scripts start",
"build": "react-scripts build",
"test": "react-scripts test",
"eject": "react-scripts eject"
"start": "HTTPS=true craco start",
"build": "craco build",
"test": "craco test"
},
"eslintConfig": {
"extends": [

File diff suppressed because it is too large Load Diff

View File

@ -10,9 +10,10 @@ interface ChatBubbleProps {
isFullWidth?: boolean;
children: React.ReactNode;
sx?: SxProps<Theme>;
className?: string;
}
function ChatBubble({ role, isFullWidth, children, sx }: ChatBubbleProps) {
function ChatBubble({ role, isFullWidth, children, sx, className }: ChatBubbleProps) {
const theme = useTheme();
const styles = {
@ -76,7 +77,7 @@ function ChatBubble({ role, isFullWidth, children, sx }: ChatBubbleProps) {
};
return (
<Box sx={{ ...styles[role], ...sx }}>
<Box className={className} sx={{ ...styles[role], ...sx }}>
{children}
</Box>
);

495
frontend/src/Controls.tsx Normal file
View File

@ -0,0 +1,495 @@
import React, { useState, useEffect, ReactElement } from 'react';
import FormGroup from '@mui/material/FormGroup';
import FormControlLabel from '@mui/material/FormControlLabel';
import Switch from '@mui/material/Switch';
import Divider from '@mui/material/Divider';
import TextField from '@mui/material/TextField';
import Accordion from '@mui/material/Accordion';
import AccordionActions from '@mui/material/AccordionActions';
import AccordionSummary from '@mui/material/AccordionSummary';
import AccordionDetails from '@mui/material/AccordionDetails';
import Typography from '@mui/material/Typography';
import Button from '@mui/material/Button';
import Box from '@mui/material/Box';
import ResetIcon from '@mui/icons-material/History';
import ExpandMoreIcon from '@mui/icons-material/ExpandMore';
import { SeverityType } from './Snack';
// A server-side capability that can be toggled per session. The same shape is
// used for both tools (function-calling entries, populated via `function`) and
// RAG databases (populated via top-level `name`/`description`).
type Tool = {
  type: string,
  function?: {
    name: string,
    description: string,
    parameters?: any,
    returns?: any
  },
  // Used by RAG entries instead of `function.name`/`function.description`.
  name?: string,
  description?: string,
  enabled: boolean
};
// Props for the Controls panel. `sessionId` may be undefined while the chat
// session is still being instanced; network effects bail out until it is set.
interface ControlsParams {
  connectionBase: string,
  sessionId: string | undefined,
  setSnack: (message: string, severity?: SeverityType) => void,
};
// Single GPU description as reported by /api/system-info.
type GPUInfo = {
  name: string,
  memory: number,   // bytes — presumably; rendered as GB. TODO confirm with server
  discrete: boolean
}
// Hardware summary returned by /api/system-info; keys are display labels.
type SystemInfo = {
  "Installed RAM": string,
  "Graphics Card": GPUInfo[],
  "CPU": string
};
// Renders the server hardware summary as label/value rows. "Graphics Card"
// entries are arrays and expand to one row per card; everything else renders
// as a single row.
const SystemInfoComponent: React.FC<{ systemInfo: SystemInfo | undefined }> = ({ systemInfo }) => {
  const [systemElements, setSystemElements] = useState<ReactElement[]>([]);

  // Swap ASCII trademark markers for their typographic symbols in one pass.
  const convertToSymbols = (text: string) => {
    const symbolMap: { [marker: string]: string } = {
      '(R)': '®',
      '(C)': '©',
      '(TM)': '™',
    };
    return text.replace(/\((?:R|C|TM)\)/g, (marker) => symbolMap[marker]);
  };

  useEffect(() => {
    if (systemInfo === undefined) {
      return;
    }
    const rows = Object.entries(systemInfo).flatMap(([label, value]) => {
      if (Array.isArray(value)) {
        // One row per card, numbered from 0.
        return value.map((card, cardIndex) => (
          <div key={cardIndex} className="SystemInfoItem">
            <div>{convertToSymbols(label)} {cardIndex}</div>
            <div>{convertToSymbols(card.name)} {card.discrete ? `w/ ${Math.round(card.memory / (1024 * 1024 * 1024))}GB RAM` : "(integrated)"}</div>
          </div>
        ));
      }
      // Scalar entry: render its label and stringified value.
      return (
        <div key={label} className="SystemInfoItem">
          <div>{convertToSymbols(label)}</div>
          <div>{convertToSymbols(String(value))}</div>
        </div>
      );
    });
    setSystemElements(rows);
  }, [systemInfo]);

  return <div className="SystemInfo">{systemElements}</div>;
};
const Controls = ({ sessionId, setSnack, connectionBase }: ControlsParams) => {
const [editSystemPrompt, setEditSystemPrompt] = useState<string>("");
const [systemInfo, setSystemInfo] = useState<SystemInfo | undefined>(undefined);
const [tools, setTools] = useState<Tool[]>([]);
const [rags, setRags] = useState<Tool[]>([]);
const [systemPrompt, setSystemPrompt] = useState<string>("");
const [serverSystemPrompt, setServerSystemPrompt] = useState<string>("");
const [messageHistoryLength, setMessageHistoryLength] = useState<number>(5);
useEffect(() => {
if (systemPrompt === serverSystemPrompt || !systemPrompt.trim() || sessionId === undefined) {
return;
}
const sendSystemPrompt = async (prompt: string) => {
try {
const response = await fetch(connectionBase + `/api/tunables/${sessionId}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
},
body: JSON.stringify({ "system-prompt": prompt }),
});
const data = await response.json();
const newPrompt = data["system-prompt"];
if (newPrompt !== serverSystemPrompt) {
setServerSystemPrompt(newPrompt);
setSystemPrompt(newPrompt)
setSnack("System prompt updated", "success");
}
} catch (error) {
console.error('Fetch error:', error);
setSnack("System prompt update failed", "error");
}
};
sendSystemPrompt(systemPrompt);
}, [systemPrompt, setServerSystemPrompt, serverSystemPrompt, connectionBase, sessionId, setSnack]);
useEffect(() => {
if (sessionId === undefined) {
return;
}
const sendMessageHistoryLength = async (length: number) => {
try {
const response = await fetch(connectionBase + `/api/tunables/${sessionId}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
},
body: JSON.stringify({ "message-history-length": length }),
});
const data = await response.json();
const newLength = data["message-history-length"];
if (newLength !== messageHistoryLength) {
setMessageHistoryLength(newLength);
setSnack("Message history length updated", "success");
}
} catch (error) {
console.error('Fetch error:', error);
setSnack("Message history length update failed", "error");
}
};
sendMessageHistoryLength(messageHistoryLength);
}, [messageHistoryLength, setMessageHistoryLength, connectionBase, sessionId, setSnack]);
const reset = async (types: ("rags" | "tools" | "history" | "system-prompt" | "message-history-length")[], message: string = "Update successful.") => {
try {
const response = await fetch(connectionBase + `/api/reset/${sessionId}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
},
body: JSON.stringify({ "reset": types }),
});
if (response.ok) {
const data = await response.json();
if (data.error) {
throw Error()
}
for (const [key, value] of Object.entries(data)) {
switch (key) {
case "rags":
setRags(value as Tool[]);
break;
case "tools":
setTools(value as Tool[]);
break;
case "system-prompt":
setServerSystemPrompt((value as any)["system-prompt"].trim());
setSystemPrompt((value as any)["system-prompt"].trim());
break;
case "history":
console.log('TODO: handle history reset');
break;
}
}
setSnack(message, "success");
} else {
throw Error(`${{ status: response.status, message: response.statusText }}`);
}
} catch (error) {
console.error('Fetch error:', error);
setSnack("Unable to restore defaults", "error");
}
};
// Get the system information
useEffect(() => {
if (systemInfo !== undefined || sessionId === undefined) {
return;
}
fetch(connectionBase + `/api/system-info/${sessionId}`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
},
})
.then(response => response.json())
.then(data => {
setSystemInfo(data);
})
.catch(error => {
console.error('Error obtaining system information:', error);
setSnack("Unable to obtain system information.", "error");
});
}, [systemInfo, setSystemInfo, connectionBase, setSnack, sessionId])
useEffect(() => {
setEditSystemPrompt(systemPrompt);
}, [systemPrompt, setEditSystemPrompt]);
const toggleRag = async (tool: Tool) => {
tool.enabled = !tool.enabled
try {
const response = await fetch(connectionBase + `/api/rags/${sessionId}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
},
body: JSON.stringify({ "tool": tool?.name, "enabled": tool.enabled }),
});
const rags = await response.json();
setRags([...rags])
setSnack(`${tool?.name} ${tool.enabled ? "enabled" : "disabled"}`);
} catch (error) {
console.error('Fetch error:', error);
setSnack(`${tool?.name} ${tool.enabled ? "enabling" : "disabling"} failed.`, "error");
tool.enabled = !tool.enabled
}
};
const toggleTool = async (tool: Tool) => {
tool.enabled = !tool.enabled
try {
const response = await fetch(connectionBase + `/api/tools/${sessionId}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
},
body: JSON.stringify({ "tool": tool?.function?.name, "enabled": tool.enabled }),
});
const tools = await response.json();
setTools([...tools])
setSnack(`${tool?.function?.name} ${tool.enabled ? "enabled" : "disabled"}`);
} catch (error) {
console.error('Fetch error:', error);
setSnack(`${tool?.function?.name} ${tool.enabled ? "enabling" : "disabling"} failed.`, "error");
tool.enabled = !tool.enabled
}
};
// If the tools have not been set, fetch them from the server
useEffect(() => {
if (tools.length || sessionId === undefined) {
return;
}
const fetchTools = async () => {
try {
// Make the fetch request with proper headers
const response = await fetch(connectionBase + `/api/tools/${sessionId}`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
},
});
if (!response.ok) {
throw Error();
}
const tools = await response.json();
setTools(tools);
} catch (error: any) {
setSnack("Unable to fetch tools", "error");
console.error(error);
}
}
fetchTools();
}, [sessionId, tools, setTools, setSnack, connectionBase]);
// If the RAGs have not been set, fetch them from the server
useEffect(() => {
if (rags.length || sessionId === undefined) {
return;
}
const fetchRags = async () => {
try {
// Make the fetch request with proper headers
const response = await fetch(connectionBase + `/api/rags/${sessionId}`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
},
});
if (!response.ok) {
throw Error();
}
const rags = await response.json();
setRags(rags);
} catch (error: any) {
setSnack("Unable to fetch RAGs", "error");
console.error(error);
}
}
fetchRags();
}, [sessionId, rags, setRags, setSnack, connectionBase]);
// If the systemPrompt has not been set, fetch it from the server
useEffect(() => {
if (serverSystemPrompt !== "" || sessionId === undefined) {
return;
}
const fetchTunables = async () => {
// Make the fetch request with proper headers
const response = await fetch(connectionBase + `/api/tunables/${sessionId}`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
},
});
const data = await response.json();
const serverSystemPrompt = data["system-prompt"].trim();
setServerSystemPrompt(serverSystemPrompt);
setSystemPrompt(serverSystemPrompt);
setMessageHistoryLength(data["message-history-length"]);
}
fetchTunables();
}, [sessionId, serverSystemPrompt, setServerSystemPrompt, connectionBase]);
const toggle = async (type: string, index: number) => {
switch (type) {
case "rag":
toggleRag(rags[index])
break;
case "tool":
toggleTool(tools[index]);
}
};
const handleKeyPress = (event: any) => {
if (event.key === 'Enter' && event.ctrlKey) {
switch (event.target.id) {
case 'SystemPromptInput':
setSystemPrompt(editSystemPrompt);
break;
}
}
};
return (<div className="Controls">
<Typography component="span" sx={{ mb: 1 }}>
You can change the information available to the LLM by adjusting the following settings:
</Typography>
<Accordion>
<AccordionSummary expandIcon={<ExpandMoreIcon />}>
<Typography component="span">System Prompt</Typography>
</AccordionSummary>
<AccordionActions style={{ flexDirection: "column" }}>
<TextField
variant="outlined"
fullWidth
multiline
type="text"
value={editSystemPrompt}
onChange={(e) => setEditSystemPrompt(e.target.value)}
onKeyDown={handleKeyPress}
placeholder="Enter the new system prompt.."
id="SystemPromptInput"
/>
<div style={{ display: "flex", flexDirection: "row", gap: "8px", paddingTop: "8px" }}>
<Button variant="contained" disabled={editSystemPrompt === systemPrompt} onClick={() => { setSystemPrompt(editSystemPrompt); }}>Set</Button>
<Button variant="outlined" onClick={() => { reset(["system-prompt"], "System prompt reset."); }} color="error">Reset</Button>
</div>
</AccordionActions>
</Accordion>
<Accordion>
<AccordionSummary expandIcon={<ExpandMoreIcon />}>
<Typography component="span">Tunables</Typography>
</AccordionSummary>
<AccordionActions style={{ flexDirection: "column" }}>
<TextField
id="outlined-number"
label="Message history"
type="number"
helperText="Only use this many messages as context. 0 = All. Keeping this low will reduce context growth and improve performance."
value={messageHistoryLength}
onChange={(e: any) => setMessageHistoryLength(e.target.value)}
slotProps={{
htmlInput: {
min: 0
},
inputLabel: {
shrink: true,
},
}}
/>
</AccordionActions>
</Accordion>
<Accordion>
<AccordionSummary expandIcon={<ExpandMoreIcon />}>
<Typography component="span">Tools</Typography>
</AccordionSummary>
<AccordionDetails>
These tools can be made available to the LLM for obtaining real-time information from the Internet. The description provided to the LLM is provided for reference.
</AccordionDetails>
<AccordionActions>
<FormGroup sx={{ p: 1 }}>
{
tools.map((tool, index) =>
<Box key={index}>
<Divider />
<FormControlLabel control={<Switch checked={tool.enabled} />} onChange={() => toggle("tool", index)} label={tool?.function?.name} />
<Typography sx={{ fontSize: "0.8rem", mb: 1 }}>{tool?.function?.description}</Typography>
</Box>
)
}</FormGroup>
</AccordionActions>
</Accordion>
<Accordion>
<AccordionSummary expandIcon={<ExpandMoreIcon />}>
<Typography component="span">RAG</Typography>
</AccordionSummary>
<AccordionDetails>
These RAG databases can be enabled / disabled for adding additional context based on the chat request.
</AccordionDetails>
<AccordionActions>
<FormGroup sx={{ p: 1 }}>
{
rags.map((rag, index) =>
<Box key={index}>
<Divider />
<FormControlLabel control={<Switch checked={rag.enabled} />} onChange={() => toggle("rag", index)} label={rag?.name} />
<Typography>{rag?.description}</Typography>
</Box>
)
}</FormGroup>
</AccordionActions>
</Accordion>
<Accordion>
<AccordionSummary expandIcon={<ExpandMoreIcon />}>
<Typography component="span">System Information</Typography>
</AccordionSummary>
<AccordionDetails>
The server is running on the following hardware:
</AccordionDetails>
<AccordionActions>
<SystemInfoComponent systemInfo={systemInfo} />
</AccordionActions>
</Accordion>
<Button startIcon={<ResetIcon />} onClick={() => { reset(["history"], "History cleared."); }}>Clear Backstory History</Button>
<Button onClick={() => { reset(["rags", "tools", "system-prompt", "message-history-length"], "Default settings restored.") }}>Reset system prompt, tunables, and RAG to defaults</Button>
</div>);
}
// Public module surface: the Controls component and its props type.
export type {
  ControlsParams
};
export {
  Controls
};

View File

@ -0,0 +1,446 @@
import React, { useState, useImperativeHandle, forwardRef, useEffect, useRef, useCallback } from 'react';
import TextField from '@mui/material/TextField';
import Typography from '@mui/material/Typography';
import Tooltip from '@mui/material/Tooltip';
import Button from '@mui/material/Button';
import Box from '@mui/material/Box';
import SendIcon from '@mui/icons-material/Send';
import PropagateLoader from "react-spinners/PropagateLoader";
import { Message, MessageList } from './Message';
import { SeverityType } from './Snack';
import { ContextStatus } from './ContextStatus';
import { MessageData } from './MessageMeta';
// Markdown shown as the assistant's first message. <ChatQuery .../> tags are
// rendered as clickable suggested queries by the message renderer.
const welcomeMarkdown = `
# Welcome to Backstory
Backstory was written by James Ketrenos in order to provide answers to
questions potential employers may have about his work history.
You can ask things like:
<ChatQuery text="What is James Ketrenos' work history?"/>
<ChatQuery text="What programming languages has James used?"/>
<ChatQuery text="What are James' professional strengths?"/>
<ChatQuery text="What are today's headlines on CNBC.com?"/>
You can click the text above to submit that query, or type it in yourself (or whatever questions you may have.)
Backstory is a RAG enabled expert system with access to real-time data running self-hosted
(no cloud) versions of industry leading Large and Small Language Models (LLM/SLMs).
As with all LLM interactions, the results may not be 100% accurate. If you have questions about my career, I'd love to hear from you. You can send me an email at **james_backstory@ketrenos.com**.`;
// Seed message shown once a session exists.
const welcomeMessage: MessageData = {
  "role": "assistant", "content": welcomeMarkdown
};
// Placeholder shown while the session id is still being created.
const loadingMessage: MessageData = { "role": "assistant", "content": "Instancing chat session..." };
// Which conversational backend the component drives.
type ConversationMode = 'chat' | 'fact-check' | 'system';
// Imperative API exposed to parents via the forwarded ref.
interface ConversationHandle {
  submitQuery: () => void;
}
// Props for the Conversation component; `sessionId` is undefined until the
// server has instanced a session.
interface ConversationProps {
  type: ConversationMode
  prompt: string,
  connectionBase: string,
  sessionId: string | undefined,
  setSnack: (message: string, severity: SeverityType) => void,
};
/**
 * Conversation -- streaming chat panel bound to a backend session.
 *
 * Renders the message history, a query input, a response-time countdown, and
 * context-window usage indicators. Queries are POSTed to
 * `${connectionBase}/api/chat/${sessionId}`; the newline-delimited JSON
 * response stream is folded into the conversation as chunks arrive
 * (status: 'processing' | 'done' | 'error').
 *
 * Parents may trigger submission of the current input through the forwarded
 * ref (see ConversationHandle). NOTE(review): `prompt` and `type` are accepted
 * per ConversationProps but are not referenced by this implementation yet.
 */
const Conversation = forwardRef<ConversationHandle, ConversationProps>(({ prompt, type, sessionId, setSnack, connectionBase }: ConversationProps, ref) => {
  const [query, setQuery] = useState<string>("");
  const [contextUsedPercentage, setContextUsedPercentage] = useState<number>(0);
  const [processing, setProcessing] = useState<boolean>(false);
  const [countdown, setCountdown] = useState<number>(0);
  const [conversation, setConversation] = useState<MessageList>([]);
  // Handle of the active countdown interval; null when no countdown is running.
  const timerRef = useRef<any>(null);
  // Tokens/sec observed on the previous exchange; seeds the response-time estimate.
  const [lastEvalTPS, setLastEvalTPS] = useState<number>(35);
  const [lastPromptTPS, setLastPromptTPS] = useState<number>(430);
  const [contextStatus, setContextStatus] = useState<ContextStatus>({ context_used: 0, max_context: 0 });
  // Latch so the "context almost exhausted" snack fires once per threshold crossing.
  const [contextWarningShown, setContextWarningShown] = useState<boolean>(false);

  // Fetch current context-window usage for this session from the server.
  const updateContextStatus = useCallback(() => {
    const fetchContextStatus = async () => {
      try {
        const response = await fetch(connectionBase + `/api/context-status/${sessionId}`, {
          method: 'GET',
          headers: {
            'Content-Type': 'application/json',
          },
        });
        if (!response.ok) {
          throw new Error(`Server responded with ${response.status}: ${response.statusText}`);
        }
        const data = await response.json();
        setContextStatus(data);
      }
      catch (error) {
        console.error('Error getting context status:', error);
        setSnack("Unable to obtain context status.", "error");
      }
    };
    fetchContextStatus();
  }, [setContextStatus, connectionBase, setSnack, sessionId]);

  // Set the initial chat history to "loading" or the welcome message if loaded.
  useEffect(() => {
    if (sessionId === undefined) {
      setConversation([loadingMessage]);
    } else {
      fetch(connectionBase + `/api/history/${sessionId}`, {
        method: 'GET',
        headers: {
          'Content-Type': 'application/json',
        },
      })
        .then(response => response.json())
        .then(data => {
          console.log(`Session id: ${sessionId} -- history returned from server with ${data.length} entries`)
          setConversation([
            welcomeMessage,
            ...data
          ]);
        })
        .catch(error => {
          // BUGFIX: this catch guards the history fetch; the log previously
          // said "Error generating session ID", which was misleading.
          console.error('Error fetching chat history:', error);
          setSnack("Unable to obtain chat history.", "error");
        });
      updateContextStatus();
    }
  }, [sessionId, setConversation, updateContextStatus, connectionBase, setSnack]);

  // BUGFIX: clear any pending countdown interval on unmount so the timer does
  // not keep firing (and calling setCountdown) after the component is gone.
  useEffect(() => {
    return () => {
      if (timerRef.current) {
        clearInterval(timerRef.current);
        timerRef.current = null;
      }
    };
  }, []);

  // True when the window is scrolled to (within 16px of) the page bottom.
  const isScrolledToBottom = useCallback(() => {
    // Current vertical scroll position
    const scrollTop = window.scrollY || document.documentElement.scrollTop;
    // Total height of the page content
    const scrollHeight = document.documentElement.scrollHeight;
    // Height of the visible window
    const clientHeight = document.documentElement.clientHeight;
    // If we're at the bottom (allowing a small buffer of 16px)
    return scrollTop + clientHeight >= scrollHeight - 16;
  }, []);

  const scrollToBottom = useCallback(() => {
    console.log("Scroll to bottom");
    window.scrollTo({
      top: document.body.scrollHeight,
    });
  }, []);

  // Start a 1s-tick countdown of `seconds`; replaces any countdown in flight.
  const startCountdown = (seconds: number) => {
    if (timerRef.current) clearInterval(timerRef.current);
    setCountdown(seconds);
    timerRef.current = setInterval(() => {
      setCountdown((prev) => {
        if (prev <= 1) {
          clearInterval(timerRef.current);
          timerRef.current = null;
          // Keep the view pinned to the bottom if the user was already there.
          if (isScrolledToBottom()) {
            setTimeout(() => {
              scrollToBottom();
            }, 50)
          }
          return 0;
        }
        return prev - 1;
      });
    }, 1000);
  };

  // Passed down to Message so embedded suggestion chips can submit queries.
  const submitQuery = (text: string) => {
    sendQuery(text);
  }

  const stopCountdown = () => {
    if (timerRef.current) {
      clearInterval(timerRef.current);
      timerRef.current = null;
      setCountdown(0);
    }
  };

  // Enter in the query input submits the current text.
  const handleKeyPress = (event: any) => {
    if (event.key === 'Enter') {
      switch (event.target.id) {
        case 'QueryInput':
          sendQuery(query);
          break;
      }
    }
  };

  useImperativeHandle(ref, () => ({
    submitQuery: () => {
      sendQuery(query);
    }
  }));

  // If context status changes, show a warning if necessary. If it drops
  // back below the threshold, clear the warning trigger
  useEffect(() => {
    const context_used_percentage = Math.round(100 * contextStatus.context_used / contextStatus.max_context);
    if (context_used_percentage >= 90 && !contextWarningShown) {
      setSnack(`${context_used_percentage}% of context used. You may wish to start a new chat.`, "warning");
      setContextWarningShown(true);
    }
    if (context_used_percentage < 90 && contextWarningShown) {
      setContextWarningShown(false);
    }
    setContextUsedPercentage(context_used_percentage)
  }, [contextStatus, setContextWarningShown, contextWarningShown, setContextUsedPercentage, setSnack]);

  /**
   * Send `query` to the chat endpoint and stream the reply.
   *
   * Appends the user message immediately, inserts a transient "Processing
   * request..." assistant message (tagged with a unique id), then consumes the
   * newline-delimited JSON stream, updating/replacing that message as
   * 'processing'/'done'/'error' updates arrive. Scrolling only auto-follows
   * when the user was already at the bottom.
   */
  const sendQuery = async (query: string) => {
    if (!query.trim()) return;
    //setTab(0);
    const userMessage: MessageData[] = [{ role: 'user', content: query }];
    let scrolledToBottom;

    // Add user message to conversation
    const newConversation: MessageList = [
      ...conversation,
      ...userMessage
    ];
    setConversation(newConversation);
    scrollToBottom();

    // Clear input
    setQuery('');

    try {
      scrolledToBottom = isScrolledToBottom();
      setProcessing(true);

      // Create a unique ID for the processing message
      const processingId = Date.now().toString();

      // Add initial processing message
      setConversation(prev => [
        ...prev,
        { role: 'assistant', content: 'Processing request...', id: processingId, isProcessing: true }
      ]);
      if (scrolledToBottom) {
        setTimeout(() => { scrollToBottom() }, 50);
      }

      // Make the fetch request with proper headers
      const response = await fetch(connectionBase + `/api/chat/${sessionId}`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Accept': 'application/json',
        },
        body: JSON.stringify({ role: 'user', content: query.trim() }),
      });

      // We'll guess that the response will be around 500 tokens...
      const token_guess = 500;
      const estimate = Math.round(token_guess / lastEvalTPS + contextStatus.context_used / lastPromptTPS);

      scrolledToBottom = isScrolledToBottom();
      setSnack(`Query sent. Response estimated in ${estimate}s.`, "info");
      startCountdown(Math.round(estimate));
      if (scrolledToBottom) {
        setTimeout(() => { scrollToBottom() }, 50);
      }

      if (!response.ok) {
        throw new Error(`Server responded with ${response.status}: ${response.statusText}`);
      }
      if (!response.body) {
        throw new Error('Response body is null');
      }

      // Set up stream processing with explicit chunking
      const reader = response.body.getReader();
      const decoder = new TextDecoder();
      let buffer = '';

      while (true) {
        const { done, value } = await reader.read();
        if (done) {
          break;
        }
        const chunk = decoder.decode(value, { stream: true });

        // Process each complete line immediately
        buffer += chunk;
        let lines = buffer.split('\n');
        buffer = lines.pop() || ''; // Keep incomplete line in buffer

        for (const line of lines) {
          if (!line.trim()) continue;
          try {
            const update = JSON.parse(line);

            // Force an immediate state update based on the message type
            if (update.status === 'processing') {
              scrolledToBottom = isScrolledToBottom();
              // Update processing message with immediate re-render
              setConversation(prev => prev.map(msg =>
                msg.id === processingId
                  ? { ...msg, content: update.message }
                  : msg
              ));
              if (scrolledToBottom) {
                setTimeout(() => { scrollToBottom() }, 50);
              }
              // Add a small delay to ensure React has time to update the UI
              await new Promise(resolve => setTimeout(resolve, 0));
            } else if (update.status === 'done') {
              // Replace processing message with final result
              scrolledToBottom = isScrolledToBottom();
              setConversation(prev => [
                ...prev.filter(msg => msg.id !== processingId),
                update.message
              ]);
              const metadata = update.message.metadata;
              // Durations are reported in nanoseconds; convert to tokens/sec.
              const evalTPS = metadata.eval_count * 10 ** 9 / metadata.eval_duration;
              const promptTPS = metadata.prompt_eval_count * 10 ** 9 / metadata.prompt_eval_duration;
              setLastEvalTPS(evalTPS ? evalTPS : 35);
              // BUGFIX: fall back to the prompt-TPS initial default (430);
              // previously this reused the eval-TPS default of 35, which
              // inflated subsequent response-time estimates.
              setLastPromptTPS(promptTPS ? promptTPS : 430);
              updateContextStatus();
              if (scrolledToBottom) {
                setTimeout(() => { scrollToBottom() }, 50);
              }
            } else if (update.status === 'error') {
              // Show error
              scrolledToBottom = isScrolledToBottom();
              setConversation(prev => [
                ...prev.filter(msg => msg.id !== processingId),
                { role: 'assistant', type: 'error', content: update.message }
              ]);
              if (scrolledToBottom) {
                setTimeout(() => { scrollToBottom() }, 50);
              }
            }
          } catch (e) {
            setSnack("Error processing query", "error")
            console.error('Error parsing JSON:', e, line);
          }
        }
      }

      // Process any remaining buffer content
      if (buffer.trim()) {
        try {
          const update = JSON.parse(buffer);
          if (update.status === 'done') {
            scrolledToBottom = isScrolledToBottom();
            setConversation(prev => [
              ...prev.filter(msg => msg.id !== processingId),
              update.message
            ]);
            if (scrolledToBottom) {
              setTimeout(() => { scrollToBottom() }, 500);
            }
          }
        } catch (e) {
          setSnack("Error processing query", "error")
        }
      }

      scrolledToBottom = isScrolledToBottom();
      stopCountdown();
      setProcessing(false);
      if (scrolledToBottom) {
        setTimeout(() => { scrollToBottom() }, 50);
      }
    } catch (error) {
      console.error('Fetch error:', error);
      setSnack("Unable to process query", "error");
      scrolledToBottom = isScrolledToBottom();
      // Drop any transient processing message and surface the failure inline.
      setConversation(prev => [
        ...prev.filter(msg => !msg.isProcessing),
        { role: 'assistant', type: 'error', content: `Error: ${error}` }
      ]);
      setProcessing(false);
      stopCountdown();
      if (scrolledToBottom) {
        setTimeout(() => { scrollToBottom() }, 50);
      }
    }
  };

  return (
    <Box className="ConversationContainer" sx={{ display: "flex", flexDirection: "column", height: "100%", overflowY: "auto" }}>
      <Box className="Conversation" sx={{ flexGrow: 2, p: 1 }}>
        {conversation.map((message, index) => <Message key={index} submitQuery={submitQuery} message={message} />)}
        <Box sx={{
          display: "flex",
          flexDirection: "column",
          alignItems: "center",
          justifyContent: "center",
          mb: 1
        }}>
          <PropagateLoader
            size="10px"
            loading={processing}
            aria-label="Loading Spinner"
            data-testid="loader"
          />
          {processing === true && countdown > 0 && (
            <Box
              sx={{
                pt: 1,
                fontSize: "0.7rem",
                color: "darkgrey"
              }}
            >Estimated response time: {countdown}s</Box>
          )}
        </Box>
        <Box sx={{ ml: "0.25rem", fontSize: "0.6rem", color: "darkgrey", display: "flex", flexDirection: "row", gap: 1, mt: "auto" }}>
          Context used: {contextUsedPercentage}% {contextStatus.context_used}/{contextStatus.max_context}
          {
            contextUsedPercentage >= 90 ? <Typography sx={{ fontSize: "0.6rem", color: "red" }}>WARNING: Context almost exhausted. You should start a new chat.</Typography>
              : (contextUsedPercentage >= 50 ? <Typography sx={{ fontSize: "0.6rem", color: "orange" }}>NOTE: Context is getting long. Queries will be slower, and the LLM may stop issuing tool calls.</Typography>
                : <></>)
          }
        </Box>
      </Box>
      <Box className="Query" sx={{ display: "flex", flexDirection: "row", p: 1 }}>
        <TextField
          variant="outlined"
          disabled={processing}
          fullWidth
          type="text"
          value={query}
          onChange={(e) => setQuery(e.target.value)}
          onKeyDown={handleKeyPress}
          placeholder="Enter your question..."
          id="QueryInput"
        />
        <Tooltip title="Send">
          <Button sx={{ m: 1 }} variant="contained" onClick={() => { sendQuery(query); }}><SendIcon /></Button>
        </Tooltip>
      </Box>
    </Box>
  );
});
// Type-only exports (erased at runtime).
export type {
  ConversationProps,
  ConversationHandle
};
// Component export.
export {
  Conversation
};

View File

@ -80,8 +80,8 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
/**
* Trigger resume generation and update UI state
*/
const triggerGeneration = useCallback((jobDescription: string | undefined) => {
if (jobDescription === undefined) {
const triggerGeneration = useCallback((description: string | undefined) => {
if (description === undefined) {
setProcessing(undefined);
setResume(undefined);
setActiveTab(0);
@ -89,7 +89,7 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
}
setProcessing("resume");
setTimeout(() => { setActiveTab(1); }, 250); // Switch to resume view on mobile
generateResume(jobDescription);
generateResume(description);
}, [generateResume, setProcessing, setActiveTab, setResume]);
/**
@ -108,6 +108,10 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
setTimeout(() => { setActiveTab(2); }, 250); // Switch to resume view on mobile
}, [factCheck, setResume, setProcessing, setActiveTab, setFacts]);
useEffect(() => {
setEditJobDescription(jobDescription);
}, [jobDescription, setEditJobDescription]);
/**
* Switch to resume tab when resume become available
*/
@ -157,10 +161,10 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
};
const renderJobDescriptionView = () => {
const jobDescription = [];
const children = [];
if (resume === undefined && processing === undefined) {
jobDescription.push(
children.push(
<Document key="jobDescription" sx={{ display: "flex", flexGrow: 1 }} title="">
<TextField
variant="outlined"
@ -181,10 +185,10 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
</Document>
);
} else {
jobDescription.push(<MuiMarkdown key="jobDescription" >{editJobDescription}</MuiMarkdown>)
children.push(<MuiMarkdown key="jobDescription" >{editJobDescription}</MuiMarkdown>)
}
jobDescription.push(
children.push(
<Box key="jobActions" sx={{ display: "flex", justifyContent: "center", flexDirection: "row" }}>
<IconButton
sx={{ display: "flex", margin: 'auto 0px' }}
@ -210,7 +214,7 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
</Box>
);
return jobDescription;
return children;
}
/**
@ -421,7 +425,7 @@ const ResumeActionCard: React.FC<ResumeActionCardProps> = ({ resume, processing,
<Card sx={{ display: "flex", overflow: "auto", minHeight: "fit-content", p: 1, flexDirection: "column" }}>
{resume !== undefined || processing === "resume" ? (
<Typography>
<b>NOTE:</b> As with all LLMs, hallucination is always a possibility. If the generated resume seems too good to be true, <b>Fact Check</b> or, expand the <b>LLM information for this query</b> section (at the end of the resume) and click the links in the <b>Top RAG</b> matches to view the relavent RAG source document to read the details. Or go back to 'Backstory' and ask a question.
<b>NOTE:</b> As with all LLMs, hallucination is always a possibility. Click <b>Fact Check</b> to have the LLM analyze the generated resume vs. the actual resume.
</Typography>
) : (
<Typography>

View File

@ -1,12 +1,15 @@
import { useState } from 'react';
import { useState, useRef } from 'react';
import Box from '@mui/material/Box';
import Button from '@mui/material/Button';
import IconButton from '@mui/material/IconButton';
import CardContent from '@mui/material/CardContent';
import CardActions from '@mui/material/CardActions';
import Collapse from '@mui/material/Collapse';
import Typography from '@mui/material/Typography';
import ExpandMoreIcon from '@mui/icons-material/ExpandMore';
import { ExpandMore } from './ExpandMore';
import ContentCopyIcon from '@mui/icons-material/ContentCopy';
import CheckIcon from '@mui/icons-material/Check';
import { MessageData, MessageMeta } from './MessageMeta';
import { ChatBubble } from './ChatBubble';
@ -38,6 +41,19 @@ const ChatQuery = ({ text, submitQuery }: ChatQueryInterface) => {
const Message = ({ message, submitQuery, isFullWidth }: MessageInterface) => {
const [expanded, setExpanded] = useState<boolean>(false);
const [copied, setCopied] = useState(false);
const textFieldRef = useRef(null);
const handleCopy = () => {
if (message === undefined || message.content === undefined) {
return;
}
navigator.clipboard.writeText(message.content.trim()).then(() => {
setCopied(true);
setTimeout(() => setCopied(false), 2000); // Reset after 2 seconds
});
};
const handleExpandClick = () => {
setExpanded(!expanded);
@ -47,15 +63,43 @@ const Message = ({ message, submitQuery, isFullWidth }: MessageInterface) => {
return (<></>);
}
if (message.content === undefined) {
console.info("Message content is undefined");
return (<></>);
}
const formattedContent = message.content.trim();
return (
<ChatBubble isFullWidth={isFullWidth} role={message.role} sx={{ flexGrow: 1, pb: message.metadata ? 0 : "8px", m: 0, mb: 1, mt: 1, overflowX: "auto" }}>
<CardContent>
<ChatBubble className="Message" isFullWidth={isFullWidth} role={message.role} sx={{ flexGrow: 1, pb: message.metadata ? 0 : "8px", m: 0, mb: 1, mt: 1, overflowX: "auto" }}>
<CardContent ref={textFieldRef} sx={{ position: "relative", display: "flex", flexDirection: "column", overflowX: "auto" }}>
<IconButton
onClick={handleCopy}
sx={{
position: 'absolute',
top: 8,
right: 8,
bgcolor: 'background.paper',
'&:hover': { bgcolor: 'action.hover' },
}}
size="small"
color={copied ? "success" : "default"}
>
{copied ? <CheckIcon /> : <ContentCopyIcon />}
</IconButton>
{message.role !== 'user' ?
<StyledMarkdown {...{ content: formattedContent, submitQuery }} />
<StyledMarkdown
className="MessageContent"
sx={{ display: "flex", color: 'text.secondary' }}
{...{ content: formattedContent, submitQuery }} />
:
<Typography variant="body2" sx={{ color: 'text.secondary' }}>
<Typography
className="MessageContent"
ref={textFieldRef}
variant="body2"
sx={{ display: "flex", color: 'text.secondary' }}>
{message.content}
</Typography>
}
@ -86,6 +130,7 @@ export type {
MessageInterface,
MessageList,
};
export {
Message,
ChatQuery,

View File

@ -100,7 +100,7 @@ const MessageMeta = ({ metadata }: MessageMetaInterface) => {
</Accordion>
}
{
metadata.rag.name !== undefined &&
metadata?.rag?.name !== undefined &&
<Accordion>
<AccordionSummary expandIcon={<ExpandMoreIcon />}>
<Box sx={{ fontSize: "0.8rem" }}>

View File

@ -1,13 +1,11 @@
import { useState, useCallback, } from 'react';
import { useState, useCallback, useEffect } from 'react';
import Box from '@mui/material/Box';
import { SeverityType } from './Snack';
import { ContextStatus } from './ContextStatus';
import { MessageData } from './MessageMeta';
import { MessageData, MessageMetadata } from './MessageMeta';
import { DocumentViewer } from './DocumentViewer';
interface ResumeBuilderProps {
scrollToBottom: () => void,
isScrolledToBottom: () => boolean,
setProcessing: (processing: boolean) => void,
processing: boolean,
connectionBase: string,
@ -17,14 +15,20 @@ interface ResumeBuilderProps {
setResume: (resume: MessageData | undefined) => void,
facts: MessageData | undefined,
setFacts: (facts: MessageData | undefined) => void,
jobDescription: string | undefined,
setJobDescription: (jobDescription: string | undefined) => void
};
const ResumeBuilder = ({ jobDescription, setJobDescription, facts, setFacts, resume, setResume, setProcessing, processing, connectionBase, sessionId, setSnack }: ResumeBuilderProps) => {
type Resume = {
resume: MessageData | undefined,
fact_check: MessageData | undefined,
job_description: string,
metadata: MessageMetadata
};
const ResumeBuilder = ({ facts, setFacts, resume, setResume, setProcessing, processing, connectionBase, sessionId, setSnack }: ResumeBuilderProps) => {
const [lastEvalTPS, setLastEvalTPS] = useState<number>(35);
const [lastPromptTPS, setLastPromptTPS] = useState<number>(430);
const [contextStatus, setContextStatus] = useState<ContextStatus>({ context_used: 0, max_context: 0 });
const [jobDescription, setJobDescription] = useState<string | undefined>(undefined);
const updateContextStatus = useCallback(() => {
fetch(connectionBase + `/api/context-status/${sessionId}`, {
@ -43,6 +47,49 @@ const ResumeBuilder = ({ jobDescription, setJobDescription, facts, setFacts, res
});
}, [setContextStatus, connectionBase, setSnack, sessionId]);
// If the jobDescription and resume have not been set, fetch them from the server
useEffect(() => {
if (sessionId === undefined) {
return;
}
if (jobDescription !== undefined) {
return;
}
const fetchResume = async () => {
try {
// Make the fetch request with proper headers
const response = await fetch(connectionBase + `/api/resume/${sessionId}`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
},
});
if (!response.ok) {
throw Error();
}
const data: Resume[] = await response.json();
if (data.length) {
const lastResume = data[data.length - 1];
console.log(lastResume);
setJobDescription(lastResume['job_description']);
setResume(lastResume.resume);
if (lastResume['fact_check'] !== undefined && lastResume['fact_check'] !== null) {
lastResume['fact_check'].role = 'info';
setFacts(lastResume['fact_check'])
} else {
setFacts(undefined)
}
}
} catch (error: any) {
setSnack("Unable to fetch resume", "error");
console.error(error);
}
}
fetchResume();
}, [sessionId, resume, jobDescription, setResume, setJobDescription, setSnack, setFacts, connectionBase]);
// const startCountdown = (seconds: number) => {
// if (timerRef.current) clearInterval(timerRef.current);
// setCountdown(seconds);
@ -75,8 +122,8 @@ const ResumeBuilder = ({ jobDescription, setJobDescription, facts, setFacts, res
return (<></>);
}
const generateResume = async (jobDescription: string) => {
if (!jobDescription.trim()) return;
const generateResume = async (description: string) => {
if (!description.trim()) return;
setResume(undefined);
setFacts(undefined);
@ -93,7 +140,7 @@ const ResumeBuilder = ({ jobDescription, setJobDescription, facts, setFacts, res
'Content-Type': 'application/json',
'Accept': 'application/json',
},
body: JSON.stringify({ content: jobDescription.trim() }),
body: JSON.stringify({ content: description.trim() }),
});
// We'll guess that the response will be around 500 tokens...

View File

@ -5,12 +5,13 @@ import { Link } from '@mui/material';
import { ChatQuery } from './Message';
interface StyledMarkdownProps {
className?: string,
content: string,
submitQuery?: (query: string) => void,
[key: string]: any, // For any additional props
};
const StyledMarkdown: React.FC<StyledMarkdownProps> = ({ content, submitQuery, ...props }) => {
const StyledMarkdown: React.FC<StyledMarkdownProps> = ({ className, content, submitQuery, ...props }) => {
const theme = useTheme();
let options: any = {
@ -42,7 +43,7 @@ const StyledMarkdown: React.FC<StyledMarkdownProps> = ({ content, submitQuery, .
};
}
return <MuiMarkdown {...options} children={content} {...props}/>;
return <MuiMarkdown className={className} {...options} children={content} {...props} />;
};
export { StyledMarkdown };

View File

@ -307,7 +307,7 @@ const VectorVisualizer: React.FC<VectorVisualizerProps> = ({ setSnack, connectio
</Card>
{ queryEmbedding !== undefined &&
<Card sx={{ display: 'flex', flexDirection: 'column', justifyContent: 'center', alignItems: 'center', mt: 1, pb: 0 }}>
<Typography variant="h6" sx={{ p: 1, pt: 0 }}>
<Typography variant="h6" sx={{ p: 1, pt: 0, maxHeight: '5rem', overflow: 'auto' }}>
Query: {queryEmbedding.query}
</Typography>
</Card>

View File

@ -136,7 +136,17 @@ DEFAULT_HISTORY_LENGTH=5
# %%
# Globals
NAME = "James Ketrenos"
context_tag = "INFO"
resume_intro = f"""
As an AI/ML professional specializing in creating custom solutions to new problem domains, {NAME} developed a custom
language model applications that streamline information processing and content generation. This tailored resume
was created using a Retrieval-Augmented Generation system I built to efficiently match my relevant experience
with your specific needsdemonstrating both my technical capabilities and commitment to intelligent resource
optimization.
"""
system_message = f"""
Launched on {DateTime()}.
@ -163,16 +173,20 @@ When answering queries, follow these steps:
3. Use the [JOB DESCRIPTION] provided to guide the focus, tone, and relevant skills or experience to highlight from the [WORK HISTORY].
4. Identify and emphasisze the experiences, achievements, and responsibilities from the [WORK HISTORY] that best align with the [JOB DESCRIPTION].
5. Do not use the [JOB DESCRIPTION] skills unless listed in [WORK HISTORY].
6. Do not include any information unless it is provided in [WORK HISTORY] or [INTRO].
7. Use the [INTRO] to highlight the use of AI in generating this resume.
8. Use the [WORK HISTORY] to create a polished, professional resume.
9. Do not list any locations in the resume.
Structure the resume professionally with the following sections where applicable:
* "Name: Use full name."
* "Professional Summary: A 2-4 sentence overview tailored to the job."
* "Professional Summary: A 2-4 sentence overview tailored to the job, using [INTRO] to highlight the use of AI in generating this resume."
* "Skills: A bullet list of key skills derived from the work history and relevant to the job."
* Professional Experience: A detailed list of roles, achievements, and responsibilities from the work history that relate to the job."
* Education: Include only if available in the work history."
Do not include any information unless it is provided in [WORK HISTORY].
Do not include any information unless it is provided in [WORK HISTORY] or [INTRO].
Ensure the langauge is clear, concise, and aligned with industry standards for professional resumes.
"""
@ -372,9 +386,18 @@ class WebServer:
self.file_watcher = None
self.observer = None
self.ssl_enabled = os.path.exists(defines.key_path) and os.path.exists(defines.cert_path)
if self.ssl_enabled:
allow_origins=["https://battle-linux.ketrenos.com:3000"]
else:
allow_origins=["http://battle-linux.ketrenos.com:3000"]
logging.info(f"Allowed origins: {allow_origins}")
self.app.add_middleware(
CORSMiddleware,
allow_origins=["http://battle-linux.ketrenos.com:3000"],
allow_origins=allow_origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
@ -1053,14 +1076,15 @@ class WebServer:
if chroma_results:
rag_docs.extend(chroma_results["documents"])
metadata["rag"] = { "name": rag["name"], **chroma_results }
preamble = f"The current time is {DateTime()}\n"
preamble = f"""[WORK HISTORY]:\n"""
preamble = f"[INTRO]\n{resume_intro}\n[/INTRO]\n"
preamble += f"""[WORK HISTORY]:\n"""
for doc in rag_docs:
preamble += f"{doc}\n"
resume["rag"] += f"{doc}\n"
preamble += f"\n[/WORK HISTORY]\n"
content = f"{preamble}\nUse the above WORK HISTORY to create the resume for this JOB DESCRIPTION. Do not use the JOB DESCRIPTION skills as skills the user posseses unless listed in WORK HISTORY:\n[JOB DESCRIPTION]\n{content}\n[/JOB DESCRIPTION]\n"
content = f"""{preamble}\n
Use the above [WORK HISTORY] and [INTRO] to create the resume for this [JOB DESCRIPTION]. Do not use the [JOB DESCRIPTION] in the generated resume unless the [WORK HISTORY] mentions them:\n[JOB DESCRIPTION]\n{content}\n[/JOB DESCRIPTION]\n"""
try:
# Estimate token length of new messages
@ -1152,7 +1176,22 @@ class WebServer:
def run(self, host="0.0.0.0", port=WEB_PORT, **kwargs):
try:
uvicorn.run(self.app, host=host, port=port)
if self.ssl_enabled:
logging.info(f"Starting web server at https://{host}:{port}")
uvicorn.run(
self.app,
host=host,
port=port,
ssl_keyfile=defines.key_path,
ssl_certfile=defines.cert_path
)
else:
logging.info(f"Starting web server at http://{host}:{port}")
uvicorn.run(
self.app,
host=host,
port=port
)
except KeyboardInterrupt:
if self.observer:
self.observer.stop()
@ -1181,7 +1220,6 @@ def main():
# print(f"Vectorstore created with {collection.count()} documents")
web_server = WebServer(logging, client, model)
logging.info(f"Starting web server at http://{args.web_host}:{args.web_port}")
web_server.run(host=args.web_host, port=args.web_port, use_reloader=False)

View File

@ -11,4 +11,7 @@ max_context = 2048*8*2
doc_dir = "/opt/backstory/docs/"
session_dir = "/opt/backstory/sessions"
static_content = '/opt/backstory/frontend/deployed'
resume_doc = '/opt/backstory/docs/resume/generic.txt'
resume_doc = '/opt/backstory/docs/resume/generic.txt'
# Only used for testing; backstory-prod will not use this
key_path = '/opt/backstory/src/key.pem'
cert_path = '/opt/backstory/src/cert.pem'