Fixed typos during migration

This commit is contained in:
James Ketr 2025-04-24 16:55:28 -07:00
parent e6f6aad86a
commit c1df6ffed5
12 changed files with 52 additions and 28 deletions

2
.gitignore vendored
View File

@ -3,5 +3,7 @@ cache/**
jupyter/**
ollama/**
sessions/**
sessions-prod/**
chromadb/**
chromadb-prod/**
dev-keys/**

View File

@ -337,8 +337,11 @@ RUN apt-get update \
WORKDIR /opt/ollama
# Download the nightly ollama release from ipex-llm
ENV OLLAMA_VERSION=https://github.com/intel/ipex-llm/releases/download/v2.2.0/ollama-ipex-llm-2.2.0-ubuntu.tgz
#ENV OLLAMA_VERSION=https://github.com/intel/ipex-llm/releases/download/v2.2.0/ollama-ipex-llm-2.2.0-ubuntu.tgz
#ENV OLLAMA_VERSION=https://github.com/intel/ipex-llm/releases/download/v2.3.0-nightly/ollama-ipex-llm-2.3.0b20250415-ubuntu.tgz
# NOTE: No longer at github.com/intel -- now at ipex-llm
ENV OLLAMA_VERSION=https://github.com/ipex-llm/ipex-llm/releases/download/v2.2.0/ollama-ipex-llm-2.2.0-ubuntu.tgz
RUN wget -qO - ${OLLAMA_VERSION} | \
tar --strip-components=1 -C . -xzv
@ -411,12 +414,16 @@ RUN { \
&& chmod +x /fetch-models.sh
ENV PYTHONUNBUFFERED=1
# Enable ext_intel_free_memory
ENV ZES_ENABLE_SYSMAN=1
ENV ZES_ENABLE_SYSMAN=1
# Use all GPUs
ENV OLLAMA_NUM_GPU=999
# Use immediate command lists
ENV SYCL_PI_LEVEL_ZERO_USE_IMMEDIATE_COMMANDLISTS=1
# Use persistent cache
ENV SYCL_CACHE_PERSISTENT=1

0
dev-keys/.keep Normal file
View File

View File

@ -25,6 +25,7 @@ services:
- ./cache:/root/.cache # Persist all models and GPU kernel cache
- ./sessions:/opt/backstory/sessions:rw # Persist sessions
- ./chromadb:/opt/backstory/chromadb:rw # Persist ChromaDB
- ./dev-keys:/opt/backstory/keys:ro # Developer keys
- ./docs:/opt/backstory/docs:ro # Live mount of RAG content
- ./src:/opt/backstory/src:rw # Live mount server src
- ./frontend:/opt/backstory/frontend:rw # Live mount frontend src
@ -57,7 +58,7 @@ services:
volumes:
- ./cache:/root/.cache # Persist all models and GPU kernel cache
- ./chromadb-prod:/opt/backstory/chromadb:rw # Persist ChromaDB
- ./sessions:/opt/backstory/sessions:rw # Persist sessions
- ./sessions-prod:/opt/backstory/sessions:rw # Persist sessions
- ./docs:/opt/backstory/docs:ro # Live mount of RAG content
- ./frontend:/opt/backstory/frontend:rw # Live mount frontend src
cap_add: # used for running ze-monitor within container

View File

@ -123,7 +123,6 @@ button {
flex-direction: column;
font-size: 0.9rem;
width: 100%;
/* max-width: 1024px; */
margin: 0 auto;
}

View File

@ -16,6 +16,7 @@ import IconButton from '@mui/material/IconButton';
import Box from '@mui/material/Box';
import CssBaseline from '@mui/material/CssBaseline';
import MenuIcon from '@mui/icons-material/Menu';
import { useTheme } from '@mui/material/styles';
import { ResumeBuilder } from './ResumeBuilder';
@ -83,6 +84,8 @@ const App = () => {
const isDesktop = useMediaQuery('(min-width:650px)');
const prevIsDesktopRef = useRef<boolean>(isDesktop);
const chatRef = useRef<ConversationHandle>(null);
const theme = useTheme();
const isMobile = useMediaQuery(theme.breakpoints.down('md'));
// Set the snack pop-up and open it
const setSnack: SetSnackType = useCallback<SetSnackType>((message: string, severity: SeverityType = "success") => {
@ -154,7 +157,7 @@ What would you like to know about James?
];
const chatQuestions = [
<Box sx={{ display: "flex", flexDirection: "row" }}>
<Box sx={{ display: "flex", flexDirection: isMobile ? "column" : "row" }}>
<ChatQuery text="What is James Ketrenos' work history?" submitQuery={handleSubmitChatQuery} />
<ChatQuery text="What programming languages has James used?" submitQuery={handleSubmitChatQuery} />
<ChatQuery text="What are James' professional strengths?" submitQuery={handleSubmitChatQuery} />

View File

@ -172,6 +172,9 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({
} catch (error) {
console.error('Error generating session ID:', error);
setProcessingMessage({ role: "error", content: "Unable to obtain history from server." });
setTimeout(() => {
setProcessingMessage(undefined);
}, 5000);
setSnack("Unable to obtain chat history.", "error");
}
};
@ -429,6 +432,10 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({
// Show error
scrolledToBottom = isScrolledToBottom();
setProcessingMessage({ role: 'error', content: update.message });
setTimeout(() => {
setProcessingMessage(undefined);
}, 5000);
// Add a small delay to ensure React has time to update the UI
await new Promise(resolve => setTimeout(resolve, 0));
if (scrolledToBottom) {
@ -477,6 +484,10 @@ const Conversation = forwardRef<ConversationHandle, ConversationProps>(({
setSnack("Unable to process query", "error");
scrolledToBottom = isScrolledToBottom();
setProcessingMessage({ role: 'error', content: "Unable to process query" });
setTimeout(() => {
setProcessingMessage(undefined);
}, 5000);
setProcessing(false);
stopCountdown();
if (scrolledToBottom) {

View File

@ -222,9 +222,9 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
return message;
}, []);
const renderJobDescriptionView = useCallback(() => {
const renderJobDescriptionView = useCallback((small: boolean) => {
const jobDescriptionQuestions = [
<Box sx={{ display: "flex", flexDirection: "row" }}>
<Box sx={{ display: "flex", flexDirection: small ? "column" : "row" }}>
<ChatQuery text="What are the key skills necessary for this position?" submitQuery={handleJobQuery} />
<ChatQuery text="How much should this position pay (accounting for inflation)?" submitQuery={handleJobQuery} />
</Box>,
@ -267,9 +267,9 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
/**
* Renders the resume view with loading indicator
*/
const renderResumeView = useCallback(() => {
const renderResumeView = useCallback((small: boolean) => {
const resumeQuestions = [
<Box sx={{ display: "flex", flexDirection: "row" }}>
<Box sx={{ display: "flex", flexDirection: small ? "column" : "row" }}>
<ChatQuery text="Is this resume a good fit for the provided job description?" submitQuery={handleResumeQuery} />
<ChatQuery text="Provide a more concise resume." submitQuery={handleResumeQuery} />
</Box>,
@ -310,9 +310,9 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
/**
* Renders the fact check view
*/
const renderFactCheckView = useCallback(() => {
const renderFactCheckView = useCallback((small: boolean) => {
const factsQuestions = [
<Box sx={{ display: "flex", flexDirection: "row" }}>
<Box sx={{ display: "flex", flexDirection: small ? "column" : "row" }}>
<ChatQuery text="Rewrite the resume to address any discrepancies." submitQuery={handleFactsQuery} />
</Box>,
];
@ -346,7 +346,7 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
const children = [];
children.push(
<Box key="JobDescription" className="ChatBox" sx={{ display: 'flex', flexDirection: 'column', minWidth: `${otherRatio}%`, width: `${otherRatio}%`, maxWidth: `${otherRatio}%`, p: 0, flexGrow: 1, overflowY: 'auto' }}>
{renderJobDescriptionView()}
{renderJobDescriptionView(false)}
</Box>);
/* Resume panel - conditionally rendered if resume defined, or processing is in progress */
@ -354,7 +354,7 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
children.push(
<Box key="ResumeView" className="ChatBox" sx={{ display: 'flex', flexDirection: 'column', minWidth: `${resumeRatio}%`, width: `${resumeRatio}%`, maxWidth: `${resumeRatio}%`, p: 0, flexGrow: 1, overflowY: 'auto' }}>
<Divider orientation="vertical" flexItem />
{renderResumeView()}
{renderResumeView(false)}
</Box>
);
}
@ -364,7 +364,7 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
children.push(
<Box key="FactCheckView" className="ChatBox" sx={{ display: 'flex', flexDirection: 'column', minWidth: `${otherRatio}%`, width: `${otherRatio}%`, maxWidth: `${otherRatio}%`, p: 0, flexGrow: 1, overflowY: 'auto' }}>
<Divider orientation="vertical" flexItem />
{renderFactCheckView()}
{renderFactCheckView(false)}
</Box>
);
}
@ -417,13 +417,13 @@ const DocumentViewer: React.FC<DocumentViewerProps> = ({
const getActiveMobileContent = () => {
switch (activeTab) {
case 0:
return renderJobDescriptionView();
return renderJobDescriptionView(true);
case 1:
return renderResumeView();
return renderResumeView(true);
case 2:
return renderFactCheckView();
return renderFactCheckView(true);
default:
return renderJobDescriptionView();
return renderJobDescriptionView(true);
}
};

View File

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 841.9 595.3"><g fill="#61DAFB"><path d="M666.3 296.5c0-32.5-40.7-63.3-103.1-82.4 14.4-63.6 8-114.2-20.2-130.4-6.5-3.8-14.1-5.6-22.4-5.6v22.3c4.6 0 8.3.9 11.4 2.6 13.6 7.8 19.5 37.5 14.9 75.7-1.1 9.4-2.9 19.3-5.1 29.4-19.6-4.8-41-8.5-63.5-10.9-13.5-18.5-27.5-35.3-41.6-50 32.6-30.3 63.2-46.9 84-46.9V78c-27.5 0-63.5 19.6-99.9 53.6-36.4-33.8-72.4-53.2-99.9-53.2v22.3c20.7 0 51.4 16.5 84 46.6-14 14.7-28 31.4-41.3 49.9-22.6 2.4-44 6.1-63.6 11-2.3-10-4-19.7-5.2-29-4.7-38.2 1.1-67.9 14.6-75.8 3-1.8 6.9-2.6 11.5-2.6V78.5c-8.4 0-16 1.8-22.6 5.6-28.1 16.2-34.4 66.7-19.9 130.1-62.2 19.2-102.7 49.9-102.7 82.3 0 32.5 40.7 63.3 103.1 82.4-14.4 63.6-8 114.2 20.2 130.4 6.5 3.8 14.1 5.6 22.5 5.6 27.5 0 63.5-19.6 99.9-53.6 36.4 33.8 72.4 53.2 99.9 53.2 8.4 0 16-1.8 22.6-5.6 28.1-16.2 34.4-66.7 19.9-130.1 62-19.1 102.5-49.9 102.5-82.3zm-130.2-66.7c-3.7 12.9-8.3 26.2-13.5 39.5-4.1-8-8.4-16-13.1-24-4.6-8-9.5-15.8-14.4-23.4 14.2 2.1 27.9 4.7 41 7.9zm-45.8 106.5c-7.8 13.5-15.8 26.3-24.1 38.2-14.9 1.3-30 2-45.2 2-15.1 0-30.2-.7-45-1.9-8.3-11.9-16.4-24.6-24.2-38-7.6-13.1-14.5-26.4-20.8-39.8 6.2-13.4 13.2-26.8 20.7-39.9 7.8-13.5 15.8-26.3 24.1-38.2 14.9-1.3 30-2 45.2-2 15.1 0 30.2.7 45 1.9 8.3 11.9 16.4 24.6 24.2 38 7.6 13.1 14.5 26.4 20.8 39.8-6.3 13.4-13.2 26.8-20.7 39.9zm32.3-13c5.4 13.4 10 26.8 13.8 39.8-13.1 3.2-26.9 5.9-41.2 8 4.9-7.7 9.8-15.6 14.4-23.7 4.6-8 8.9-16.1 13-24.1zM421.2 430c-9.3-9.6-18.6-20.3-27.8-32 9 .4 18.2.7 27.5.7 9.4 0 18.7-.2 27.8-.7-9 11.7-18.3 22.4-27.5 32zm-74.4-58.9c-14.2-2.1-27.9-4.7-41-7.9 3.7-12.9 8.3-26.2 13.5-39.5 4.1 8 8.4 16 13.1 24 4.7 8 9.5 15.8 14.4 23.4zM420.7 163c9.3 9.6 18.6 20.3 27.8 32-9-.4-18.2-.7-27.5-.7-9.4 0-18.7.2-27.8.7 9-11.7 18.3-22.4 27.5-32zm-74 58.9c-4.9 7.7-9.8 15.6-14.4 23.7-4.6 8-8.9 16-13 24-5.4-13.4-10-26.8-13.8-39.8 13.1-3.1 26.9-5.8 41.2-7.9zm-90.5 125.2c-35.4-15.1-58.3-34.9-58.3-50.6 0-15.7 22.9-35.6 58.3-50.6 8.6-3.7 18-7 27.7-10.1 5.7 19.6 13.2 40 22.5 60.9-9.2 20.8-16.6 
41.1-22.2 60.6-9.9-3.1-19.3-6.5-28-10.2zM310 490c-13.6-7.8-19.5-37.5-14.9-75.7 1.1-9.4 2.9-19.3 5.1-29.4 19.6 4.8 41 8.5 63.5 10.9 13.5 18.5 27.5 35.3 41.6 50-32.6 30.3-63.2 46.9-84 46.9-4.5-.1-8.3-1-11.3-2.7zm237.2-76.2c4.7 38.2-1.1 67.9-14.6 75.8-3 1.8-6.9 2.6-11.5 2.6-20.7 0-51.4-16.5-84-46.6 14-14.7 28-31.4 41.3-49.9 22.6-2.4 44-6.1 63.6-11 2.3 10.1 4.1 19.8 5.2 29.1zm38.5-66.7c-8.6 3.7-18 7-27.7 10.1-5.7-19.6-13.2-40-22.5-60.9 9.2-20.8 16.6-41.1 22.2-60.6 9.9 3.1 19.3 6.5 28.1 10.2 35.4 15.1 58.3 34.9 58.3 50.6-.1 15.7-23 35.6-58.4 50.6zM320.8 78.4z"/><circle cx="420.9" cy="296.5" r="45.7"/><path d="M520.5 78.1z"/></g></svg>

Before

Width:  |  Height:  |  Size: 2.6 KiB

0
sessions-prod/.keep Normal file
View File

View File

@ -542,11 +542,12 @@ class WebServer:
data = await request.json()
try:
session = context["sessions"][type]
response = {}
for reset in data["reset"]:
match reset:
case "system_prompt":
context["sessions"][type]["system_prompt"] = system_message
session["system_prompt"] = system_message
response["system_prompt"] = { "system_prompt": system_message }
case "rags":
context["rags"] = rags.copy()
@ -555,11 +556,11 @@ class WebServer:
context["tools"] = default_tools(tools)
response["tools"] = context["tools"]
case "history":
context["sessions"][type]["llm_history"] = []
context["sessions"][type]["user_history"] = []
context["sessions"][type]["context_tokens"] = round(len(str(context["system"])) * 3 / 4) # Estimate context usage
session["llm_history"] = []
session["user_history"] = []
session["context_tokens"] = round(len(str(session["system_prompt"])) * 3 / 4) # Estimate context usage
response["history"] = []
response["context_used"] = context["sessions"][type]["context_tokens"]
response["context_used"] = session["context_tokens"]
case "message_history_length":
context["message_history_length"] = DEFAULT_HISTORY_LENGTH
response["message_history_length"] = DEFAULT_HISTORY_LENGTH
@ -580,13 +581,14 @@ class WebServer:
return JSONResponse({"error": "Invalid context_id"}, status_code=400)
context = self.upsert_context(context_id)
data = await request.json()
session = context["sessions"]["chat"]
for k in data.keys():
match k:
case "system_prompt":
system_prompt = data[k].strip()
if not system_prompt:
return JSONResponse({ "status": "error", "message": "System prompt can not be empty." })
context["system"] = [{"role": "system", "content": system_prompt}]
session["system_prompt"] = system_prompt
self.save_context(context_id)
return JSONResponse({ "system_prompt": system_prompt })
case "message_history_length":
@ -604,7 +606,7 @@ class WebServer:
return JSONResponse({"error": "Invalid context_id"}, status_code=400)
context = self.upsert_context(context_id)
return JSONResponse({
"system_prompt": context["system"][0]["content"],
"system_prompt": context["sessions"]["chat"]["system_prompt"],
"message_history_length": context["message_history_length"]
})

View File

@ -13,5 +13,5 @@ session_dir = "/opt/backstory/sessions"
static_content = '/opt/backstory/frontend/deployed'
resume_doc = '/opt/backstory/docs/resume/generic.txt'
# Only used for testing; backstory-prod will not use this
key_path = '/opt/backstory/src/key.pem'
cert_path = '/opt/backstory/src/cert.pem'
key_path = '/opt/backstory/keys/key.pem'
cert_path = '/opt/backstory/keys/cert.pem'