| """ | |
| NiceGUI frontend for Rubric AI. | |
| This module defines all UI pages and components. The init_frontend() function | |
| must be called to register the page routes before running the app. | |
| """ | |
| import os | |
| import asyncio | |
| import time | |
| from datetime import datetime | |
| from dotenv import load_dotenv | |
| from nicegui import app, ui | |
| from supabase import create_client, Client | |
| from supabase.client import ClientOptions | |
| from frontend.components.layout import ( | |
| base_layout, breadcrumbs, page_header, add_head_resources, material_icon | |
| ) | |
| from frontend.components.workspace import ( | |
| workspace_pane, button_group, workspace_list, AssessmentItemPicker | |
| ) | |
| from frontend.components.ai_assistant import TabbedAssistant | |
| from frontend.components.rubric_preview import RubricPreviewModal | |
| from frontend.components.stage2_alignment import Stage2AlignmentWorkspace | |
| from frontend.components.stage3_scoring import Stage3ScoringWorkspace | |
| from frontend.components.progress_stepper import ProgressStepper, map_project_status_to_stage | |


def _get_existing_group_data(title: str, existing_rubric_data: dict) -> dict | None:
    """
    Extract existing data for a prompt group by title.

    Args:
        title: Group title to search for
        existing_rubric_data: Existing rubric_data from database

    Returns:
        Existing group dict if found, None otherwise
    """
    if not existing_rubric_data or not isinstance(existing_rubric_data, dict):
        return None
    prompt_groups = existing_rubric_data.get('prompt_groups', [])
    for group in prompt_groups:
        if group.get('title') == title:
            return group
    return None


def build_rubric_data_skeleton(prompt_groups: dict, existing_rubric_data: dict | None = None) -> dict:
| """ | |
| Build the rubric_data JSON skeleton from prompt groups. | |
| Contains static scoring levels and dynamic prompt group placeholders. | |
| Args: | |
| prompt_groups: Dictionary of current prompt groups from Stage 1 | |
| existing_rubric_data: Optional existing rubric_data from database. | |
| If provided, preserves user-entered values. | |
| Returns: | |
| Complete rubric_data structure with preserved existing data | |
| """ | |
| scoring_levels_data = { | |
| "main_header": "Scoring Notes and Sample Student Responses", | |
| "levels": [ | |
| { | |
| "level": 0, | |
| "title": "No Evidence - 0", | |
| "generic_descriptor": "Response does not address the question, expresses uncertainty (e.g., \"I don't know\"), or describes phenomena or processes not relevant to explaining what is being asked." | |
| }, | |
| { | |
| "level": 1, | |
| "title": "Foundational Pieces - 1", | |
| "generic_descriptor": "Response identifies observable components, patterns, or correlations relevant to the phenomenon. Demonstrates observational reasoning and the ability to recognize what is happening, notice relationships between factors, or identify starting and ending points in a process." | |
| }, | |
| { | |
| "level": 2, | |
| "title": "Causal Connections - 2", | |
| "generic_descriptor": "Response describes specific processes, systems, structures, or components and describes cause-and-effect relationships that contribute to the phenomenon. Demonstrates the ability to go beyond observation to identify relative/proportional relationships between components and/or how different parts work together, trace sequences or pathways, and recognize which factors produce particular outcomes." | |
| }, | |
| { | |
| "level": 3, | |
| "title": "Mechanistic Explanation - 3", | |
| "generic_descriptor": "Response includes an explanation that traces the phenomenon through multiple logical steps by invoking unobservable processes, structures, or mechanisms (such as transformations at the molecular level, chemical reactions or changes, energy transfers, or forces acting on objects). Demonstrates more sophisticated scientific reasoning that explains not just what happens or which systems are involved, but how and why the phenomenon occurs at the mechanistic level, often integrating evidence or models to support the causal chain." | |
| } | |
| ] | |
| } | |
| prompt_groups_data = [] | |
| for group_name, group_data in prompt_groups.items(): | |
| # Extract items with their prompts and responses | |
| items_data = [] | |
| for item in group_data.get('items', []): | |
| item_entry = { | |
| "id": item.get('id', ''), | |
| "prompt_text": item.get('prompt_text', ''), | |
| "question_type": item.get('question_type', ''), | |
| } | |
| # Include options for multiple choice | |
| if item.get('options'): | |
| item_entry["options"] = item.get('options', []) | |
| # Include correct/ideal answer | |
| if item.get('correct_answer_text'): | |
| item_entry["correct_answer_text"] = item.get('correct_answer_text', '') | |
| items_data.append(item_entry) | |
| # Check if this group exists in the existing rubric_data | |
| existing_group = _get_existing_group_data(group_name, existing_rubric_data) | |
| # Build group entry with preserved or default values | |
| group_entry = { | |
| "title": group_name, | |
| "items": items_data, | |
| # Preserve existing values if they exist, otherwise use defaults | |
| "alignment": existing_group.get('alignment', '') if existing_group else '', | |
| "purpose": existing_group.get('purpose', '') if existing_group else '', | |
| "selected_standards": existing_group.get('selected_standards', []) if existing_group else [], | |
| "alignment_data": existing_group.get('alignment_data', []) if existing_group else [], | |
| "group_scoring_data": existing_group.get('group_scoring_data', []) if existing_group else [] | |
| } | |
| prompt_groups_data.append(group_entry) | |
| return { | |
| "scoring_levels_data": scoring_levels_data, | |
| "prompt_groups": prompt_groups_data | |
| } | |
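
# Illustrative shape only (an assumption based on how Stage 1 builds its groups,
# never executed here): `prompt_groups` maps a group title to its items, and the
# returned skeleton mirrors each group under "prompt_groups" with empty alignment
# and scoring placeholders, e.g.
#
#   build_rubric_data_skeleton({
#       "Group A": {"items": [{"id": "q1", "prompt_text": "...", "question_type": "open_response"}]},
#   })
#   # -> {"scoring_levels_data": {...},
#   #     "prompt_groups": [{"title": "Group A", "items": [...], "alignment": "", "purpose": "", ...}]}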


# Backend imports for direct function calls (unified app)
from backend.api.models import ChatRequest
from backend.api.routes import chat as chat_endpoint
from backend.services.chat_history import (
    create_conversation,
    save_message,
    update_conversation_timestamp,
    generate_conversation_title,
    get_user_conversations,
    get_conversation_messages,
    delete_conversation,
)

load_dotenv()

# Supabase client setup
SUPABASE_URL = os.getenv("SUPABASE_URL")
SUPABASE_ANON_KEY = os.getenv("SUPABASE_ANON_KEY")
if SUPABASE_URL and SUPABASE_ANON_KEY:
    supabase: Client = create_client(SUPABASE_URL, SUPABASE_ANON_KEY)
else:
    supabase = None


def get_authed_client() -> Client | None:
    """
    Gets the user session and returns an authenticated Supabase client.

    Automatically refreshes expired JWT tokens using refresh_token.
    Returns None if not authenticated or if refresh fails - caller is
    responsible for handling this (e.g., by rendering login UI inline
    instead of redirecting).
    """
    if not SUPABASE_URL or not SUPABASE_ANON_KEY:
        return None
    session = app.storage.user.get('session')
    if not session:
        return None
    access_token = session.get('access_token')
    if not access_token:
        app.storage.user.clear()
        return None

    # Check if token is expired or about to expire (within 60 seconds)
    expires_at = session.get('expires_at')
    if expires_at is not None:
        current_time = time.time()
        if current_time >= (expires_at - 60):
            # Token expired or about to expire - attempt refresh
            refresh_token = session.get('refresh_token')
            if refresh_token and supabase:
                try:
                    # Attempt to refresh the session
                    auth_response = supabase.auth.refresh_session(refresh_token)
                    # Update session with new tokens
                    new_session = auth_response.session.model_dump()
                    app.storage.user['session'] = new_session
                    access_token = new_session['access_token']
                    print(f"JWT token refreshed successfully. New expiry: {new_session.get('expires_at')}")
                except Exception as e:
                    # Refresh failed (e.g., refresh token also expired)
                    print(f"Failed to refresh JWT token: {e}")
                    app.storage.user.clear()
                    return None
            else:
                # No refresh token available or supabase not configured
                app.storage.user.clear()
                return None

    client_options = ClientOptions(
        headers={"Authorization": f"Bearer {access_token}"}
    )
    return create_client(SUPABASE_URL, SUPABASE_ANON_KEY, options=client_options)
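
# Typical call pattern (a sketch mirroring the page handlers below): pages treat a
# None return as "not authenticated" and render the login form inline.
#
#   auth_client = get_authed_client()
#   if not auth_client:
#       build_login_ui()
#       return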


def handle_logout():
    """Log the user out and clear their session."""
    app.storage.user.clear()
    ui.notify('You have been logged out.', color='info')
    ui.navigate.to('/login')


def build_login_ui(on_success=None):
    """
    Build the login form UI. Can be used standalone or embedded in other pages.

    Args:
        on_success: Optional callback after successful login. If None, refreshes page.
    """
    email_ref = {'input': None}
    password_ref = {'input': None}

    async def handle_login():
        if not supabase:
            ui.notify("Supabase not configured", color='negative')
            return
        try:
            session = supabase.auth.sign_in_with_password({
                "email": email_ref['input'].value,
                "password": password_ref['input'].value
            })
            app.storage.user['session'] = session.session.model_dump()
            ui.notify('Login successful!', color='positive')
            if on_success:
                on_success()
            else:
                # Refresh the current page to show authenticated content
                ui.navigate.to('/')
        except Exception as e:
            ui.notify(f"Login Failed: {e}", color='negative')

    with ui.element('div').classes('auth-container'):
        with ui.element('div').classes('auth-card'):
            with ui.element('div').classes('flex justify-center mb-6'):
                with ui.element('div').classes('ai-icon-wrapper p-4'):
                    material_icon('edit_document', 'text-3xl')
            ui.html('<h1 class="auth-title">Welcome to Rubric AI</h1>', sanitize=False)
            email_ref['input'] = ui.input('Email').classes('w-full mb-4').props('outlined')
            password_ref['input'] = ui.input('Password', password=True).classes('w-full mb-4').props('outlined')
            ui.button('Login', on_click=handle_login).classes('btn btn-primary w-full mb-4')
            with ui.element('div').classes('flex justify-center gap-2'):
                ui.label("Don't have an account?").classes('text-secondary text-sm')
                ui.link('Sign up', '/signup').classes('text-primary text-sm font-medium')


async def render_stage2_alignment(auth_client, project, project_id, conversation_id_to_load=None):
    """Render Stage 2: Alignment workspace."""
    rubric_data = project.get('rubric_data', {})

    # Debug: Log what's in rubric_data when Stage 2 loads
    print(f"DEBUG: render_stage2_alignment - Loading project {project_id}")
    print(f"DEBUG: rubric_data has {len(rubric_data.get('prompt_groups', []))} prompt groups")
    for i, group in enumerate(rubric_data.get('prompt_groups', [])):
        alignment_data = group.get('alignment_data', [])
        print(f"DEBUG: Group {i} from DATABASE has {len(alignment_data)} alignment_data items")

    task_data = project.get('tasks', {}).get('task_data', {})
    task_metadata = task_data.get('task_metadata', {})

    # Get user info for conversations
    user_session = app.storage.user.get('session')
    user_id = user_session['user']['id']

    # Fetch conversations for this project
    conversations = []
    try:
        conversations = await get_user_conversations(auth_client, project_id=project_id)
    except Exception as e:
        print(f"Error loading conversations: {e}")

    state = {
        'stage2_workspace': None,
        'assistant': None,
        'is_first_message_in_conversation': True,
    }

    async def handle_save(updated_data):
        """Save rubric data to database."""
        try:
            print(f"DEBUG: handle_save called, updating project {project_id}")
            print(f"DEBUG: rubric_data has {len(updated_data.get('prompt_groups', []))} prompt groups")
            for i, group in enumerate(updated_data.get('prompt_groups', [])):
                alignment_data = group.get('alignment_data', [])
                print(f"DEBUG: Group {i} has {len(alignment_data)} alignment_data items")
            result = auth_client.table("projects") \
                .update({"rubric_data": updated_data}) \
                .eq("id", project_id) \
                .execute()
            print(f"DEBUG: Supabase update result: {result}")
            print("DEBUG: Save completed successfully")
            # CRITICAL: Re-fetch immediately to verify data persisted
            verify_result = auth_client.table("projects") \
                .select("rubric_data") \
                .eq("id", project_id) \
                .single() \
                .execute()
            verify_data = verify_result.data.get('rubric_data', {})
            print("DEBUG: VERIFICATION - Re-fetched from DB immediately after save:")
            for i, group in enumerate(verify_data.get('prompt_groups', [])):
                alignment_data = group.get('alignment_data', [])
                print(f"DEBUG: VERIFICATION - Group {i} has {len(alignment_data)} alignment_data items")
                if len(alignment_data) == 0:
                    print("DEBUG: ⚠️ WARNING - Data was LOST in database! Supabase rejected the update!")
        except Exception as e:
            print(f"DEBUG: Error in handle_save: {e}")
            ui.notify(f"Error saving: {e}", color='negative')

    def handle_back():
        """Go back to Stage 1."""
        auth_client.table("projects") \
            .update({"status": "Stage 1: Grouping"}) \
            .eq("id", project_id) \
            .execute()
        ui.navigate.to(f'/project/{project_id}')

    async def handle_next():
        """Advance to Stage 3."""
        workspace = state.get('stage2_workspace')
        if workspace:
            updated_data = workspace.get_updated_rubric_data()
            auth_client.table("projects") \
                .update({
                    "rubric_data": updated_data,
                    "status": "Stage 3: Scoring"
                }) \
                .eq("id", project_id) \
                .execute()
            ui.notify("Saved! Moving to Stage 3.", color='positive')
            ui.navigate.to(f'/project/{project_id}')
    async def get_recent_messages(auth_client, conversation_id: str, limit: int) -> list[dict]:
        """
        Retrieve the N most recent messages from a conversation.

        Args:
            auth_client: Authenticated Supabase client
            conversation_id: UUID of current conversation
            limit: Number of messages to retrieve

        Returns:
            List of message dicts with role, content, created_at (chronologically ordered)
        """
        if not conversation_id:
            return []
        try:
            # Get messages ordered by created_at descending, limit to N
            response = auth_client.table("chat_messages") \
                .select("role, content, created_at") \
                .eq("conversation_id", conversation_id) \
                .order("created_at", desc=True) \
                .limit(limit) \
                .execute()
            if response.data:
                # Reverse to chronological order for LLM prompt
                messages = list(reversed(response.data))
                return [
                    {
                        "role": msg["role"],
                        "content": msg["content"],
                        "created_at": msg.get("created_at")
                    }
                    for msg in messages
                ]
            return []
        except Exception as e:
            print(f"Error retrieving recent messages: {e}")
            return []
    async def handle_chat(message: str):
        """Handle chat messages with conversation persistence."""
        state['assistant'].show_loading()
        try:
            conversation_id = state['assistant'].current_conversation_id
            is_new_conversation = conversation_id is None
            if is_new_conversation:
                conversation_id = await create_conversation(
                    auth_client, user_id, project_id, stage="stage2"
                )
                if conversation_id:
                    state['assistant'].current_conversation_id = conversation_id
                    state['is_first_message_in_conversation'] = True
            if conversation_id:
                await save_message(auth_client, conversation_id, user_id, 'user', message)

            # Retrieve recent message history for context
            from backend.core.config import settings
            message_history = []
            if conversation_id and not is_new_conversation:
                # Get N previous messages (excludes current message just saved)
                message_history = await get_recent_messages(
                    auth_client,
                    conversation_id,
                    settings.CHAT_MEMORY_WINDOW_SIZE
                )

            output_mode = state['assistant'].output_mode
            custom_system_prompt = None
            if output_mode == 'custom':
                custom_system_prompt = state['assistant'].system_prompt_input.value

            request = ChatRequest(
                message=message,
                task_id=project['task_id'],
                project_title=project['project_title'],
                current_stage="Stage 2: Alignment",
                output_mode=output_mode,
                prompt_group_config={},
                custom_system_prompt=custom_system_prompt,
                conversation_id=conversation_id,
                message_history=message_history,
            )
            response = await chat_endpoint(request)
            ai_response = response.response

            if conversation_id:
                await save_message(auth_client, conversation_id, user_id, 'ai', ai_response)
                await update_conversation_timestamp(auth_client, conversation_id)

            if is_new_conversation and state['is_first_message_in_conversation']:
                asyncio.create_task(
                    generate_conversation_title(auth_client, conversation_id, message, ai_response)
                )
                state['assistant'].add_conversation_to_list({
                    'id': conversation_id,
                    'title': None,
                    'stage': 'stage2',
                    'updated_at': datetime.now().isoformat(),
                })
                state['is_first_message_in_conversation'] = False

            state['assistant'].add_ai_message(ai_response)
        except Exception as e:
            state['assistant'].add_ai_message(f"Error: {e}")
    async def handle_load_conversation(conversation_id: str):
        """Load messages from a conversation into the chat history."""
        try:
            messages = await get_conversation_messages(auth_client, conversation_id)
            if state['assistant'] and state['assistant'].chat_history:
                state['assistant'].chat_history.clear()
                for msg in messages:
                    role = msg.get('role')
                    content = msg.get('content', '')
                    if role == 'user':
                        state['assistant'].chat_history.add_user_message(content)
                    elif role == 'ai':
                        state['assistant'].chat_history.add_ai_message(content)
                    elif role == 'system':
                        state['assistant'].chat_history.add_system_message(content)
            state['is_first_message_in_conversation'] = False
        except Exception as e:
            ui.notify(f"Error loading conversation: {e}", color='negative')

    async def handle_delete_conversation(conversation_id: str) -> bool:
        """Delete a conversation."""
        try:
            success = await delete_conversation(auth_client, conversation_id)
            if success:
                ui.notify("Conversation deleted", color='positive')
            return success
        except Exception as e:
            ui.notify(f"Error deleting conversation: {e}", color='negative')
            return False

    def handle_new_chat():
        """Start a new chat."""
        state['is_first_message_in_conversation'] = True
        ui.notify("Starting new chat", color='info')

    def handle_navigate(target_stage: str, conversation_id: str):
        """Navigate to a different stage to load a conversation."""
        if target_stage == "stage1":
            # Store conversation ID in client storage for next page load
            app.storage.client['load_conversation'] = conversation_id
            # Navigate to Stage 1
            auth_client.table("projects") \
                .update({"status": "Stage 1: Grouping"}) \
                .eq("id", project_id) \
                .execute()
            ui.navigate.to(f'/project/{project_id}')
    with base_layout(active_page='rubrics', on_logout=handle_logout):
        def open_rubric_preview():
            """Open the rubric preview modal with current data."""
            modal = RubricPreviewModal(rubric_data)
            modal.show()

        # Compact header with title and micro-stepper
        with ui.element('div').classes('compact-header'):
            breadcrumbs([
                ('Rubrics', '/'),
                (project['tasks']['title'], None),
                ('Stage 2: Alignment', None)
            ])
            # Top row: title and button
            with ui.element('div').classes('compact-header-top-row'):
                ui.html(f'<h1 class="compact-header-title">{project["tasks"]["title"]}: {project["project_title"]}</h1>', sanitize=False)
                ui.button('View Rubric', on_click=open_rubric_preview).classes('btn btn-secondary').props('size=sm icon=visibility')
            # Second row: Progress stepper (left-aligned)
            project_status = project.get('status', 'Stage 2: Building')
            current_stage = map_project_status_to_stage(project_status)
            stepper = ProgressStepper(current_stage=current_stage, project_id=project_id)
            ui.html(stepper.render_html(), sanitize=False)
            ui.add_body_html(stepper.get_navigation_script())

        with ui.element('div').classes('workspace-grid'):
            # Left pane - Stage 2 Alignment Workspace
            with ui.element('div').classes('pane'):
                with ui.element('div').classes('pane-header'):
                    with ui.element('div').classes('flex justify-between items-center w-full'):
                        ui.html('<h2 class="pane-title">Stage 2: Alignment</h2>', sanitize=False)

                        # Save button (right-aligned)
                        async def handle_stage2_save():
                            if state.get('stage2_workspace'):
                                await handle_save(state['stage2_workspace'].get_updated_rubric_data())
                                ui.notify('Progress saved successfully', type='positive')

                        ui.button('Save Progress', on_click=handle_stage2_save).classes('btn btn-secondary').props('icon=save')
                with ui.element('div').classes('pane-content'):
                    state['stage2_workspace'] = Stage2AlignmentWorkspace(
                        rubric_data=rubric_data,
                        task_metadata=task_metadata,
                        on_save=handle_save,
                        on_back=handle_back,
                        on_next=handle_next,
                    )
                    state['stage2_workspace'].render()

            # Right pane - AI Assistant (full TabbedAssistant)
            state['assistant'] = TabbedAssistant(
                initial_message=f"Hello! I'm here to help you with alignment for '{project['project_title']}'. "
                                "Ask me about NGSS standards or how to write effective alignment statements.",
                on_send=handle_chat,
                conversations=conversations,
                on_load_conversation=handle_load_conversation,
                on_delete_conversation=handle_delete_conversation,
                on_new_chat=handle_new_chat,
                stage_context="stage2",
                on_navigate=handle_navigate,
            )

    # Auto-load conversation if specified in client storage
    if conversation_id_to_load:
        async def load_and_clear():
            await handle_load_conversation(conversation_id_to_load)
            # Clear the storage after loading
            if 'load_conversation' in app.storage.client:
                del app.storage.client['load_conversation']

        ui.timer(0.1, load_and_clear, once=True)


async def render_stage3_scoring(auth_client: Client, project: dict, project_id: str, conversation_id_to_load: str = None):
    """Render Stage 3: Scoring Criteria page."""
    task_data = project['tasks']['task_data']
    rubric_data = project.get('rubric_data', {})
    user_session = app.storage.user.get('session')
    user_id = user_session['user']['id']

    # Fetch conversations for this project
    conversations = []
    try:
        conversations = await get_user_conversations(auth_client, project_id=project_id)
    except Exception as e:
        print(f"Error loading conversations: {e}")

    state = {
        'stage3_workspace': None,
        'assistant': None,
        'is_first_message_in_conversation': True,
    }

    def handle_logout():
        app.storage.user.clear()
        ui.navigate.to('/login')

    async def handle_save(updated_rubric_data: dict):
        """Save rubric_data to database."""
        try:
            auth_client.table("projects") \
                .update({"rubric_data": updated_rubric_data}) \
                .eq("id", project_id) \
                .execute()
        except Exception as e:
            ui.notify(f"Error saving: {e}", color='negative')

    def handle_back():
        """Navigate back to Stage 2."""
        auth_client.table("projects") \
            .update({"status": "Stage 2: Building"}) \
            .eq("id", project_id) \
            .execute()
        ui.navigate.to(f'/project/{project_id}')

    def handle_next():
        """Navigate to Stage 4 (placeholder for now)."""
        ui.notify('Stage 4 not yet implemented', color='info')
        # Future: Navigate to Stage 4
    async def get_recent_messages(auth_client, conversation_id: str, limit: int) -> list[dict]:
        """
        Retrieve the N most recent messages from a conversation.

        Args:
            auth_client: Authenticated Supabase client
            conversation_id: UUID of current conversation
            limit: Number of messages to retrieve

        Returns:
            List of message dicts with role, content, created_at (chronologically ordered)
        """
        if not conversation_id:
            return []
        try:
            # Get messages ordered by created_at descending, limit to N
            response = auth_client.table("chat_messages") \
                .select("role, content, created_at") \
                .eq("conversation_id", conversation_id) \
                .order("created_at", desc=True) \
                .limit(limit) \
                .execute()
            if response.data:
                # Reverse to chronological order for LLM prompt
                messages = list(reversed(response.data))
                return [
                    {
                        "role": msg["role"],
                        "content": msg["content"],
                        "created_at": msg.get("created_at")
                    }
                    for msg in messages
                ]
            return []
        except Exception as e:
            print(f"Error retrieving recent messages: {e}")
            return []
    async def handle_chat(message: str):
        """Handle chat messages with Stage 3 context."""
        try:
            conversation_id = state['assistant'].current_conversation_id
            is_new_conversation = conversation_id is None
            if is_new_conversation:
                conversation_id = await create_conversation(
                    auth_client, user_id, project_id, stage="stage3"
                )
                if conversation_id:
                    state['assistant'].current_conversation_id = conversation_id
                    state['is_first_message_in_conversation'] = True
            if conversation_id:
                await save_message(auth_client, conversation_id, user_id, 'user', message)

            # Retrieve recent message history for context
            from backend.core.config import settings
            message_history = []
            if conversation_id and not is_new_conversation:
                message_history = await get_recent_messages(
                    auth_client,
                    conversation_id,
                    settings.CHAT_MEMORY_WINDOW_SIZE
                )

            output_mode = state['assistant'].output_mode
            custom_system_prompt = None
            if output_mode == 'custom':
                custom_system_prompt = state['assistant'].system_prompt_input.value

            request = ChatRequest(
                message=message,
                task_id=project['task_id'],
                project_title=project['project_title'],
                current_stage="Stage 3: Scoring",
                output_mode=output_mode,
                prompt_group_config={},
                custom_system_prompt=custom_system_prompt,
                conversation_id=conversation_id,
                message_history=message_history,
            )
            response = await chat_endpoint(request)
            ai_response = response.response

            if conversation_id:
                await save_message(auth_client, conversation_id, user_id, 'ai', ai_response)
                await update_conversation_timestamp(auth_client, conversation_id)

            if is_new_conversation and state['is_first_message_in_conversation']:
                asyncio.create_task(
                    generate_conversation_title(auth_client, conversation_id, message, ai_response)
                )
                state['assistant'].add_conversation_to_list({
                    'id': conversation_id,
                    'title': None,
                    'stage': 'stage3',
                    'updated_at': datetime.now().isoformat(),
                })
                state['is_first_message_in_conversation'] = False

            state['assistant'].add_ai_message(ai_response)
        except Exception as e:
            state['assistant'].add_ai_message(f"Error: {e}")
    async def handle_load_conversation(conversation_id: str):
        """Load messages from a conversation into the chat history."""
        try:
            messages = await get_conversation_messages(auth_client, conversation_id)
            if state['assistant'] and state['assistant'].chat_history:
                state['assistant'].chat_history.clear()
                for msg in messages:
                    role = msg.get('role')
                    content = msg.get('content', '')
                    if role == 'user':
                        state['assistant'].chat_history.add_user_message(content)
                    elif role == 'ai':
                        state['assistant'].chat_history.add_ai_message(content)
                    elif role == 'system':
                        state['assistant'].chat_history.add_system_message(content)
            state['is_first_message_in_conversation'] = False
        except Exception as e:
            ui.notify(f"Error loading conversation: {e}", color='negative')

    async def handle_delete_conversation(conversation_id: str) -> bool:
        """Delete a conversation."""
        try:
            success = await delete_conversation(auth_client, conversation_id)
            if success:
                ui.notify("Conversation deleted", color='positive')
            return success
        except Exception as e:
            ui.notify(f"Error deleting conversation: {e}", color='negative')
            return False

    def handle_new_chat():
        """Start a new chat."""
        state['is_first_message_in_conversation'] = True
        ui.notify("Starting new chat", color='info')

    def handle_navigate(target_stage: str, conversation_id: str):
        """Navigate to a different stage to load a conversation."""
        if target_stage == "stage1":
            app.storage.client['load_conversation'] = conversation_id
            auth_client.table("projects") \
                .update({"status": "Stage 1: Grouping"}) \
                .eq("id", project_id) \
                .execute()
            ui.navigate.to(f'/project/{project_id}')
        elif target_stage == "stage2":
            app.storage.client['load_conversation'] = conversation_id
            auth_client.table("projects") \
                .update({"status": "Stage 2: Building"}) \
                .eq("id", project_id) \
                .execute()
            ui.navigate.to(f'/project/{project_id}')
    with base_layout(active_page='rubrics', on_logout=handle_logout):
        def open_rubric_preview():
            """Open the rubric preview modal with current data."""
            modal = RubricPreviewModal(rubric_data)
            modal.show()

        # Compact header with title and micro-stepper
        with ui.element('div').classes('compact-header'):
            breadcrumbs([
                ('Rubrics', '/'),
                (project['tasks']['title'], None),
                ('Stage 3: Scoring', None)
            ])
            # Top row: title and button
            with ui.element('div').classes('compact-header-top-row'):
                ui.html(f'<h1 class="compact-header-title">{project["tasks"]["title"]}: {project["project_title"]}</h1>', sanitize=False)
                ui.button('View Rubric', on_click=open_rubric_preview).classes('btn btn-secondary').props('size=sm icon=visibility')
            # Second row: Progress stepper (left-aligned)
            project_status = project.get('status', 'Stage 3: Scoring')
            current_stage = map_project_status_to_stage(project_status)
            stepper = ProgressStepper(current_stage=current_stage, project_id=project_id)
            ui.html(stepper.render_html(), sanitize=False)
            ui.add_body_html(stepper.get_navigation_script())

        with ui.element('div').classes('workspace-grid'):
            # Left pane - Stage 3 Scoring Workspace
            with ui.element('div').classes('pane'):
                with ui.element('div').classes('pane-header'):
                    with ui.element('div').classes('flex justify-between items-center w-full'):
                        ui.html('<h2 class="pane-title">Stage 3: Scoring Criteria</h2>', sanitize=False)

                        # Save button (right-aligned)
                        async def handle_stage3_save():
                            if state.get('stage3_workspace'):
                                await handle_save(state['stage3_workspace'].get_updated_rubric_data())
                                ui.notify('Progress saved successfully', type='positive')

                        ui.button('Save Progress', on_click=handle_stage3_save).classes('btn btn-secondary').props('icon=save')
                with ui.element('div').classes('pane-content'):
                    state['stage3_workspace'] = Stage3ScoringWorkspace(
                        rubric_data=rubric_data,
                        on_save=handle_save,
                        on_back=handle_back,
                        on_next=handle_next,
                    )
                    state['stage3_workspace'].render()

            # Right pane - AI Assistant
            state['assistant'] = TabbedAssistant(
                initial_message=f"Hello! I'm here to help you define scoring criteria for '{project['project_title']}'. "
                                "Ask me about writing effective scoring rubrics.",
                on_send=handle_chat,
                conversations=conversations,
                on_load_conversation=handle_load_conversation,
                on_delete_conversation=handle_delete_conversation,
                on_new_chat=handle_new_chat,
                stage_context="stage3",
                on_navigate=handle_navigate,
            )

    # Auto-load conversation if specified in client storage
    if conversation_id_to_load:
        async def load_and_clear():
            await handle_load_conversation(conversation_id_to_load)
            if 'load_conversation' in app.storage.client:
                del app.storage.client['load_conversation']

        ui.timer(0.1, load_and_clear, once=True)


def init_frontend():
    """
    Initialize frontend routes.

    This function registers all NiceGUI page routes. Must be called
    before running the application.
    """

    @ui.page('/login')
    def login_page():
| """Styled login page.""" | |
| add_head_resources() | |
| build_login_ui() | |

    @ui.page('/signup')
    def signup_page():
| """Styled signup page.""" | |
| add_head_resources() | |
| email_ref = {'input': None} | |
| password_ref = {'input': None} | |
| async def handle_signup(): | |
| if not supabase: | |
| ui.notify("Supabase not configured", color='negative') | |
| return | |
| try: | |
| supabase.auth.sign_up({ | |
| "email": email_ref['input'].value, | |
| "password": password_ref['input'].value | |
| }) | |
| ui.notify('Signup successful! Please check your email to confirm.', color='positive') | |
| ui.navigate.to('/login') | |
| except Exception as e: | |
| ui.notify(f"Signup Failed: {e}", color='negative') | |
| with ui.element('div').classes('auth-container'): | |
| with ui.element('div').classes('auth-card'): | |
| with ui.element('div').classes('flex justify-center mb-6'): | |
| with ui.element('div').classes('ai-icon-wrapper p-4'): | |
| material_icon('edit_document', 'text-3xl') | |
| ui.html('<h1 class="auth-title">Create an Account</h1>', sanitize=False) | |
| email_ref['input'] = ui.input('Email').classes('w-full mb-4').props('outlined') | |
| password_ref['input'] = ui.input('Password', password=True).classes('w-full mb-4').props('outlined') | |
| ui.button('Sign Up', on_click=handle_signup).classes('btn btn-primary w-full mb-4') | |
| with ui.element('div').classes('flex justify-center gap-2'): | |
| ui.label("Already have an account?").classes('text-secondary text-sm') | |
| ui.link('Login', '/login').classes('text-primary text-sm font-medium') | |

    @ui.page('/')
    async def dashboard_page():
| """Main dashboard showing user's projects.""" | |
| add_head_resources() | |
| auth_client = get_authed_client() | |
| if not auth_client: | |
| # Render login form inline instead of redirecting | |
| # This avoids cross-origin navigation issues in HF Spaces iframe | |
| build_login_ui() | |
| return | |
| user_session = app.storage.user.get('session') | |
| user_id = user_session['user']['id'] | |
| task_options = {} | |
| try: | |
| response = auth_client.table("tasks").select("id", "title").order("id").execute() | |
| if response.data: | |
| task_options = {task['id']: task['title'] for task in response.data} | |
| except Exception as e: | |
| ui.notify(f"Error fetching tasks: {e}", color='negative') | |
| refs = {'task_select': None, 'title_input': None} | |
| async def create_project(): | |
| task_id = refs['task_select'].value | |
| title = refs['title_input'].value | |
| if not task_id or not title: | |
| ui.notify("Please select a task and enter a project title.", color='warning') | |
| return | |
| try: | |
| new_project_data = { | |
| 'user_id': user_id, | |
| 'task_id': task_id, | |
| 'project_title': title, | |
| 'status': 'Stage 1: Grouping', | |
| 'prompt_group_config': {} | |
| } | |
| response = auth_client.table("projects").insert(new_project_data).execute() | |
| if response.data: | |
| ui.navigate.to(f'/project/{response.data[0]["id"]}') | |
| else: | |
| ui.notify("Error creating project.", color='negative') | |
| except Exception as e: | |
| ui.notify(f"Error: {e}", color='negative') | |
| with base_layout(active_page='dashboard', on_logout=handle_logout): | |
| breadcrumbs([('Dashboard', None)]) | |
| page_header('Your Projects', 'Create and manage your rubric projects') | |
| with ui.element('div').classes('pane p-6'): | |
| ui.html('<h3 class="pane-title mb-4">Create New Project</h3>', sanitize=False) | |
| with ui.element('div').classes('flex gap-4 flex-wrap'): | |
| refs['title_input'] = ui.input('Project Title').classes('flex-grow').props('outlined') | |
| refs['task_select'] = ui.select(options=task_options, label='Task').classes('min-w-48').props('outlined') | |
| ui.button('Create Project', on_click=create_project).classes('btn btn-primary') | |
| with ui.element('div').classes('pane'): | |
| with ui.element('div').classes('pane-header'): | |
| ui.html('<h2 class="pane-title">Your Rubrics</h2>', sanitize=False) | |
| with ui.element('div').classes('pane-content'): | |
| try: | |
| project_response = auth_client.table("projects") \ | |
| .select("*, tasks(title)") \ | |
| .eq("user_id", user_id) \ | |
| .order("created_at", desc=True) \ | |
| .execute() | |
| if project_response.data: | |
| for project in project_response.data: | |
| with ui.element('div').classes('workspace-item cursor-pointer').on( | |
| 'click', lambda p=project: ui.navigate.to(f'/project/{p["id"]}') | |
| ): | |
| material_icon('edit_document', 'text-primary') | |
| with ui.element('div').classes('flex-grow'): | |
| ui.label(project['project_title']).classes('font-bold') | |
| ui.label(f"Task: {project['tasks']['title']} | {project['status']}").classes('text-sm text-secondary') | |
| material_icon('chevron_right') | |
| else: | |
| ui.label('No projects yet. Create one above!').classes('text-secondary') | |
| except Exception as e: | |
| ui.notify(f"Error loading projects: {e}", color='negative') | |

    @ui.page('/project/{project_id}')
    async def project_workspace_page(project_id: str):
| """Main workspace for editing a rubric project.""" | |
| add_head_resources() | |
| auth_client = get_authed_client() | |
| if not auth_client: | |
| # Render login form inline instead of redirecting | |
| build_login_ui() | |
| return | |
| # Check for status update from stepper navigation | |
| from nicegui import context | |
| status_param = context.client.request.query_params.get('status') | |
| if status_param: | |
| try: | |
| auth_client.table("projects") \ | |
| .update({"status": status_param}) \ | |
| .eq("id", project_id) \ | |
| .execute() | |
| # Redirect to clean URL without query param | |
| ui.navigate.to(f'/project/{project_id}') | |
| return | |
| except Exception as e: | |
| print(f"Error updating project status: {e}") | |
| try: | |
| project_response = auth_client.table("projects") \ | |
| .select("*, tasks(title, task_data)") \ | |
| .eq("id", project_id) \ | |
| .single() \ | |
| .execute() | |
| project = project_response.data | |
| if not project: | |
| ui.label('Project not found.').classes('text-2xl') | |
| return | |
| except Exception as e: | |
| ui.label(f'Error loading project: {e}').classes('text-2xl') | |
| return | |
| task_data = project.get('tasks', {}).get('task_data', {}) | |
| saved_config = project.get('prompt_group_config', {}) or {} | |
| # Fetch user's conversations for this project | |
| user_session = app.storage.user.get('session') | |
| user_id = user_session['user']['id'] | |
| conversations = [] | |
| try: | |
| conversations = await get_user_conversations(auth_client, project_id=project_id) | |
| except Exception as e: | |
| print(f"Error loading conversations: {e}") | |
| # Check for conversation to load from query params | |
| load_conversation_id = app.storage.client.get('load_conversation') | |
| # Check project status and route to appropriate stage | |
| project_status = project.get('status', 'Stage 1: Grouping') | |
| if project_status == 'Stage 2: Building': | |
| await render_stage2_alignment(auth_client, project, project_id, load_conversation_id) | |
| return | |
| elif project_status == 'Stage 3: Scoring': | |
| await render_stage3_scoring(auth_client, project, project_id, load_conversation_id) | |
| return | |
        # --- Stage 1: Define Prompt Groups (below) ---
        def get_assessment_items_by_ids(item_ids: list) -> list:
            """Get full assessment item data for given IDs."""
            items = []
            content_stream = task_data.get('content_stream', [])
            for content in content_stream:
                if content.get('type') == 'assessment_item':
                    item_id = content.get('id', str(content.get('sequence_id', '')))
                    if item_id in item_ids:
                        item_data = {
                            'id': item_id,
                            'prompt_text': content.get('prompt_text', ''),
                            'question_type': content.get('question_type', 'unknown'),
                            'is_scorable': content.get('is_scorable', False),
                            'sequence_id': content.get('sequence_id', 0),
                            'options': content.get('options', []),
                            'correct_answer_text': content.get('correct_answer_text', ''),
                        }
                        items.append(item_data)
            return items

        state = {
            'prompt_groups': {},
            'workspace_list': None,
            'assistant': None,
            'is_first_message_in_conversation': True,
        }

        def truncate_text(text: str, max_length: int = 60) -> str:
            text = ' '.join(text.split())
            if len(text) <= max_length:
                return text
            return text[:max_length].rsplit(' ', 1)[0] + '...'

        def build_prompt_group_config() -> dict:
            config = {}
            for group_name, group_data in state['prompt_groups'].items():
                group_key = group_name.lower().replace(' ', '_')
                config[group_key] = {
                    'name': group_name,
                    'items_from_task': [item.get('id') for item in group_data.get('items', [])]
                }
            return config
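
        # Persisted shape (illustrative only, derived from the loop above):
        #   {"group_a": {"name": "Group A", "items_from_task": ["q1", "q2"]}}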
        async def save_prompt_group_config():
            config = build_prompt_group_config()
            try:
                auth_client.table("projects") \
                    .update({"prompt_group_config": config}) \
                    .eq("id", project_id) \
                    .execute()
            except Exception as e:
                ui.notify(f"Error saving: {e}", color='negative')

        async def add_prompt_group(group_name: str, items: list, silent: bool = False):
            if group_name in state['prompt_groups']:
                if not silent:
                    ui.notify(f"'{group_name}' already exists.", color='warning')
                return

            group_id = f"group-{group_name.replace(' ', '-').lower()}"
            items_container_ref = {'container': None, 'items': list(items)}

            async def remove_item_from_group(item_id: str, item_element):
                items_container_ref['items'] = [i for i in items_container_ref['items'] if i.get('id') != item_id]
                item_element.delete()
                state['prompt_groups'][group_name]['items'] = items_container_ref['items']
                await save_prompt_group_config()
                ui.notify(f"Removed item from '{group_name}'")

            def show_add_item_picker():
                all_used = get_used_item_ids()

                async def handle_add_to_group(new_group_name: str, new_items: list):
                    for item in new_items:
                        items_container_ref['items'].append(item)
                        create_item_element(item, items_container_ref['container'])
                    state['prompt_groups'][group_name]['items'] = items_container_ref['items']
                    await save_prompt_group_config()

                picker = AssessmentItemPicker(
                    task_data=task_data,
                    on_add_group=handle_add_to_group,
                    used_item_ids=all_used
                )
                picker.show()
                if picker.group_name_input:
                    picker.group_name_input.set_value(group_name)
                    picker.group_name_input.props('readonly')

            def create_item_element(item, container):
                item_id = item.get('id', '')
                with container:
                    with ui.element('div').classes('workspace-item draggable-item group/item') as item_el:
                        item_el._props['data-item-id'] = item_id
                        material_icon('drag_indicator', 'drag-handle text-slate-400 cursor-grab')
                        with ui.element('div').classes('flex-grow'):
                            ui.label(truncate_text(item.get('prompt_text', ''))).classes(
                                'text-sm text-slate-700'
                            )
                        ui.button(
                            icon='close',
                            on_click=lambda e, iid=item_id, iel=item_el: remove_item_from_group(iid, iel)
                        ).props('flat round dense size=sm').classes(
                            'opacity-0 group-hover/item:opacity-100 transition-opacity text-slate-400 hover:text-red-500'
                        )

            with state['workspace_list']:
                with ui.element('div').classes('mb-4') as group_element:
                    async def make_delete_handler(g):
                        await remove_group(g)

                    with ui.element('div').classes('flex items-center gap-2 mb-2'):
                        ui.label(f"Prompt Group {group_name}").classes('text-lg font-bold text-slate-800')
                        ui.button(icon='add_circle_outline', on_click=show_add_item_picker).props(
                            'flat round dense size=sm'
                        ).classes('text-slate-400 hover:text-primary')
                        ui.element('div').classes('flex-grow')
                        ui.button(icon='delete_outline', on_click=lambda g=group_name: make_delete_handler(g)).props(
                            'flat round dense size=sm'
                        ).classes('text-slate-400 hover:text-red-500')
                    with ui.element('div').classes('sortable-group min-h-12 border-2 border-dashed border-transparent rounded-lg') as items_container:
                        items_container._props['id'] = group_id
                        items_container_ref['container'] = items_container
                        for item in items:
                            create_item_element(item, items_container)

            state['prompt_groups'][group_name] = {
                'element': group_element,
                'items': items_container_ref['items'],
                'container_id': group_id
            }

            ui.run_javascript(f'''
                if (typeof Sortable !== 'undefined') {{
                    const el = document.getElementById('{group_id}');
                    if (el && !el.sortableInstance) {{
                        el.sortableInstance = new Sortable(el, {{
                            group: 'prompt-groups',
                            animation: 150,
                            handle: '.drag-handle',
                            ghostClass: 'sortable-ghost',
                            chosenClass: 'sortable-chosen',
                            dragClass: 'sortable-drag',
                        }});
                    }}
                }}
            ''')

            if not silent:
                ui.notify(f"Added '{group_name}' with {len(items)} items.", color='positive')
                await save_prompt_group_config()
        async def remove_group(name: str):
            if name in state['prompt_groups']:
                group_data = state['prompt_groups'].pop(name)
                if group_data.get('element'):
                    group_data['element'].delete()
                ui.notify(f"Removed '{name}'")
                await save_prompt_group_config()

        def get_used_item_ids() -> list:
            used_ids = []
            for group_data in state['prompt_groups'].values():
                for item in group_data.get('items', []):
                    used_ids.append(item.get('id'))
            return used_ids

        def show_assessment_picker():
            if not task_data:
                ui.notify("No task data available.", color='warning')
                return

            async def handle_add_group(group_name: str, items: list):
                await add_prompt_group(group_name, items)

            picker = AssessmentItemPicker(
                task_data=task_data,
                on_add_group=handle_add_group,
                used_item_ids=get_used_item_ids()
            )
            picker.show()

        async def save_and_continue():
            groups = list(state['prompt_groups'].keys())
            if not groups:
                ui.notify("Please add at least one prompt group.", color='warning')
                return
            try:
                # Fetch existing rubric_data to preserve user work
                existing_rubric_data = project.get('rubric_data', None)
                # Build skeleton with preservation of existing data
                rubric_data = build_rubric_data_skeleton(
                    state['prompt_groups'],
                    existing_rubric_data=existing_rubric_data
                )
                auth_client.table("projects") \
                    .update({
                        "prompt_groups": groups,
                        "status": "Stage 2: Building",
                        "rubric_data": rubric_data
                    }) \
                    .eq("id", project_id) \
                    .execute()
                ui.notify("Saved! Moving to Stage 2.", color='positive')
                ui.navigate.to(f'/project/{project_id}')
            except Exception as e:
                ui.notify(f"Error: {e}", color='negative')
        async def get_recent_messages(auth_client, conversation_id: str, limit: int) -> list[dict]:
            """
            Retrieve the N most recent messages from a conversation.

            Args:
                auth_client: Authenticated Supabase client
                conversation_id: UUID of current conversation
                limit: Number of messages to retrieve

            Returns:
                List of message dicts with role, content, created_at (chronologically ordered)
            """
            if not conversation_id:
                return []
            try:
                # Get messages ordered by created_at descending, limit to N
                response = auth_client.table("chat_messages") \
                    .select("role, content, created_at") \
                    .eq("conversation_id", conversation_id) \
                    .order("created_at", desc=True) \
                    .limit(limit) \
                    .execute()
                if response.data:
                    # Reverse to chronological order for LLM prompt
                    messages = list(reversed(response.data))
                    return [
                        {
                            "role": msg["role"],
                            "content": msg["content"],
                            "created_at": msg.get("created_at")
                        }
                        for msg in messages
                    ]
                return []
            except Exception as e:
                print(f"Error retrieving recent messages: {e}")
                return []
        async def handle_chat(message: str):
            """Handle chat messages with conversation persistence."""
            state['assistant'].show_loading()
            try:
                # Get current conversation ID from assistant
                conversation_id = state['assistant'].current_conversation_id
                # If no conversation exists, create one
                is_new_conversation = conversation_id is None
                if is_new_conversation:
                    conversation_id = await create_conversation(
                        auth_client, user_id, project_id, stage="stage1"
                    )
                    if conversation_id:
                        state['assistant'].current_conversation_id = conversation_id
                        state['is_first_message_in_conversation'] = True

                # Save user message
                if conversation_id:
                    await save_message(auth_client, conversation_id, user_id, 'user', message)

                # Retrieve recent message history for context
                from backend.core.config import settings
                message_history = []
                if conversation_id and not is_new_conversation:
                    # Get N previous messages (excludes current message just saved)
                    message_history = await get_recent_messages(
                        auth_client,
                        conversation_id,
                        settings.CHAT_MEMORY_WINDOW_SIZE
                    )

                # Gather context based on mode
                output_mode = state['assistant'].output_mode
                # Get custom system prompt only if in custom mode
                custom_system_prompt = None
                if output_mode == 'custom':
                    custom_system_prompt = state['assistant'].system_prompt_input.value
                # Get user's current prompt group config
                prompt_group_config = build_prompt_group_config()

                request = ChatRequest(
                    message=message,
                    task_id=project['task_id'],
                    project_title=project['project_title'],
                    current_stage="Stage 1: Define Prompt Groups",
                    output_mode=output_mode,
                    prompt_group_config=prompt_group_config,
                    custom_system_prompt=custom_system_prompt,
                    conversation_id=conversation_id,
                    message_history=message_history,
                )
                response = await chat_endpoint(request)
                ai_response = response.response

                # Save AI response
                if conversation_id:
                    await save_message(auth_client, conversation_id, user_id, 'ai', ai_response)
                    await update_conversation_timestamp(auth_client, conversation_id)

                # Generate title for new conversations (fire and forget)
                if is_new_conversation and state['is_first_message_in_conversation']:
                    asyncio.create_task(
                        generate_conversation_title(auth_client, conversation_id, message, ai_response)
                    )
                    # Add to conversations list with temporary title
                    state['assistant'].add_conversation_to_list({
                        'id': conversation_id,
                        'title': None,
                        'stage': 'stage1',
                        'updated_at': datetime.now().isoformat(),
                    })
                    state['is_first_message_in_conversation'] = False

                state['assistant'].add_ai_message(ai_response)
            except Exception as e:
                state['assistant'].add_ai_message(f"Error: {e}")
        async def handle_load_conversation(conversation_id: str):
            """Load messages from a conversation into the chat history."""
            try:
                messages = await get_conversation_messages(auth_client, conversation_id)
                if state['assistant'] and state['assistant'].chat_history:
                    state['assistant'].chat_history.clear()
                    for msg in messages:
                        role = msg.get('role')
                        content = msg.get('content', '')
                        if role == 'user':
                            state['assistant'].chat_history.add_user_message(content)
                        elif role == 'ai':
                            state['assistant'].chat_history.add_ai_message(content)
                        elif role == 'system':
                            state['assistant'].chat_history.add_system_message(content)
                state['is_first_message_in_conversation'] = False
            except Exception as e:
                ui.notify(f"Error loading conversation: {e}", color='negative')

        async def handle_delete_conversation(conversation_id: str) -> bool:
            """Delete a conversation."""
            try:
                success = await delete_conversation(auth_client, conversation_id)
                if success:
                    ui.notify("Conversation deleted", color='positive')
                return success
            except Exception as e:
                ui.notify(f"Error deleting conversation: {e}", color='negative')
                return False

        def handle_new_chat():
            """Start a new chat."""
            state['is_first_message_in_conversation'] = True
            ui.notify("Starting new chat", color='info')

        def handle_navigate(target_stage: str, conversation_id: str):
            """Navigate to a different stage to load a conversation."""
            if target_stage == "stage2":
                # Store conversation ID in client storage for next page load
                app.storage.client['load_conversation'] = conversation_id
                # Update project status to Stage 2 and navigate
                auth_client.table("projects") \
                    .update({"status": "Stage 2: Building"}) \
                    .eq("id", project_id) \
                    .execute()
                ui.navigate.to(f'/project/{project_id}')
        def open_rubric_preview():
            """Open the rubric preview modal with live workspace data."""
            # Use saved rubric_data if available, otherwise build from current workspace
            rubric_data = project.get('rubric_data', {})
            # If we have prompt groups in the workspace, build a live preview
            if state['prompt_groups']:
                rubric_data = build_rubric_data_skeleton(state['prompt_groups'])
            modal = RubricPreviewModal(rubric_data)
            modal.show()

        with base_layout(active_page='rubrics', on_logout=handle_logout):
            # Compact header with title and micro-stepper
            with ui.element('div').classes('compact-header'):
                breadcrumbs([
                    ('Rubrics', '/'),
                    (project['tasks']['title'], None),
                    ('Define Prompt Groups', None)
                ])
                # Top row: title and button
                with ui.element('div').classes('compact-header-top-row'):
                    ui.html(f'<h1 class="compact-header-title">{project["tasks"]["title"]}: {project["project_title"]}</h1>', sanitize=False)
                    ui.button('View Rubric', on_click=open_rubric_preview).classes('btn btn-secondary').props('size=sm icon=visibility')
                # Second row: Progress stepper (left-aligned)
                project_status = project.get('status', 'Stage 1: Grouping')
                current_stage = map_project_status_to_stage(project_status)
                stepper = ProgressStepper(current_stage=current_stage, project_id=project_id)
                ui.html(stepper.render_html(), sanitize=False)
                ui.add_body_html(stepper.get_navigation_script())

            with ui.element('div').classes('workspace-grid'):
                with workspace_pane('Stage 1: Define Prompt Groups', None,
                                    action_button=('Add Prompt Group', show_assessment_picker, 'add')):
                    with workspace_list() as wlist:
                        state['workspace_list'] = wlist

                        async def load_saved_groups():
                            for group_key, group_data in saved_config.items():
                                group_name = group_data.get('name', group_key)
                                item_ids = group_data.get('items_from_task', [])
                                items = get_assessment_items_by_ids(item_ids)
                                if items:
                                    await add_prompt_group(group_name, items, silent=True)

                        if saved_config:
                            ui.timer(0.1, load_saved_groups, once=True)

                    button_group(
                        back_label='Back',
                        next_label='Next: Alignment',
                        on_back=lambda: ui.navigate.to('/'),
                        on_next=save_and_continue
                    )

                state['assistant'] = TabbedAssistant(
                    initial_message=f"Hello! I'm here to help you build the rubric for '{project['project_title']}'. "
                                    "Ask me to 'suggest prompt groups' to get started, or add your own in the workspace.",
                    on_send=handle_chat,
                    conversations=conversations,
                    on_load_conversation=handle_load_conversation,
                    on_delete_conversation=handle_delete_conversation,
                    on_new_chat=handle_new_chat,
                    stage_context="stage1",
                    on_navigate=handle_navigate,
                )

        # Auto-load conversation if specified in client storage
        if load_conversation_id:
            async def load_and_clear():
                await handle_load_conversation(load_conversation_id)
                # Clear the storage after loading
                if 'load_conversation' in app.storage.client:
                    del app.storage.client['load_conversation']

            ui.timer(0.1, load_and_clear, once=True)

    @ui.page('/reference')  # route path assumed; not referenced elsewhere in this module
    def reference_docs_page():
| """Reference documentation page showing framework and alignment rules.""" | |
| add_head_resources() | |
| auth_client = get_authed_client() | |
| if not auth_client: | |
| build_login_ui() | |
| return | |
| # Load markdown files | |
| from pathlib import Path | |
| backend_dir = Path(__file__).parent.parent / 'backend' | |
| framework_path = backend_dir / 'prompts' / 'context' / 'framework_definition.md' | |
| alignment_path = backend_dir / 'prompts' / 'context' / 'alignment_rules.md' | |
| framework_content = "" | |
| alignment_content = "" | |
| try: | |
| with open(framework_path, 'r', encoding='utf-8') as f: | |
| framework_content = f.read() | |
| except FileNotFoundError: | |
| framework_content = "Framework definition file not found." | |
| try: | |
| with open(alignment_path, 'r', encoding='utf-8') as f: | |
| alignment_content = f.read() | |
| except FileNotFoundError: | |
| alignment_content = "Alignment rules file not found." | |
| with base_layout(active_page='reference', on_logout=handle_logout): | |
| breadcrumbs([('Reference Docs', None)]) | |
| page_header('Reference Documentation', 'Framework definitions and alignment guidelines') | |
| with ui.element('div').classes('pane'): | |
| with ui.element('div').classes('pane-content p-6'): | |
| # Framework Definition Accordion | |
| with ui.expansion('Framework Definition: What is a "Prompt Group"?', icon='description').classes('reference-accordion'): | |
| with ui.element('div').classes('markdown-content'): | |
| ui.markdown(framework_content) | |
| # Alignment Rules Accordion | |
| with ui.expansion('Alignment Philosophy & Rules', icon='rule').classes('reference-accordion'): | |
| with ui.element('div').classes('markdown-content'): | |
| ui.markdown(alignment_content) | |


# Storage secret for NiceGUI sessions
app.storage.secret = os.getenv('NICEGUI_STORAGE_SECRET', 'rubric-ai-secret-key')
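
# Illustrative wiring only (a sketch; the module path and entrypoint are assumptions,
# the real startup code lives elsewhere in the repo):
#
#   from nicegui import ui
#   from frontend.main import init_frontend
#
#   init_frontend()  # registers the @ui.page routes defined above
#   ui.run(storage_secret=os.getenv('NICEGUI_STORAGE_SECRET', 'rubric-ai-secret-key'))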