# inplace_chat.py
import os
import json
import difflib
import streamlit as st
from huggingface_hub import InferenceClient
# === Page header ===
st.header("In-place feedback chatbot demo")
# === Theme primary color (from .streamlit/config.toml) ===
PRIMARY = "#cb785c" # keep in sync with [theme] primaryColor
# HF Inference client
hf_token = os.getenv("HF_TOKEN")
client = InferenceClient(model="openai/gpt-oss-20b", token=hf_token)
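# Optional guard (sketch, not in the original app): fail fast with a readable message
# when HF_TOKEN is missing instead of letting the first chat_completion call error out.
# if not hf_token:
#     st.error("HF_TOKEN is not set; add it to your environment or Space secrets.")
#     st.stop()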
# === Initialize session state ===
# These are managed globally in streamlit_app.py, but we ensure they exist
if "current_chat_id" not in st.session_state:
    st.session_state.current_chat_id = None
if "conversations" not in st.session_state:
    st.session_state.conversations = {}

# Get or create current conversation
chat_id = st.session_state.current_chat_id
if chat_id is None or chat_id not in st.session_state.conversations:
    # This shouldn't happen if streamlit_app.py is the entry point, but just in case
    import uuid
    chat_id = str(uuid.uuid4())
    st.session_state.conversations[chat_id] = {
        "title": "New chat",
        "messages": [],
        "editable": False,
        "prev_text": "",
        "edited_text": "",
        "original_user_prompt": "",
        "edit_history": [],
    }
    st.session_state.current_chat_id = chat_id

conv = st.session_state.conversations[chat_id]

# Ensure all required keys exist in the current conversation
conv.setdefault("messages", [])
conv.setdefault("editable", False)
conv.setdefault("prev_text", "")
conv.setdefault("edited_text", "")
conv.setdefault("original_user_prompt", "")
conv.setdefault("title", "New chat")
conv.setdefault("edit_history", [])  # List of {removed, added, timestamp, original, edited, diff_chunks}

# Shorthand references
chat_history = conv["messages"]
# === Utility: compact "removed vs added" summary for the inplace_prefix ===
# TODO: later separate this into parse_diff.py
def summarize_edit(old: str, new: str) -> tuple[str, str]:
    """
    Produce coarse 'removed_text' and 'edited_text' by joining all deletions and insertions.
    Good enough for a single bullet like: - Replaced {removed_text} with {edited_text}
    """
    old_words = old.split()
    new_words = new.split()
    sm = difflib.SequenceMatcher(a=old_words, b=new_words)
    removed_chunks, added_chunks = [], []
    for tag, i1, i2, j1, j2 in sm.get_opcodes():
        if tag in ("delete", "replace"):
            chunk = " ".join(old_words[i1:i2]).strip()
            if chunk:
                removed_chunks.append(chunk)
        if tag in ("insert", "replace"):
            chunk = " ".join(new_words[j1:j2]).strip()
            if chunk:
                added_chunks.append(chunk)
    removed_text = " / ".join(removed_chunks) if removed_chunks else "(none)"
    edited_text = " / ".join(added_chunks) if added_chunks else "(none)"
    return removed_text, edited_text
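# Illustrative behavior (word-level diff; multiple changed spans are joined with " / "):
#   summarize_edit("use a red border", "use a blue dashed border")
#   -> ("red", "blue dashed")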
def get_detailed_diff(old: str, new: str) -> list[dict]:
    """
    Return a list of word-level diff chunks, each shaped like {'tag': str, 'text': str}.
    Tags are 'equal', 'delete', or 'insert'; a 'replace' opcode is split into a
    'delete' chunk followed by an 'insert' chunk.
    """
    old_words = old.split()
    new_words = new.split()
    sm = difflib.SequenceMatcher(a=old_words, b=new_words)
    diff_chunks = []
    for tag, i1, i2, j1, j2 in sm.get_opcodes():
        if tag == 'equal':
            diff_chunks.append({'tag': 'equal', 'text': ' '.join(old_words[i1:i2])})
        elif tag == 'delete':
            diff_chunks.append({'tag': 'delete', 'text': ' '.join(old_words[i1:i2])})
        elif tag == 'insert':
            diff_chunks.append({'tag': 'insert', 'text': ' '.join(new_words[j1:j2])})
        elif tag == 'replace':
            diff_chunks.append({'tag': 'delete', 'text': ' '.join(old_words[i1:i2])})
            diff_chunks.append({'tag': 'insert', 'text': ' '.join(new_words[j1:j2])})
    return diff_chunks
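# Illustrative behavior: a 'replace' opcode is emitted as a delete chunk followed by an
# insert chunk, so callers only ever see 'equal' / 'delete' / 'insert' tags:
#   get_detailed_diff("use a red border", "use a blue dashed border")
#   -> [{'tag': 'equal', 'text': 'use a'},
#       {'tag': 'delete', 'text': 'red'},
#       {'tag': 'insert', 'text': 'blue dashed'},
#       {'tag': 'equal', 'text': 'border'}]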
# === Render current conversation ===
editable = conv["editable"]
for i, msg in enumerate(chat_history):
    # While editing, hide the last assistant message; it is shown inside the edit form instead
    if editable and i == len(chat_history) - 1 and msg["role"] == "assistant":
        continue
    with st.chat_message(msg["role"]):
        st.markdown(msg["content"])
if prompt := st.chat_input("Send a message"):
    conv["original_user_prompt"] = prompt
    chat_history.append({"role": "user", "content": prompt})

    # Update conversation title with first user message
    if conv["title"] == "New chat" and len(chat_history) == 1:
        conv["title"] = prompt[:50]  # Use first 50 chars of first message

    with st.chat_message("user"):
        st.markdown(prompt)

    messages_for_api = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": prompt},
    ]

    with st.chat_message("assistant"):
        with st.spinner("Thinking…"):
            try:
                response = client.chat_completion(
                    messages=messages_for_api,
                    max_tokens=60000,
                    temperature=0.7,
                    top_p=0.95,
                    stream=False,
                )
                reply = response.choices[0].message["content"]
                st.markdown(reply)
                chat_history.append({"role": "assistant", "content": reply})
                conv["prev_text"] = reply
            except Exception as e:
                st.error(f"Error: {e}")
# === Edit button: enter edit mode for the last assistant reply ===
if chat_history and chat_history[-1]["role"] == "assistant":
    with st.chat_message("assistant"):
        col_spacer, col_edit = st.columns([0.93, 0.07])
        with col_edit:
            if not conv["editable"]:
                if st.button("✏️", key="edit_btn", help="Edit response"):
                    conv["editable"] = True
                    st.rerun()
# === Edit mode: show the last reply in an editable text area ===
if conv["editable"]:
    st.markdown('<div class="editable-on">', unsafe_allow_html=True)
    with st.chat_message("assistant"):
        st.caption("Editing the last response…")
        with st.form("edit_form", clear_on_submit=False):
            conv["edited_text"] = st.text_area(
                " ",
                value=conv["prev_text"],
                height=500,
                label_visibility="collapsed",
                key="edit_textarea",
            )
            finished_edit = st.form_submit_button(
                "Finish edit",
                icon=":material/edit:",
                use_container_width=True,
            )
    st.markdown("</div>", unsafe_allow_html=True)
# === Handle edit submission (backend: in-place continuation) ===
if conv["editable"] and finished_edit:
    # Drop the old assistant message; it is replaced by the edited + continued version
    if chat_history and chat_history[-1]["role"] == "assistant":
        chat_history.pop()

    removed_text, added_text = summarize_edit(conv["prev_text"], conv["edited_text"])

    # Save to edit history
    import datetime
    diff_chunks = get_detailed_diff(conv["prev_text"], conv["edited_text"])
    conv["edit_history"].append({
        "removed": removed_text,
        "added": added_text,
        "timestamp": datetime.datetime.now().strftime("%H:%M:%S"),
        "original": conv["prev_text"],
        "edited": conv["edited_text"],
        "diff_chunks": diff_chunks,
    })

    system_prompt = (
        "The user edited your previous answer. Only continue from the assistant "
        "message above. Do NOT rewrite or repeat it."
    )

    # Exit edit mode before generation
    conv["editable"] = False

    with st.chat_message("assistant"):
        with st.spinner("Continuing from your edit…"):
            try:
                resp = client.chat_completion(
                    messages=[
                        {"role": "system", "content": system_prompt},
                        {"role": "assistant", "content": conv["edited_text"]},
                        {"role": "user", "content": "Continue exactly from the assistant message above. Do not restate any of it; just append."},
                    ],
                    max_tokens=60000,
                    temperature=0.7,
                    top_p=0.95,
                )
                generated = resp.choices[0].message["content"]
                combined = conv["edited_text"] + "\n\n" + generated
                conv["prev_text"] = combined
                chat_history.append({"role": "assistant", "content": combined})
                st.rerun()
            except Exception as e:
                st.error(f"Error while continuing from edit: {e}")
                # Restore the pre-edit reply so the conversation still ends with an assistant turn
                chat_history.append({"role": "assistant", "content": conv["prev_text"]})
                conv["editable"] = False