Spaces:
Running
Running
File size: 8,850 Bytes
78a3087 fec7628 78a3087 ae55733 78a3087 ae55733 78a3087 ae55733 78a3087 fec7628 ae55733 fec7628 ae55733 fec7628 ae55733 fec7628 ae55733 fec7628 ae55733 fec7628 78a3087 fec7628 6766c9d 78a3087 fec7628 78a3087 fec7628 78a3087 fec7628 78a3087 fec7628 78a3087 fec7628 78a3087 fec7628 6766c9d fec7628 6766c9d fec7628 78a3087 fec7628 78a3087 6766c9d fec7628 6766c9d fec7628 6766c9d 78a3087 6766c9d 78a3087 fec7628 6766c9d fec7628 6766c9d fec7628 78a3087 6766c9d fec7628 6766c9d 78a3087 fec7628 78a3087 fec7628 6766c9d 78a3087 fec7628 78a3087 fec7628 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 |
# inplace_chat.py
import os
import json
import difflib
import streamlit as st
from huggingface_hub import InferenceClient
# === Page header ===
st.header("In-place feedback chatbot demo")
# === Theme primary color (from .streamlit/config.toml) ===
PRIMARY = "#cb785c" # keep in sync with [theme] primaryColor
# HF Inference client
# Token is read from the environment; NOTE(review): if HF_TOKEN is unset this
# passes token=None and calls may be unauthenticated/rate-limited — confirm the
# Space defines HF_TOKEN as a secret.
hf_token = os.getenv("HF_TOKEN")
client = InferenceClient(model="openai/gpt-oss-20b", token=hf_token)
####### initialize session states ######
# These are managed globally in streamlit_app.py, but we ensure they exist
# so this page also works when run stand-alone.
if "current_chat_id" not in st.session_state:
    st.session_state.current_chat_id = None
if "conversations" not in st.session_state:
    st.session_state.conversations = {}
# Get or create current conversation
chat_id = st.session_state.current_chat_id
if chat_id is None or chat_id not in st.session_state.conversations:
    # This shouldn't happen if streamlit_app.py is the entry point, but just in case
    import uuid
    chat_id = str(uuid.uuid4())
    # Fresh conversation record; keys mirror the setdefault() calls below.
    st.session_state.conversations[chat_id] = {
        "title": "New chat",
        "messages": [],
        "editable": False,           # True while the edit form is open
        "prev_text": "",             # last assistant reply (pre-edit baseline)
        "edited_text": "",           # text currently in the edit textarea
        "original_user_prompt": "",
        "edit_history": []
    }
    st.session_state.current_chat_id = chat_id
conv = st.session_state.conversations[chat_id]
# Ensure all required keys exist in the current conversation
# (covers conversations created by older versions of the app).
conv.setdefault("messages", [])
conv.setdefault("editable", False)
conv.setdefault("prev_text", "")
conv.setdefault("edited_text", "")
conv.setdefault("original_user_prompt", "")
conv.setdefault("title", "New chat")
conv.setdefault("edit_history", []) # List of {removed, added, timestamp}
# Shorthand references
conv.setdefault("messages", [])
chat_history = conv["messages"]
# === Utility: compact "removed vs added" summary for the inplace_prefix ===
### later separate this into parse_diff.py
def summarize_edit(old: str, new: str) -> tuple[str, str]:
    """
    Collapse a word-level diff of *old* vs *new* into two coarse strings.

    All deleted word runs are joined with " / " into ``removed_text`` and all
    inserted runs into ``edited_text``; either side falls back to "(none)"
    when empty.  Good enough for a single bullet like:
    - Replaced {removed_text} with {edited_text}
    """
    before, after = old.split(), new.split()
    opcodes = difflib.SequenceMatcher(a=before, b=after).get_opcodes()
    # 'replace' contributes to both sides; 'equal' to neither.
    removed = [
        chunk
        for tag, a_lo, a_hi, _b_lo, _b_hi in opcodes
        if tag in ("delete", "replace")
        if (chunk := " ".join(before[a_lo:a_hi]).strip())
    ]
    added = [
        chunk
        for tag, _a_lo, _a_hi, b_lo, b_hi in opcodes
        if tag in ("insert", "replace")
        if (chunk := " ".join(after[b_lo:b_hi]).strip())
    ]
    removed_text = " / ".join(removed) if removed else "(none)"
    edited_text = " / ".join(added) if added else "(none)"
    return removed_text, edited_text
def get_detailed_diff(old: str, new: str) -> list[dict]:
    """
    Word-level diff of *old* against *new* as an ordered list of chunks.

    Each chunk is ``{'tag': 'equal' | 'delete' | 'insert', 'text': str}``.
    A 'replace' opcode is emitted as a 'delete' chunk followed by an
    'insert' chunk, so callers only ever see the three tags above.
    """
    source_words = old.split()
    target_words = new.split()
    matcher = difflib.SequenceMatcher(a=source_words, b=target_words)
    chunks: list[dict] = []
    for tag, s_lo, s_hi, t_lo, t_hi in matcher.get_opcodes():
        # equal/delete/replace all carry words from the old text …
        if tag in ("equal", "delete", "replace"):
            chunks.append({
                "tag": "equal" if tag == "equal" else "delete",
                "text": " ".join(source_words[s_lo:s_hi]),
            })
        # … while insert/replace carry words from the new text.
        if tag in ("insert", "replace"):
            chunks.append({
                "tag": "insert",
                "text": " ".join(target_words[t_lo:t_hi]),
            })
    return chunks
# === Render current conversation ===
editable = conv["editable"]
last_index = len(chat_history) - 1
for index, message in enumerate(chat_history):
    # While edit mode is on, the final assistant message is shown inside the
    # edit form below instead of here, so skip rendering it.
    skip_for_editing = (
        editable and index == last_index and message["role"] == "assistant"
    )
    if skip_for_editing:
        continue
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
# === New user message: append, display, and request a reply ===
if prompt := st.chat_input("Send a message"):
    conv["original_user_prompt"] = prompt
    chat_history.append({"role": "user", "content": prompt})
    # Update conversation title with first user message
    if conv["title"] == "New chat" and len(chat_history) == 1:
        conv["title"] = prompt[:50]  # Use first 50 chars of first message
    with st.chat_message("user"):
        st.markdown(prompt)
    # BUG FIX: previously only the latest prompt was sent, so the model had no
    # memory of earlier turns even though chat_history is maintained.  Send the
    # whole conversation; the new user message is already its last entry.
    messages_for_api = [
        {"role": "system", "content": "You are a helpful assistant."},
        *({"role": m["role"], "content": m["content"]} for m in chat_history),
    ]
    with st.chat_message("assistant"):
        with st.spinner("Thinking…"):
            # Keep the try body minimal: only the network call can raise here.
            try:
                response = client.chat_completion(
                    messages=messages_for_api,
                    max_tokens=60000,
                    temperature=0.7,
                    top_p=0.95,
                    stream=False,
                )
                reply = response.choices[0].message["content"]
            except Exception as e:
                st.error(f"Error: {e}")
            else:
                st.markdown(reply)
                chat_history.append(
                    {"role": "assistant", "content": reply}
                )
                # Baseline for the in-place edit feature below.
                conv["prev_text"] = reply
# Edit features
# Only offer editing when the conversation ends with an assistant reply.
if chat_history and chat_history[-1]["role"] == "assistant":
    # button
    # Right-aligned pencil button; clicking flips the conversation into edit
    # mode and reruns so the render loop above hides the last reply.
    with st.chat_message("assistant"):
        col_spacer, col_edit = st.columns([0.93, 0.07])
        with col_edit:
            if not conv["editable"]:
                if st.button("✏️", key="edit_btn", help="Edit response"):
                    conv["editable"] = True
                    st.rerun()
######## Edit mode #########
# Shows the last reply in a textarea inside a form; `finished_edit` is True
# only on the rerun triggered by the submit button.
if conv["editable"]:
    st.markdown('<div class="editable-on">', unsafe_allow_html=True)
    with st.chat_message("assistant"):
        st.caption("Editing the last response…")
        with st.form("edit_form", clear_on_submit=False):
            # Prefill with the pre-edit text so the user tweaks in place.
            conv["edited_text"] = st.text_area(
                " ",
                value=conv["prev_text"],
                height=500,
                label_visibility="collapsed",
                key="edit_textarea",
            )
            finished_edit = st.form_submit_button(
                "Finish edit",
                icon=":material/edit:",
                use_container_width=True
            )
    st.markdown("</div>", unsafe_allow_html=True)
# === Handle edit submission (backend: in-place continuation) ===
# `finished_edit` only exists when the form above was rendered; the short
# circuit on conv["editable"] keeps this safe when it wasn't.
if conv["editable"] and finished_edit:
    # Drop the pre-edit assistant message; the edited version replaces it.
    if chat_history and chat_history[-1]["role"] == "assistant":
        chat_history.pop()
    removed_text, added_text = summarize_edit(conv["prev_text"], conv["edited_text"])
    # Save to edit history (coarse summary + word-level diff + timestamp).
    import datetime
    diff_chunks = get_detailed_diff(conv["prev_text"], conv["edited_text"])
    conv["edit_history"].append({
        "removed": removed_text,
        "added": added_text,
        "timestamp": datetime.datetime.now().strftime("%H:%M:%S"),
        "original": conv["prev_text"],
        "edited": conv["edited_text"],
        "diff_chunks": diff_chunks
    })
    system_prompt = "The user edited your previous answer. Only continue from the assistant message above. Do NOT rewrite or repeat it."
    # Exit edit mode before generation
    conv["editable"] = False
    with st.chat_message("assistant"):
        with st.spinner("Continuing from your edit…"):
            try:
                resp = client.chat_completion(
                    messages=[
                        {"role": "system", "content": system_prompt},
                        {"role": "assistant", "content": conv["edited_text"]},
                        {"role": "user", "content": "Continue exactly from the assistant message above. Do not restate any of it; just append."},
                    ],
                    max_tokens=60000,
                    temperature=0.7,
                    top_p=0.95,
                )
                generated = resp.choices[0].message["content"]
            except Exception as e:
                st.error(f"Error while continuing from edit: {e}")
                # BUG FIX: on failure the old code restored the pre-edit
                # prev_text, silently discarding the user's edit.  Keep the
                # edited text as the last assistant message instead.
                conv["prev_text"] = conv["edited_text"]
                chat_history.append(
                    {"role": "assistant", "content": conv["edited_text"]}
                )
            else:
                combined = conv["edited_text"] + '\n\n' + generated
                conv["prev_text"] = combined
                chat_history.append(
                    {"role": "assistant", "content": combined}
                )
                # BUG FIX: st.rerun() works by raising RerunException (an
                # Exception subclass).  Inside the old try/except Exception it
                # was caught as an "error", which showed an error banner and
                # appended a duplicate message.  Calling it in the else clause
                # lets the rerun propagate normally.
                st.rerun()