Commit 40952ad
Parent(s): 4c7cebf
Make Blocks init backward compatible with any Gradio version
Files changed:
- app.py (+49 -17)
- requirements.txt (+5 -6)
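The commit handles compatibility at runtime by probing for newer keyword arguments and catching `TypeError` (see the `gr.Blocks` and `gr.Chatbot` hunks below). A hedged alternative, not used in this commit, is to gate on the installed Gradio version once up front; the sketch below assumes the `packaging` package is available and uses a placeholder threshold, not a verified feature cut-off.

```python
import gradio as gr
from packaging.version import Version  # assumed available alongside pip

# Hypothetical version gate: "4.0" is a placeholder, not a verified cut-off
# for any specific Gradio feature.
MODERN_GRADIO = Version(gr.__version__) >= Version("4.0")

if MODERN_GRADIO:
    demo = gr.Blocks(css="/* custom css */", title="Docs Navigator MCP")
else:
    demo = gr.Blocks(title="Docs Navigator MCP")
```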
app.py
CHANGED
@@ -81,7 +81,7 @@ def get_available_files() -> List[str]:
     return sorted(files)
 
 
-def chat_with_docs(message: str, history: List[dict], system_prompt: str = None) -> str:
+def chat_with_docs(message: str, history: List, system_prompt: str = None) -> str:
     """Process user message and generate AI response."""
     if not ANTHROPIC_API_KEY:
         return "⚠️ Please set your ANTHROPIC_API_KEY in the .env file."
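The first hunk only loosens the type hint from `List[dict]` to a bare `List` so both history shapes pass. If stricter typing is wanted, a union alias would express the same intent; this is a sketch, not code from the repo:

```python
from typing import Dict, List, Optional, Tuple, Union

# Hypothetical alias covering both shapes Gradio may hand back:
# message dicts ({"role": ..., "content": ...}) or (user, assistant) tuples.
HistoryItem = Union[Dict[str, str], Tuple[str, str]]

def chat_with_docs(message: str, history: List[HistoryItem],
                   system_prompt: Optional[str] = None) -> str:
    ...
```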
@@ -89,11 +89,20 @@ def chat_with_docs(message: str, history: List[dict], system_prompt: str = None)
     # Load documentation context
     docs_context = load_documentation()
 
-    # Build conversation history
+    # Build conversation history - support both tuple and dict formats
     messages = []
-    for ...
-        if ...
-            ...
+    for item in history:
+        if isinstance(item, dict):
+            # Messages format (Gradio 6.x)
+            if item.get("role") in ["user", "assistant"]:
+                messages.append({"role": item["role"], "content": item["content"]})
+        elif isinstance(item, (list, tuple)) and len(item) == 2:
+            # Tuple format (Gradio 4.x)
+            user_msg, assistant_msg = item
+            if user_msg:
+                messages.append({"role": "user", "content": user_msg})
+            if assistant_msg:
+                messages.append({"role": "assistant", "content": assistant_msg})
 
     messages.append({"role": "user", "content": message})
 
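The dual-format handling above could also live in a small helper so the same normalization is reusable elsewhere in the app. A minimal sketch mirroring the added logic (the helper name is illustrative, not from the repo):

```python
from typing import Any, Dict, List

def normalize_history(history: List[Any]) -> List[Dict[str, str]]:
    """Convert Gradio chat history (message dicts or tuples) to role/content messages."""
    messages: List[Dict[str, str]] = []
    for item in history:
        if isinstance(item, dict):
            if item.get("role") in ("user", "assistant"):
                messages.append({"role": item["role"], "content": item["content"]})
        elif isinstance(item, (list, tuple)) and len(item) == 2:
            user_msg, assistant_msg = item
            if user_msg:
                messages.append({"role": "user", "content": user_msg})
            if assistant_msg:
                messages.append({"role": "assistant", "content": assistant_msg})
    return messages

# Both calls yield the same normalized list:
# normalize_history([("hi", "hello")])
# normalize_history([{"role": "user", "content": "hi"}, {"role": "assistant", "content": "hello"}])
```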
@@ -527,11 +536,19 @@ label, .label {
 }
 """
 
-# Create the Gradio interface with dark theme
-with gr.Blocks(
-    css=custom_css,
-    ...
-    ...
+# Create the Gradio interface with dark theme - backward compatible
+try:
+    # Try Gradio 4.x+ with css parameter
+    demo = gr.Blocks(
+        css=custom_css,
+        title="Docs Navigator MCP - AI Documentation Assistant"
+    )
+except TypeError:
+    # Fallback for older Gradio versions
+    demo = gr.Blocks(title="Docs Navigator MCP - AI Documentation Assistant")
+    # CSS will be injected via HTML if needed
+
+with demo:
 
     # Header with modern dark gradient
     gr.HTML("""
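The fallback branch only notes that "CSS will be injected via HTML if needed". One way that injection could be realized, sketched under the assumption that a raw `<style>` tag passed through `gr.HTML` is acceptable on the target Gradio versions (the variable names are stand-ins, not the app's exact code):

```python
import gradio as gr

custom_css = "body { background: #0f1117; }"  # stand-in for the app's real custom_css

try:
    demo = gr.Blocks(css=custom_css, title="Docs Navigator MCP")
    css_fallback_needed = False
except TypeError:
    demo = gr.Blocks(title="Docs Navigator MCP")
    css_fallback_needed = True

with demo:
    if css_fallback_needed:
        # Older versions: ship the stylesheet as raw HTML instead of the css kwarg
        gr.HTML(f"<style>{custom_css}</style>")
```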
@@ -551,11 +568,18 @@ with gr.Blocks(
         </div>
     """)
 
-    ...
-    ...
-    ...
-    ...
-    ...
+    try:
+        chatbot = gr.Chatbot(
+            height=550,
+            show_label=False,
+            type="messages"
+        )
+    except TypeError:
+        # Fallback for older Gradio versions
+        chatbot = gr.Chatbot(
+            height=550,
+            show_label=False
+        )
 
     with gr.Row():
         msg = gr.Textbox(
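Catching `TypeError` works, but it also swallows any unrelated `TypeError` raised while the component initializes. A hedged alternative is to inspect the constructor's signature once and only pass `type="messages"` when it is actually accepted; this assumes `gr.Chatbot.__init__` declares its keyword arguments explicitly.

```python
import inspect
import gradio as gr

# Sketch of feature detection by signature rather than try/except.
chatbot_kwargs = {"height": 550, "show_label": False}
if "type" in inspect.signature(gr.Chatbot.__init__).parameters:
    chatbot_kwargs["type"] = "messages"

chatbot = gr.Chatbot(**chatbot_kwargs)
```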
@@ -703,8 +727,16 @@ with gr.Blocks(
             return "", chat_history
 
         response = chat_with_docs(message, chat_history, system_prompt if system_prompt.strip() else None)
-        ...
-        ...
+
+        # Detect format and append accordingly
+        if chat_history and isinstance(chat_history[0], dict):
+            # Messages format
+            chat_history.append({"role": "user", "content": message})
+            chat_history.append({"role": "assistant", "content": response})
+        else:
+            # Tuple format
+            chat_history.append((message, response))
+
         return "", chat_history
 
     msg.submit(respond, [msg, chatbot, custom_system_prompt], [msg, chatbot])
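The new branch in `respond` mirrors the normalization in `chat_with_docs`: it inspects the first history entry to decide which shape to append. A self-contained sketch of just that append step (the helper name is illustrative; the real `respond` also calls the model):

```python
def append_turn(chat_history, message, response):
    """Append one user/assistant exchange in whichever format the history uses."""
    if chat_history and isinstance(chat_history[0], dict):
        # Messages format
        chat_history.append({"role": "user", "content": message})
        chat_history.append({"role": "assistant", "content": response})
    else:
        # Tuple format (also the branch taken when the history is empty)
        chat_history.append((message, response))
    return chat_history

print(append_turn([("hi", "hello")], "next?", "sure"))
print(append_turn([{"role": "user", "content": "hi"}], "next?", "sure"))
```

Note that an empty history falls through to the tuple branch, matching the behavior of the added code.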
requirements.txt
CHANGED
@@ -1,6 +1,5 @@
-anthropic
-python-dotenv
-PyPDF2
-mcp[cli]
-gradio
-huggingface-hub>=0.26.0
+anthropic
+python-dotenv
+PyPDF2
+mcp[cli]
+gradio
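With `gradio` left unpinned (and `huggingface-hub>=0.26.0` dropped), the version the Space resolves can change between rebuilds. A small, hedged sketch for logging what was actually installed at startup, which makes compatibility issues easier to trace:

```python
from importlib.metadata import version

# Log the resolved Gradio version at startup.
print(f"Running with gradio {version('gradio')}")
```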