Spaces: Runtime error
import gradio as gr
from huggingface_hub import InferenceClient
import os

# Model served through the hosted Inference API -- nothing is loaded locally.
MODEL_NAME = "mistralai/Mistral-7B-Instruct-v0.3"

# HF_TOKEN is read from the environment (set as a Space secret); token=None
# falls back to anonymous, rate-limited access.
client = InferenceClient(token=os.environ.get("HF_TOKEN"))

print(f"Using model: {MODEL_NAME} via Inference API")
def chat_with_ai(message, history):
    """Send *message* to the model via the Inference API and extend the chat.

    Args:
        message: The user's new message. ``None`` or whitespace-only input
            is ignored.
        history: List of ``(user, assistant)`` message pairs shown so far.

    Returns:
        A new history list with the ``(message, response)`` pair appended,
        or *history* unchanged when the message is empty.
    """
    # Guard against None as well as blank input; the original called
    # message.strip() first and raised AttributeError on None.
    if not message or not message.strip():
        return history

    # Convert the tuple-pair history into the OpenAI-style messages format.
    messages = []
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    try:
        # Call the hosted Inference API.
        response = client.chat.completions.create(
            model=MODEL_NAME,
            messages=messages,
            max_tokens=512,
            temperature=0.7,
            top_p=0.9
        )
        assistant_response = response.choices[0].message.content
    except Exception as e:
        # Surface API failures in the chat window instead of crashing the UI.
        error_msg = str(e)
        if "loading" in error_msg.lower() or "unavailable" in error_msg.lower():
            assistant_response = "⏳ The model is loading, please try again in a minute..."
        else:
            assistant_response = f"❌ Error: {error_msg}"

    # Return a new list rather than mutating the caller's history in place.
    return history + [(message, assistant_response)]
# Create the Gradio interface
with gr.Blocks(
    title="AI Chat",
    theme=gr.themes.Soft(
        primary_hue="purple",
        secondary_hue="blue",
    )
) as demo:
    gr.Markdown("# 🤖 AI Chat Assistant")
    gr.Markdown("Powered by **Mistral-7B-Instruct**")

    # NOTE(review): `bubble_full_width` was deprecated and later removed from
    # gr.Chatbot in recent Gradio releases and is a likely cause of this
    # Space's runtime error, so it is dropped here.
    chatbot = gr.Chatbot(
        label="Chat",
        height=500,
        show_copy_button=True
    )

    with gr.Row():
        msg = gr.Textbox(
            label="Message",
            placeholder="Type your message here...",
            scale=9,
            container=False
        )
        submit_btn = gr.Button("Send", scale=1, variant="primary")

    with gr.Row():
        clear_btn = gr.Button("Clear Chat", variant="secondary")

    gr.Markdown("---")
    gr.Markdown("*Space made by: you can already see it*")

    # Event handlers: send on Enter or on the button, then clear the textbox.
    msg.submit(chat_with_ai, [msg, chatbot], [chatbot]).then(
        lambda: "", None, [msg]
    )
    submit_btn.click(chat_with_ai, [msg, chatbot], [chatbot]).then(
        lambda: "", None, [msg]
    )
    # Reset the chat display to an empty history.
    clear_btn.click(lambda: [], None, [chatbot])

if __name__ == "__main__":
    demo.launch()