# OpenRouter-UI / app.py
# Author: Alibrown — "Create app.py" (commit f87cc9b, verified)
import os
import streamlit as st
import tempfile
import requests
import json
# ----------------------------------------------------
# Workaround for read-only home directories (e.g. hosted
# Spaces): route Streamlit's config/telemetry writes to a
# writable location under /tmp.
# ----------------------------------------------------
# 1. A temporary, guaranteed-writable directory for Streamlit state.
TEMP_STREAMLIT_HOME = os.path.join(tempfile.gettempdir(), "st_config_workaround")
os.makedirs(TEMP_STREAMLIT_HOME, exist_ok=True)

# 2. Point Streamlit at that directory and opt out of usage stats.
os.environ["STREAMLIT_HOME"] = TEMP_STREAMLIT_HOME
os.environ["STREAMLIT_GATHER_USAGE_STATS"] = "false"

# 3. Drop a minimal config.toml there so Streamlit does not attempt
#    to create one in a non-writable default location.
CONFIG_PATH = os.path.join(TEMP_STREAMLIT_HOME, "config.toml")
CONFIG_CONTENT = """
[browser]
gatherUsageStats = false
"""
try:
    # "x" mode: create only if absent (EAFP replacement for the
    # exists-check-then-write pattern).
    with open(CONFIG_PATH, "x") as f:
        f.write(CONFIG_CONTENT)
except FileExistsError:
    pass  # A config already exists; leave it untouched.
except Exception as e:
    print(f"WARNING: Could not write config.toml: {e}")
# ----------------------------------------------------
# End of Workarounds
# ----------------------------------------------------
# --- Configuration ---
# NOTE: set_page_config must be the first Streamlit command executed.
st.set_page_config(page_title="OpenRouter Minimal Chat UI", layout="wide")
# Base URL for every OpenRouter REST endpoint used below.
OPENROUTER_API_BASE = "https://openrouter.ai/api/v1"
# --- Page Title ---
st.title("๐Ÿ’ธ OpenRouter Minimal Chat Interface")
st.markdown("""
**Welcome to the OpenRouter Minimal Chat Interface!**
Chat with **Free-Tier** models via the OpenRouter API. Text-only chat.
""")
# --- Session State Management ---
# Persist the chat transcript across Streamlit reruns; each entry is a
# {"role": ..., "content": ...} dict.
if "messages" not in st.session_state:
    st.session_state.messages = []
# --- Context-Length Fetch ---
def fetch_model_contexts(api_key):
    """Fetch the context window size of every model visible to this key.

    Args:
        api_key: OpenRouter API key. If falsy, no request is made.

    Returns:
        dict mapping model id -> context_length (int). Empty when the key
        is missing, the request fails, the response is non-200, or the
        payload is malformed — callers fall back to a default size.
    """
    if not api_key:
        return {}
    headers = {"Authorization": f"Bearer {api_key}"}
    try:
        res = requests.get(
            f"{OPENROUTER_API_BASE}/models", headers=headers, timeout=10
        )
        if res.status_code != 200:
            return {}
        # 4096 is a conservative fallback when the API omits context_length.
        return {
            m.get("id"): m.get("context_length", 4096)
            for m in res.json().get("data", [])
        }
    except Exception:
        # Best-effort lookup only: swallow network/parse errors so the UI
        # still renders with the fallback context length.
        return {}
# --- Sidebar ---
with st.sidebar:
    st.header("โš™๏ธ API Settings")
    # Key lives only in this session's memory; it is never persisted.
    api_key = st.text_input("OpenRouter API Key", type="password")

    # --- Manual Model Selection ---
    # Curated free-tier model ids (":free" variants incur no cost).
    FREE_MODEL_LIST = [
        "cognitivecomputations/dolphin-mistral-24b-venice-edition:free",
        "deepseek/deepseek-chat-v3.1:free",
        "nvidia/nemotron-nano-9b-v2:free",
        "google/gemma-3-27b-it:free",
        "openai/gpt-oss-20b:free",
        "qwen/qwen3-coder:free",
        "qwen/qwen2.5-vl-72b-instruct:free",
        "nousresearch/deephermes-3-llama-3-8b-preview:free",
    ]
    model = st.selectbox("Select a Model", FREE_MODEL_LIST, index=0)

    # Get context length with fallback (4096 when the lookup fails).
    model_contexts = fetch_model_contexts(api_key)
    default_ctx = model_contexts.get(model, 4096)

    temperature = st.slider("Temperature", 0.0, 1.0, 0.7)
    # Slider ceiling is the model context (hard cap 128k); default is
    # 512 tokens or the full context, whichever is smaller.
    max_tokens = st.slider(
        f"Max Output Tokens (max {default_ctx})",
        1,
        min(default_ctx, 128000),
        min(512, default_ctx)
    )
    st.caption(f"๐Ÿ”ข Model Context Length (Fallback 4096): {default_ctx}")

    if st.button("๐Ÿ”„ Reset Chat"):
        st.session_state.messages = []
        st.success("Chat history cleared.")
        # FIX: st.experimental_rerun() was removed in Streamlit >= 1.30;
        # prefer st.rerun() and only fall back on older versions.
        if hasattr(st, "rerun"):
            st.rerun()
        else:
            st.experimental_rerun()

    st.markdown("""
---
**Minimal UI:** Text chat only.
""")
# --- Display Chat History ---
# Replay the stored transcript so it survives Streamlit's rerun cycle.
for entry in st.session_state.messages:
    with st.chat_message(entry["role"]):
        st.markdown(entry["content"])
# --- API Request Function ---
def call_openrouter(model, messages, temp, max_tok, key):
    """POST a chat completion to OpenRouter and return the reply text.

    Args:
        model: OpenRouter model id (e.g. "qwen/qwen3-coder:free").
        messages: full conversation as a list of {"role", "content"} dicts.
        temp: sampling temperature.
        max_tok: max_tokens cap for the completion.
        key: OpenRouter API key.

    Returns:
        The assistant message content (str).

    Raises:
        Exception: on HTTP errors, timeouts, or a malformed response body.
    """
    headers = {
        "Authorization": f"Bearer {key}",
        "Content-Type": "application/json",
        # NOTE(review): OpenRouter's docs name this header "HTTP-Referer"
        # for app attribution — confirm "Referer" is equally accepted.
        "Referer": "https://aicodecraft.io",
        "X-Title": "OpenRouter-Minimal-Interface",
    }
    payload = {
        "model": model,
        "messages": messages,
        "temperature": temp,
        "max_tokens": max_tok,
    }
    # FIX: json= handles serialization, and a timeout prevents the UI
    # from hanging indefinitely on a stalled connection.
    res = requests.post(
        f"{OPENROUTER_API_BASE}/chat/completions",
        headers=headers,
        json=payload,
        timeout=120,
    )
    if res.status_code == 200:
        try:
            return res.json()["choices"][0]["message"]["content"]
        except (KeyError, IndexError, TypeError):
            raise Exception("Invalid API response: Could not extract response text.")
    # Non-200: surface the API's own error message when the body is JSON.
    try:
        err = res.json()
        msg = err.get("error", {}).get("message", res.text)
    except ValueError:  # FIX: was a bare except clause
        msg = res.text
    raise Exception(f"API Error {res.status_code}: {msg}")
# --- Chat Input ---
if prompt := st.chat_input("Your message..."):
    # Refuse to send anything without credentials.
    if not api_key:
        st.warning("Please enter your OpenRouter API Key in the sidebar.")
        st.stop()

    # Record and immediately echo the user's turn.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Ship the whole transcript so the model keeps conversational context.
    history = [
        {"role": m["role"], "content": m["content"]}
        for m in st.session_state.messages
    ]

    # Generate the assistant's reply; errors are rendered inline and
    # stored in the transcript rather than crashing the app.
    with st.chat_message("assistant"):
        with st.spinner(f"Querying {model}..."):
            try:
                reply = call_openrouter(model, history, temperature, max_tokens, api_key)
                st.markdown(reply)
                st.session_state.messages.append({"role": "assistant", "content": reply})
            except Exception as e:
                st.error(str(e))
                st.session_state.messages.append({"role": "assistant", "content": f"โŒ {str(e)}"})