| import os |
| import streamlit as st |
| from transformers import AutoModel, AutoTokenizer |
|
|
# --- Page header ---
st.title("HuggingFace Model Loader & Saver")
st.write("Load a model from HuggingFace and save it locally. Edit parameters below:")


# --- User-configurable parameters ---
# Primary model id; "/"-namespaced repo ids (e.g. "org/model") are also accepted.
model_name = st.text_input("Model Name", value="openai-gpt", help="Enter the HuggingFace model name (e.g., openai-gpt)")
# Base directory under which each model gets its own subdirectory.
save_dir = st.text_input("Save Directory", value="./hugging", help="Local directory to save the model")
# Optional extra models processed after the primary one.
additional_models = st.multiselect(
    "Additional Models",
    options=["bert-base-uncased", "gpt2", "roberta-base"],
    help="Select additional models to load and save"
)
|
|
def _load_and_save(name: str, base_dir: str) -> None:
    """Download model *name* from the HuggingFace Hub and persist it under *base_dir*.

    Saves BOTH the model weights and the tokenizer into
    ``base_dir/<name with "/" flattened to "_">`` so the directory can later be
    reloaded with ``AutoModel.from_pretrained`` / ``AutoTokenizer.from_pretrained``.
    Raises whatever ``transformers`` raises on failure; callers handle/report it.
    """
    st.write(f"Loading **{name}** ...")
    model = AutoModel.from_pretrained(name)
    tokenizer = AutoTokenizer.from_pretrained(name)
    # "/" appears in namespaced repo ids (e.g. "org/model"); flatten it so the
    # id maps to a single directory component.
    save_path = os.path.join(base_dir, name.replace("/", "_"))
    os.makedirs(save_path, exist_ok=True)
    model.save_pretrained(save_path)
    # Fix: the tokenizer was previously loaded but never persisted, leaving the
    # saved directory unloadable as a tokenizer.
    tokenizer.save_pretrained(save_path)
    st.success(f"Model **{name}** saved to `{save_path}`")


if st.button("Load and Save Model"):
    st.write("### Processing Primary Model")
    try:
        _load_and_save(model_name, save_dir)
    # Broad catch is deliberate at this UI boundary: any hub/network/disk
    # failure is surfaced to the user instead of crashing the app.
    except Exception as e:
        st.error(f"Error loading/saving model **{model_name}**: {e}")

    # Fix: this block was previously OUTSIDE the button guard, so merely
    # selecting a model in the multiselect triggered downloads on every
    # Streamlit rerun. Additional models now run only on button press.
    if additional_models:
        st.write("### Processing Additional Models")
        for m in additional_models:
            try:
                _load_and_save(m, save_dir)
            except Exception as e:
                st.error(f"Error loading/saving model **{m}**: {e}")