Spaces:
Running on Zero
Running on Zero
#!/usr/bin/env python
"""Download and cache the segmind/tiny-sd model for offline use.

Environment overrides (defaults preserve the original hard-coded values):
    TINY_SD_CACHE_DIR -- Hugging Face cache directory
                         (default: d:/VSC Codes/Bild/.cache/hf)
    TINY_SD_MODEL_ID  -- model repo to fetch (default: segmind/tiny-sd)

Exit status: 0 on successful download/cache, 1 on any failure.
"""
import os
import sys
from pathlib import Path

# Defaults match the original script; env vars let the script run on other
# machines without editing the source.
DEFAULT_CACHE_DIR = "d:/VSC Codes/Bild/.cache/hf"
DEFAULT_MODEL_ID = "segmind/tiny-sd"


def main() -> int:
    """Download the pipeline into the local HF cache; return a process exit code."""
    cache_dir = Path(os.environ.get("TINY_SD_CACHE_DIR", DEFAULT_CACHE_DIR))
    cache_dir.mkdir(parents=True, exist_ok=True)
    # HF_HOME is read by huggingface_hub at import time, so it must be set
    # before the torch/diffusers imports below.
    os.environ['HF_HOME'] = str(cache_dir)
    print(f"Cache directory: {cache_dir}")
    print()
    try:
        import torch
        from diffusers import StableDiffusionPipeline

        device = 'cuda' if torch.cuda.is_available() else 'cpu'
        print(f'PyTorch: {torch.__version__}')
        print(f'CUDA available: {torch.cuda.is_available()}')
        print(f'Device: {device}')
        print()
        model_id = os.environ.get("TINY_SD_MODEL_ID", DEFAULT_MODEL_ID)
        print(f'Downloading {model_id}...')
        print('This may take a few minutes...')
        print()
        # fp16 halves GPU memory; CPU inference requires fp32.
        pipe = StableDiffusionPipeline.from_pretrained(
            model_id,
            torch_dtype=torch.float16 if device == 'cuda' else torch.float32,
            local_files_only=False,  # allow the network fetch on first run
        )
        pipe.to(device)
        print('✓ SUCCESS: Model cached successfully!')
        print(f'Pipeline: {type(pipe).__name__}')
        print(f'Model cache location: {cache_dir}')
        print()
        print('Backend can now run offline with local cache.')
        return 0
    except Exception as e:
        # Top-level boundary: report the failure and signal it via exit code.
        print(f'✗ FAILED: {type(e).__name__}: {e}')
        import traceback
        traceback.print_exc()
        return 1


# Guard so that importing this module does not trigger a multi-minute
# model download; the original ran everything at module level.
if __name__ == "__main__":
    sys.exit(main())