File size: 1,330 Bytes
32c5da4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
#!/usr/bin/env python
"""Download and cache tiny-sd model for offline use."""

import os
import sys
from pathlib import Path

# Cache directory for Hugging Face downloads.
# Generalized: overridable via the BILD_HF_CACHE environment variable so the
# script works outside this one machine; the default is the original
# project-local path, so existing behavior is unchanged.
cache_dir = Path(os.environ.get("BILD_HF_CACHE", "d:/VSC Codes/Bild/.cache/hf"))
cache_dir.mkdir(parents=True, exist_ok=True)
# HF_HOME must be exported BEFORE huggingface_hub/diffusers are imported
# (they resolve the cache location at import time), which is why this sits
# above the try-block that imports them.
os.environ['HF_HOME'] = str(cache_dir)

print(f"Cache directory: {cache_dir}")
print()

try:
    import torch
    from diffusers import StableDiffusionPipeline

    # Probe the hardware once and derive everything from it.
    cuda_ok = torch.cuda.is_available()
    device = "cuda" if cuda_ok else "cpu"
    print(f"PyTorch: {torch.__version__}")
    print(f"CUDA available: {cuda_ok}")
    print(f"Device: {device}")
    print()

    model_id = "segmind/tiny-sd"
    print(f"Downloading {model_id}...")
    print("This may take a few minutes...")
    print()

    # Half precision only pays off on GPU; CPU inference needs float32.
    dtype = torch.float16 if device == "cuda" else torch.float32
    pipe = StableDiffusionPipeline.from_pretrained(
        model_id,
        torch_dtype=dtype,
        local_files_only=False,  # permit the network fetch on first run
    )
    pipe.to(device)

    print("✓ SUCCESS: Model cached successfully!")
    print(f"Pipeline: {type(pipe).__name__}")
    print(f"Model cache location: {cache_dir}")
    print()
    print("Backend can now run offline with local cache.")
    sys.exit(0)

except Exception as err:
    # Top-level boundary of a CLI script: report the failure and exit nonzero.
    import traceback

    print(f"✗ FAILED: {type(err).__name__}: {err}")
    traceback.print_exc()
    sys.exit(1)