File size: 4,409 Bytes
de6a540
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
import csv
import random
import zipfile
import requests
from pathlib import Path
from collections import Counter

import torch
from torch.utils.data import DataLoader, Dataset
from torchvision import transforms, models, datasets
from PIL import Image


# ----------------------------
# CONFIG
# ----------------------------
ZIP_FILE = "Dataset.zip"     # Path to dataset zip
DATASET_DIR = Path("dataset")  # Unzipped folder (created by the extraction step below)
SUBMISSION_FILE = "submission.csv"
LABELS = ["RAR", "Taming", "VAR", "SD", "outlier"] # Do not change this: label names fixed by the task

# Leaderboard submission
SERVER_URL = "http://34.122.51.94:80"
API_KEY = None  # teams insert their assigned token here
TASK_ID = "05-iar-attribution"

# ----------------------------
# UNZIP DATASET
# ----------------------------
# Extract the archive once; later runs reuse the unpacked folder.
if DATASET_DIR.exists():
    print("Dataset already extracted.")
else:
    print("Unzipping dataset...")
    with zipfile.ZipFile(ZIP_FILE, "r") as archive:
        archive.extractall(DATASET_DIR)


# ----------------------------
# TRANSFORMS
# ----------------------------
# Resize every image to 224x224 and convert it to a float tensor in [0, 1].
# No normalization/augmentation here — this is the minimal starter pipeline.
transform = transforms.Compose([
    transforms.Resize((224, 224)),
    transforms.ToTensor(),
])


# ----------------------------
# DATASETS & DATALOADERS
# ----------------------------
print("Loading datasets...")

# ImageFolder infers class labels from the subdirectory names under
# dataset/train and dataset/val (assumed to match LABELS — TODO confirm).
train_dataset = datasets.ImageFolder(root=DATASET_DIR / "train", transform=transform)
val_dataset   = datasets.ImageFolder(root=DATASET_DIR / "val", transform=transform)

# Custom dataset for unlabeled test images
class TestDataset(Dataset):
    """Serve images from a flat directory together with their file names.

    Unlike ImageFolder, no labels are inferred: each item is a dict
    holding the (optionally transformed) image and its original name.
    """

    def __init__(self, root, transform=None):
        self.root = Path(root)
        # Sort for a deterministic order so predictions line up run-to-run.
        self.files = sorted(self.root.glob("*.*"))
        self.transform = transform

    def __len__(self):
        return len(self.files)

    def __getitem__(self, index):
        path = self.files[index]
        img = Image.open(path).convert("RGB")
        img = self.transform(img) if self.transform else img
        return {"image": img, "image_name": path.name}

test_dataset = TestDataset(DATASET_DIR / "test", transform=transform)

# Shared loader settings; only the training split is shuffled.
_loader_opts = dict(batch_size=32, num_workers=4)
train_loader = DataLoader(train_dataset, shuffle=True, **_loader_opts)
val_loader = DataLoader(val_dataset, shuffle=False, **_loader_opts)
test_loader = DataLoader(test_dataset, shuffle=False, **_loader_opts)

# Print classes and per-class counts for train/val
def _print_class_stats(name: str, ds):
    counts = Counter(getattr(ds, "targets", []))
    print(f"{name} classes: {ds.classes}")
    for cls, idx in ds.class_to_idx.items():
        print(f"  {cls}: {counts.get(idx, 0)}")

# Report class distributions for both labeled splits, then overall sizes.
for _split_name, _split_ds in (("Train", train_dataset), ("Val", val_dataset)):
    _print_class_stats(_split_name, _split_ds)

print(f"Train size: {len(train_dataset)} | Val size: {len(val_dataset)} | Test size: {len(test_dataset)}")


# ----------------------------
# EXAMPLE MODEL (ResNet18)
# ----------------------------
print("Building dummy model...")
model = models.resnet18(weights=None, num_classes=len(LABELS))  # untrained
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = model.to(device)


# ----------------------------
# DUMMY INFERENCE ON TEST / DUMMY SUBMISSION
# ----------------------------
print("Generating random predictions for submission...")
# Random-baseline: one [image_name, label] row per test image.
preds = [
    [fname, random.choice(LABELS)]
    for batch in test_loader
    for fname in batch["image_name"]
]

# ----------------------------
# SAVE SUBMISSION
# ----------------------------
# Header row followed by all prediction rows, written in one call.
with open(SUBMISSION_FILE, "w", newline="", encoding="utf-8") as out:
    csv.writer(out).writerows([["image_name", "label"], *preds])

print(f"Saved submission file to {SUBMISSION_FILE}")
print("   Format: image_name,label | Allowed labels: RAR, Taming, VAR, SD, outlier")


# ----------------------------
# SUBMIT TO LEADERBOARD SERVER
# ----------------------------
# Upload the CSV to the scoring server, authenticated via the team token.
if API_KEY is None:
    print("No TOKEN provided. Please set your team TOKEN in this script to submit.")
else:
    print("Submitting to leaderboard server...")

    # Open the file in a context manager so the handle is closed even if
    # the request raises (the original leaked the open file object).
    with open(SUBMISSION_FILE, "rb") as f:
        response = requests.post(
            f"{SERVER_URL}/submit/{TASK_ID}",
            files={"file": f},
            headers={"X-API-Key": API_KEY},
        )
    print("Server response:", response.json())