"""
Diagnostic script: check expression vs latent value distributions in CCFM training.
Loads data + scGPT cache, runs a few batches, prints distribution stats.
Usage (login node is fine — no GPU needed for cached features):

    cd /home/hp250092/ku50001222/qian/aivc/lfj/transfer/code/CCFM
    python scripts/diagnose_distributions.py
"""
import sys
import os
_PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, _PROJECT_ROOT)
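# _bootstrap_scdfm is imported purely for its side effects (hence the noqa: F401
# on the import below); presumably it sets up import paths for the scDFM
# dependency that the src.* modules rely on.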
import _bootstrap_scdfm # noqa: F401
import torch
import numpy as np
from torch.utils.data import DataLoader
from src.data.data import get_data_classes
from src.data.scgpt_cache import ScGPTFeatureCache
from src._scdfm_imports import AffineProbPath, CondOTScheduler, process_vocab
from src.utils import GeneVocab
_REPO_ROOT = os.path.dirname(_PROJECT_ROOT)  # transfer/code/


def describe(name, t):
"""Print distribution stats for a tensor."""
t_flat = t.float().flatten()
nonzero = t_flat[t_flat.abs() > 1e-8]
print(f" {name:30s} | shape {str(list(t.shape)):20s} | "
f"mean={t_flat.mean():.4f} std={t_flat.std():.4f} "
f"min={t_flat.min():.4f} max={t_flat.max():.4f} "
f"median={t_flat.median():.4f} "
f"zero_frac={1 - len(nonzero)/len(t_flat):.2%}")
if len(nonzero) > 0 and len(nonzero) < len(t_flat):
print(f" {' (nonzero only)':30s} | "
f"mean={nonzero.mean():.4f} std={nonzero.std():.4f} "
f"min={nonzero.min():.4f} max={nonzero.max():.4f}")
def main():
    device = torch.device("cpu")
    data_name = "norman"
    n_top_genes = 5000
    infer_top_gene = 1000
    batch_size = 48
    cache_path = os.path.join(_PROJECT_ROOT, "scgpt_cache_norman.h5")
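    # NOTE: the HDF5 cache is assumed to have been precomputed by a separate
    # caching step; this script only reads it, which is why no GPU is needed.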

    # --- Load data (reuse scDFM) ---
    Data, PerturbationDataset, TrainSampler, TestDataset = get_data_classes()
    scdfm_data_path = os.path.join(_REPO_ROOT, "scDFM", "data")
    data_manager = Data(scdfm_data_path)
    data_manager.load_data(data_name)
    if "gene_name" in data_manager.adata.var.columns and data_manager.adata.var_names[0].startswith("ENSG"):
        data_manager.adata.var_names = data_manager.adata.var["gene_name"].values
        data_manager.adata.var_names_make_unique()
    data_manager.process_data(
        n_top_genes=n_top_genes, split_method="additive",
        fold=1, use_negative_edge=True, k=30,
    )
    train_sampler, _, _ = data_manager.load_flow_data(batch_size=batch_size)
    train_dataset = PerturbationDataset(train_sampler, batch_size)
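    # batch_size=1 because PerturbationDataset is assumed to yield already
    # assembled batches of `batch_size` cells (hence the squeeze(0) on each
    # field inside the loop below).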
    dataloader = DataLoader(train_dataset, batch_size=1, shuffle=False, num_workers=0)

    # --- Load scGPT cache ---
    scgpt_cache = ScGPTFeatureCache(cache_path, target_std=1.0)
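    # target_std=1.0 presumably rescales the cached latents so their standard
    # deviation matches the N(0, 1) flow-matching noise (an assumption about
    # ScGPTFeatureCache's normalization, suggested by norm_mean/norm_var below).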
print(f"Cache shape: {scgpt_cache.features.shape}")
print(f"Cache norm_mean: mean={scgpt_cache.norm_mean.mean():.4f}, std={scgpt_cache.norm_mean.std():.4f}")
print(f"Cache norm_var: mean={scgpt_cache.norm_var.mean():.4f}, std={scgpt_cache.norm_var.std():.4f}")

    # --- Flow path ---
    flow_path = AffineProbPath(scheduler=CondOTScheduler())
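    # Assuming the standard conditional-OT scheduler, the affine path is the
    # straight line x_t = (1 - t) * x_0 + t * x_1 with velocity target
    # dx_t = x_1 - x_0; this is the frame of reference for the stats below.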

    # --- Run a few batches ---
    n_batches = 5
    print(f"\n{'='*90}")
    print(f"Running {n_batches} batches (batch_size={batch_size}, infer_top_gene={infer_top_gene})")
    print(f"{'='*90}")

    for i, batch_data in enumerate(dataloader):
        if i >= n_batches:
            break
        source = batch_data["src_cell_data"].squeeze(0)  # (B, G_full)
        target = batch_data["tgt_cell_data"].squeeze(0)
        tgt_cell_names = [n[0] if isinstance(n, (tuple, list)) else n for n in batch_data["tgt_cell_id"]]

        # Random gene subset
        input_gene_ids = torch.randperm(source.shape[-1])[:infer_top_gene]
        source_sub = source[:, input_gene_ids]
        target_sub = target[:, input_gene_ids]

        # scGPT latent features (from cache)
        z_target = scgpt_cache.lookup(tgt_cell_names, input_gene_ids, device=device)

        # Noise
        noise_expr = torch.randn_like(source_sub)
        noise_latent = torch.randn_like(z_target)

        # Sample time steps
        t = torch.sigmoid(torch.randn(source_sub.shape[0]))  # logit-normal
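        # sigmoid of a standard normal gives a logit-normal t in (0, 1),
        # concentrated around t = 0.5 rather than uniform.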

        # Flow path: expression
        path_expr = flow_path.sample(t=t, x_0=noise_expr, x_1=target_sub)

        # Flow path: latent (flatten for AffineProbPath)
        B, G, D = z_target.shape
        z_target_flat = z_target.reshape(B, G * D)
        noise_latent_flat = noise_latent.reshape(B, G * D)
        path_latent_flat = flow_path.sample(t=t, x_0=noise_latent_flat, x_1=z_target_flat)

        dx_t_expr = path_expr.dx_t
        dx_t_latent = path_latent_flat.dx_t.reshape(B, G, D)
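        # Since dx_t = x_1 - x_0 under the CondOT path, the velocity stats below
        # are effectively target-minus-noise magnitudes, compared between
        # expression space and scGPT latent space.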
print(f"\n--- Batch {i} ---")
print(f"[Raw Values]")
describe("source_sub (control expr)", source_sub)
describe("target_sub (perturbed expr)", target_sub)
describe("z_target (scGPT latent)", z_target)
print(f"\n[Noise]")
describe("noise_expr", noise_expr)
describe("noise_latent", noise_latent)
print(f"\n[Flow Path x_t (interpolated)]")
describe("x_t_expr", path_expr.x_t)
describe("z_t_latent", path_latent_flat.x_t.reshape(B, G, D))
print(f"\n[Velocity Targets dx_t]")
describe("dx_t_expr", dx_t_expr)
describe("dx_t_latent", dx_t_latent)
print(f"\n[Velocity MSE (hypothetical)]")
mse_expr = (dx_t_expr ** 2).mean().item()
mse_latent = (dx_t_latent ** 2).mean().item()
print(f" mean(dx_t_expr^2) = {mse_expr:.4f}")
print(f" mean(dx_t_latent^2) = {mse_latent:.4f}")
print(f" ratio expr/latent = {mse_expr / max(mse_latent, 1e-8):.2f}x")

    scgpt_cache.close()
    print(f"\n{'='*90}")
    print("Done.")


if __name__ == "__main__":
    main()