{
  "activation": "gelu",
  "affine": false,
  "attn_dropout": 0.0,
  "c_in": 1,
  "c_out": null,
  "classification": true,
  "d_ff": 2048,
  "d_model": 512,
  "decomposition": false,
  "dropout": 0.05,
  "individual": false,
  "kernel_size": 25,
  "n_heads": 8,
  "n_layers": 2,
  "norm": "BatchNorm",
  "padding_patch": true,
  "patch_len": 16,
  "pre_norm": false,
  "pred_dim": 2,
  "res_attention": true,
  "revin": true,
  "seq_len": 82,
  "store_attn": false,
  "stride": 8,
  "subtract_last": false
}