{
  "mask_token": 530,
  "null_token": 531,
  "pad_token": 0,
  "vocab_size": 529,
  "embedding_dim": 1024,
  "transformer_activation": "relu",
  "transformer_heads": 4,
  "transformer_blocks": 10,
  "transformer_feedforward_dim": 1024,
  "continuous_head_activation": "sigmoid",
  "include_head": "True",
  "dropout_rate": 0.1
}