{ "add_cross_attention": false, "architectures": [ "MyXSyntaxV2" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": 0, "classifier_dropout": null, "dtype": "float32", "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "B-abb", "1": "B-adj", "2": "B-adv", "3": "B-conj", "4": "B-int", "5": "B-n", "6": "B-part", "7": "B-ppm", "8": "B-pron", "9": "B-sb", "10": "B-tn", "11": "B-v", "12": "E-abb", "13": "E-adj", "14": "E-adv", "15": "E-conj", "16": "E-int", "17": "E-n", "18": "E-part", "19": "E-ppm", "20": "E-pron", "21": "E-sb", "22": "E-tn", "23": "E-v", "24": "I-abb", "25": "I-adj", "26": "I-adv", "27": "I-conj", "28": "I-int", "29": "I-n", "30": "I-part", "31": "I-ppm", "32": "I-pron", "33": "I-sb", "34": "I-tn", "35": "I-v", "36": "S-adj", "37": "S-adv", "38": "S-conj", "39": "S-int", "40": "S-n", "41": "S-part", "42": "S-ppm", "43": "S-pron", "44": "S-punc", "45": "S-tn", "46": "S-v" }, "initializer_range": 0.02, "intermediate_size": 3072, "is_decoder": false, "label2id": { "B-abb": 0, "B-adj": 1, "B-adv": 2, "B-conj": 3, "B-int": 4, "B-n": 5, "B-part": 6, "B-ppm": 7, "B-pron": 8, "B-sb": 9, "B-tn": 10, "B-v": 11, "E-abb": 12, "E-adj": 13, "E-adv": 14, "E-conj": 15, "E-int": 16, "E-n": 17, "E-part": 18, "E-ppm": 19, "E-pron": 20, "E-sb": 21, "E-tn": 22, "E-v": 23, "I-abb": 24, "I-adj": 25, "I-adv": 26, "I-conj": 27, "I-int": 28, "I-n": 29, "I-part": 30, "I-ppm": 31, "I-pron": 32, "I-sb": 33, "I-tn": 34, "I-v": 35, "S-adj": 36, "S-adv": 37, "S-conj": 38, "S-int": 39, "S-n": 40, "S-part": 41, "S-ppm": 42, "S-pron": 43, "S-punc": 44, "S-tn": 45, "S-v": 46 }, "layer_norm_eps": 1e-05, "max_position_embeddings": 514, "model_type": "xlm-roberta", "num_attention_heads": 12, "num_hidden_layers": 12, "output_past": true, "pad_token_id": 1, "position_embedding_type": "absolute", "tie_word_embeddings": true, "transformers_version": "5.2.0", "type_vocab_size": 1, "use_cache": false, "vocab_size": 250002 }