| """ NER dataset compiled by T-NER library https://github.com/asahi417/tner/tree/master/tner """ |
| import json |
| from itertools import chain |
| import datasets |
|
|
| logger = datasets.logging.get_logger(__name__) |
| _DESCRIPTION = """[BioNLP2004 NER dataset](https://aclanthology.org/W04-1213.pdf)""" |
| _NAME = "bionlp2" |
| _VERSION = "1.0.0" |
| _CITATION = """ |
| @inproceedings{collier-kim-2004-introduction, |
| title = "Introduction to the Bio-entity Recognition Task at {JNLPBA}", |
| author = "Collier, Nigel and |
| Kim, Jin-Dong", |
| booktitle = "Proceedings of the International Joint Workshop on Natural Language Processing in Biomedicine and its Applications ({NLPBA}/{B}io{NLP})", |
| month = aug # " 28th and 29th", |
| year = "2004", |
| address = "Geneva, Switzerland", |
| publisher = "COLING", |
| url = "https://aclanthology.org/W04-1213", |
| pages = "73--78", |
| } |
| https://huggingface.co/datasets/chintagunta85/bionlp/raw/main/test_bionlp.json |
| """ |
|
|
| _HOME_PAGE = "https://huggingface.co/datasets/chintagunta85" |
| |
| _URL = f'https://huggingface.co/datasets/chintagunta85/{_NAME}/raw/main' |
| _URLS = { |
| str(datasets.Split.TEST): [f'{_URL}/test_bionlp.json'], |
| str(datasets.Split.TRAIN): [f'{_URL}/train_bionlp.json'], |
| str(datasets.Split.VALIDATION): [f'{_URL}/valid_bionlp.json'], |
| } |
|
|
|
|
|
|
def map_ner_tags(tlist, custom_names=None, inv_map=None):
    """Re-index original BioNLP2004 tag ids into the unified custom label space.

    The raw files encode tags with the original JNLPBA ids (0='O', 1='B-DNA',
    ..., 10='I-RNA'); each id is converted to the index of the corresponding
    tag name in the unified ``custom_names`` label list used by this loader.

    Args:
        tlist: iterable of original integer tag ids.
        custom_names: target label list; defaults to this loader's unified set.
        inv_map: original id -> tag-name mapping; defaults to the JNLPBA ids.

    Returns:
        list[int]: tag ids re-indexed into ``custom_names``.

    Raises:
        KeyError: if a tag id in ``tlist`` is not present in ``inv_map``.
        ValueError: if a mapped tag name is missing from ``custom_names``.
    """
    # BUGFIX: the original body read ``custom_names``/``inv_map`` from module
    # globals that are never defined at module level (they are locals of
    # BioNLP2004._generate_examples), so every call raised NameError.  The
    # tables are now supplied as defaulted parameters instead.
    if custom_names is None:
        custom_names = ['O', 'B-GENE', 'I-GENE', 'B-CHEMICAL', 'I-CHEMICAL',
                        'B-DISEASE', 'I-DISEASE', 'B-DNA', 'I-DNA', 'B-RNA',
                        'I-RNA', 'B-CELL_LINE', 'I-CELL_LINE', 'B-CELL_TYPE',
                        'I-CELL_TYPE', 'B-PROTEIN', 'I-PROTEIN', 'B-SPECIES',
                        'I-SPECIES']
    if inv_map is None:
        inv_map = {0: 'O', 1: 'B-DNA', 2: 'I-DNA', 3: 'B-PROTEIN',
                   4: 'I-PROTEIN', 5: 'B-CELL_TYPE', 6: 'I-CELL_TYPE',
                   7: 'B-CELL_LINE', 8: 'I-CELL_LINE', 9: 'B-RNA', 10: 'I-RNA'}
    return [custom_names.index(inv_map[tag]) for tag in tlist]
|
|
class BioNLP2004Config(datasets.BuilderConfig):
    """Configuration for the BioNLP2004 dataset builder."""

    def __init__(self, **kwargs):
        """Create a config, forwarding everything to the base class.

        Args:
            **kwargs: keyword arguments passed through to
                ``datasets.BuilderConfig`` (e.g. ``name``, ``version``,
                ``description``).
        """
        super().__init__(**kwargs)
|
|
|
|
class BioNLP2004(datasets.GeneratorBasedBuilder):
    """BioNLP2004 (JNLPBA) NER dataset builder.

    Re-indexes the on-disk tag ids into the unified ``CUSTOM_NAMES`` label
    space so this dataset shares a label vocabulary with sibling loaders.
    """

    # Unified label space shared by _info() and _generate_examples().
    # (The original code duplicated this list in both methods.)
    CUSTOM_NAMES = ['O', 'B-GENE', 'I-GENE', 'B-CHEMICAL', 'I-CHEMICAL',
                    'B-DISEASE', 'I-DISEASE', 'B-DNA', 'I-DNA', 'B-RNA',
                    'I-RNA', 'B-CELL_LINE', 'I-CELL_LINE', 'B-CELL_TYPE',
                    'I-CELL_TYPE', 'B-PROTEIN', 'I-PROTEIN', 'B-SPECIES',
                    'I-SPECIES']

    # On-disk tag id -> tag name (original JNLPBA encoding).
    INV_MAP = {0: 'O', 1: 'B-DNA', 2: 'I-DNA', 3: 'B-PROTEIN', 4: 'I-PROTEIN',
               5: 'B-CELL_TYPE', 6: 'I-CELL_TYPE', 7: 'B-CELL_LINE',
               8: 'I-CELL_LINE', 9: 'B-RNA', 10: 'I-RNA'}

    BUILDER_CONFIGS = [
        BioNLP2004Config(name=_NAME, version=datasets.Version(_VERSION), description=_DESCRIPTION),
    ]

    def _split_generators(self, dl_manager):
        """Download the per-split JSON files and build one generator per split."""
        downloaded_file = dl_manager.download_and_extract(_URLS)
        return [datasets.SplitGenerator(name=i, gen_kwargs={"filepaths": downloaded_file[str(i)]})
                for i in [datasets.Split.TRAIN, datasets.Split.VALIDATION, datasets.Split.TEST]]

    def _generate_examples(self, filepaths):
        """Yield ``(key, example)`` pairs from JSON-lines files.

        Each non-empty line is a JSON object with ``tokens`` and ``ner_tags``;
        tag ids are converted from the on-disk encoding to indices in
        ``CUSTOM_NAMES``. Keys are consecutive integers across all files.
        """
        # Precompute on-disk id -> CUSTOM_NAMES index once, instead of doing a
        # linear list.index() lookup for every tag of every example.
        remap = {tag_id: self.CUSTOM_NAMES.index(name)
                 for tag_id, name in self.INV_MAP.items()}
        _key = 0
        for filepath in filepaths:
            logger.info("generating examples from = %s", filepath)
            with open(filepath, encoding="utf-8") as f:
                rows = [line for line in f.read().split('\n') if len(line) > 0]
            for row in rows:
                data = json.loads(row)
                data['ner_tags'] = [remap[tag] for tag in data['ner_tags']]
                xstr = str(_key)
                yield xstr, {"id": xstr, "tokens": data['tokens'], "ner_tags": data['ner_tags']}
                _key += 1

    def _info(self):
        """Dataset metadata; ``ner_tags`` uses the unified label space."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "id": datasets.Value("string"),
                    "tokens": datasets.Sequence(datasets.Value("string")),
                    "ner_tags": datasets.Sequence(
                        datasets.features.ClassLabel(names=self.CUSTOM_NAMES)
                    ),
                }
            ),
            supervised_keys=None,
            homepage=_HOME_PAGE,
            citation=_CITATION,
        )
|
|