{
  "module": "keras_hub.src.models.gemma.gemma_causal_lm_preprocessor",
  "class_name": "GemmaCausalLMPreprocessor",
  "config": {
    "name": "gemma_causal_lm_preprocessor",
    "trainable": true,
    "dtype": {
      "module": "keras",
      "class_name": "DTypePolicy",
      "config": {
        "name": "float32"
      },
      "registered_name": null
    },
    "tokenizer": {
      "module": "keras_hub.src.models.gemma.gemma_tokenizer",
      "class_name": "GemmaTokenizer",
      "config": {
        "name": "gemma_tokenizer",
        "trainable": true,
        "dtype": {
          "module": "keras",
          "class_name": "DTypePolicy",
          "config": {
            "name": "int32"
          },
          "registered_name": null
        },
        "config_file": "tokenizer.json",
        "proto": null,
        "sequence_length": null,
        "add_bos": false,
        "add_eos": false
      },
      "registered_name": "keras_hub>GemmaTokenizer"
    },
    "config_file": "preprocessor.json",
    "sequence_length": 64,
    "add_start_token": true,
    "add_end_token": true
  },
  "registered_name": "keras_hub>GemmaCausalLMPreprocessor"
}