```json
{
  "model_type": "gator",
  "architectures": ["GatorForCausalLM"],
  "hidden_size": 448,
  "num_attention_heads": 8,
  "num_hidden_layers": 10,
  "vocab_size": 50257,
  "max_position_embeddings": 1024,
  "auto_map": {
    "AutoConfig": "configuration_gator.GatorConfig",
    "AutoModel": "modeling_gator.GatorModel",
    "AutoModelForCausalLM": "modeling_gator.GatorForCausalLM"
  }
}
```
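
With the `auto_map` entries in place, the auto classes can resolve the custom `GatorConfig` and `GatorForCausalLM` classes at load time when `trust_remote_code=True` is passed. A minimal sketch, assuming this `config.json` sits alongside `configuration_gator.py` and `modeling_gator.py` in a local directory or Hub repo (the repo id `your-org/gator` is a placeholder):

```python
# Sketch: load the custom Gator model through the transformers auto classes.
# Assumes config.json, configuration_gator.py, and modeling_gator.py live in
# the same model directory or Hub repo; "your-org/gator" is a placeholder id.
from transformers import AutoConfig, AutoModelForCausalLM

model_id = "your-org/gator"  # placeholder

# trust_remote_code=True tells transformers to import GatorConfig and
# GatorForCausalLM from the modules referenced in auto_map instead of
# looking for a built-in "gator" model type.
config = AutoConfig.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(model_id, trust_remote_code=True)

print(config.hidden_size, config.num_hidden_layers)  # 448, 10
```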