{ "model_type": "llama", "hidden_size": 3200, "num_attention_heads": 32, "num_hidden_layers": 80 }