{ "activation": "swiglu", "architectures": [ "FinAIForCausalLM" ], "attention_dropout": 0.1, "auto_map": { "AutoConfig": "configuration_finai.FinAIConfig", "AutoModelForCausalLM": "modeling_finai.FinAIForCausalLM" }, "dropout": 0.1, "dtype": "float32", "embed_dim": 768, "ff_dim": 3072, "hidden_size": 768, "initializer_range": 0.02, "layer_norm_eps": 1e-06, "max_seq_len": 1024, "model_type": "finai", "n_heads": 12, "n_kv_heads": 6, "n_layers": 12, "num_attention_heads": 12, "num_hidden_layers": 12, "pos_encoding": "rotary", "rope_theta": 10000.0, "size_preset": "base", "tie_word_embeddings": true, "transformers_version": "5.0.0", "use_cache": true, "use_flash_attention": true, "vocab_size": 50257 }