{ "architectures": [ "DeepseekV3ForCausalLM" ], "attention_bias": false, "attention_dropout": 0.0, "bos_token_id": 0, "dtype": "bfloat16", "eos_token_id": 1, "first_k_dense_replace": 3, "head_dim": 64, "hidden_act": "silu", "hidden_size": 8, "initializer_range": 0.02, "intermediate_size": 32, "kv_lora_rank": 512, "max_position_embeddings": 4096, "model_type": "deepseek_v3", "moe_intermediate_size": 2048, "n_group": 8, "n_routed_experts": 256, "n_shared_experts": 1, "norm_topk_prob": true, "num_attention_heads": 4, "num_experts_per_tok": 8, "num_hidden_layers": 2, "num_key_value_heads": 2, "pretraining_tp": 1, "q_lora_rank": 1536, "qk_head_dim": 192, "qk_nope_head_dim": 128, "qk_rope_head_dim": 64, "rms_norm_eps": 1e-06, "rope_interleave": true, "rope_scaling": null, "rope_theta": 10000.0, "routed_scaling_factor": 2.5, "tie_word_embeddings": false, "topk_group": 4, "transformers_version": "4.57.3", "use_cache": true, "v_head_dim": 128, "vocab_size": 128815 }