Skip to content

Commit e23bfed

Browse files
authored
Fix RoPE theta in config (#102)
1 parent fda6cd1 commit e23bfed

File tree

1 file changed

+1
-1
lines changed

penzai/models/transformer/variants/llamalike_common.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -596,7 +596,7 @@ def llamalike_from_huggingface_model(
596596
num_decoder_blocks=hf_config.num_hidden_layers,
597597
vocab_size=hf_config.vocab_size,
598598
mlp_variant="swiglu",
599-
rope_wavelength=10_000,
599+
rope_wavelength=hf_config.rope_theta,
600600
tie_embedder_and_logits=False,
601601
attention_type=attention_type,
602602
rms_norm_eps=hf_config.rms_norm_eps,

0 commit comments

Comments (0)