fixes for merging
@@ -98,6 +98,9 @@ class SmolVLAConfig(PreTrainedConfig):
     self_attn_every_n_layers: int = 2  # Interleave SA layers each self_attn_every_n_layers
     expert_width_multiplier: float = 0.75  # The action expert hidden size (wrt to the VLM)
 
+    min_period: float = 4e-3  # sensitivity range for the timestep used in sine-cosine positional encoding
+    max_period: float = 4.0
+
     def __post_init__(self):
         super().__post_init__()
 
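The defaults match the values that were previously hardcoded at the call site (third hunk below), so behavior is unchanged. The "sensitivity range" in the field comment refers to the wavelengths available to the encoder: with the flow-matching timestep normalized to [0, 1], min_period bounds the finest resolvable timestep difference while max_period keeps the encoding unambiguous over the whole range. A small illustrative snippet, assuming the periods are spaced geometrically between the two bounds (the spacing scheme is an assumption, not shown in this diff):

import torch

# Illustration (assumed geometric spacing): each sine/cosine pair gets one
# period between min_period and max_period.
min_period, max_period = 4e-3, 4.0  # the new config defaults
half_dim = 8  # half of the embedding dimension, kept small for display
fraction = torch.linspace(0.0, 1.0, half_dim)
periods = min_period * (max_period / min_period) ** fraction
print(periods)  # ~tensor([0.0040, 0.0107, ..., 4.0000]): three decades of periods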
@@ -17,7 +17,7 @@
 """
 SmolVLA:
 
-[Paper]()
+[Paper](https://huggingface.co/papers/2506.01844)
 
 Designed by Hugging Face.
 
@@ -656,7 +656,7 @@ class VLAFlowMatching(nn.Module):
         dtype = action_emb.dtype
         # Embed timestep using sine-cosine positional encoding with sensitivity in the range [0, 1]
         time_emb = create_sinusoidal_pos_embedding(
-            timestep, self.vlm_with_expert.expert_hidden_size, min_period=4e-3, max_period=4.0, device=device
+            timestep, self.vlm_with_expert.expert_hidden_size, self.config.min_period, self.config.max_period, device=device
         )
         time_emb = time_emb.type(dtype=dtype)
 
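The actual body of create_sinusoidal_pos_embedding lives elsewhere in the repo; below is a minimal, self-contained sketch consistent with this call site. The function name suffix, the even-dimension check, and the geometric period spacing are assumptions for illustration, not part of this diff:

import math
import torch

def create_sinusoidal_pos_embedding_sketch(
    timestep: torch.Tensor,  # shape (batch_size,), values in [0, 1]
    dimension: int,  # expert hidden size; assumed even
    min_period: float,
    max_period: float,
    device="cpu",
) -> torch.Tensor:
    # Sketch: one period per sine/cosine pair, spaced geometrically between
    # min_period and max_period, then evaluated at each timestep.
    if dimension % 2 != 0:
        raise ValueError(f"dimension ({dimension}) must be even")
    fraction = torch.linspace(0.0, 1.0, dimension // 2, device=device)
    period = min_period * (max_period / min_period) ** fraction
    angles = 2 * math.pi * timestep.to(device)[:, None] / period[None, :]
    return torch.cat([torch.sin(angles), torch.cos(angles)], dim=1)

emb = create_sinusoidal_pos_embedding_sketch(torch.rand(4), 16, 4e-3, 4.0)
print(emb.shape)  # torch.Size([4, 16])

After this commit the call site reads the range from the config instead of hardcoding 4e-3 / 4.0, so the embedding stays in sync with whatever SmolVLAConfig specifies.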