Rebased; remove unnecessary assertion block
Yoshi Suhara committed Aug 18, 2024
1 parent 416d72a · commit 06cc89d
Showing 1 changed file with 0 additions and 6 deletions.
src/transformers/models/llama/modeling_llama.py: 0 additions & 6 deletions
@@ -347,12 +347,6 @@ def __init__(self, config: LlamaConfig, layer_idx: Optional[int] = None):
         self.rope_theta = config.rope_theta
         self.is_causal = True
 
-        if self.head_dim is None and (self.head_dim * self.num_heads) != self.hidden_size:
-            raise ValueError(
-                f"hidden_size must be divisible by num_heads (got `hidden_size`: {self.hidden_size}"
-                f" and `num_heads`: {self.num_heads})."
-            )
-
         self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=config.attention_bias)
         self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias)
         self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias)
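
Why the deleted block is "unnecessary": as written, the guard can never raise its intended ValueError. Because `and` short-circuits, the multiplication only runs when `self.head_dim` is None, and `None * num_heads` then raises a TypeError first; when `head_dim` is set, the first operand is False and no check happens at all. A standalone sketch of that behavior, using assumed placeholder values (not taken from the commit):

    # Standalone sketch; the sizes below are illustrative assumptions.
    def removed_guard(head_dim, num_heads, hidden_size):
        # Same condition as the deleted block.
        if head_dim is None and (head_dim * num_heads) != hidden_size:
            raise ValueError("hidden_size must be divisible by num_heads")

    removed_guard(head_dim=128, num_heads=32, hidden_size=4096)  # no error: `head_dim is None` is False
    removed_guard(head_dim=100, num_heads=32, hidden_size=4096)  # also no error, despite 100 * 32 != 4096

    try:
        removed_guard(head_dim=None, num_heads=32, hidden_size=4096)
    except TypeError as exc:
        # Short-circuiting reaches `None * num_heads`, which raises TypeError
        # before the intended ValueError can ever be constructed.
        print(f"TypeError, not ValueError: {exc}")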
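The unchanged context lines also hint at the motivation: the q/k/v projections are sized by `num_heads * head_dim` rather than by `hidden_size`, so a configuration where `num_heads * head_dim != hidden_size` is legitimate and a divisibility check would be too strict anyway. A shape-only sketch under assumed example sizes:

    import torch
    import torch.nn as nn

    # Assumed example sizes where num_heads * head_dim != hidden_size.
    hidden_size, num_heads, head_dim = 3072, 32, 128  # 32 * 128 = 4096 != 3072

    q_proj = nn.Linear(hidden_size, num_heads * head_dim, bias=False)
    x = torch.randn(2, 10, hidden_size)  # (batch, seq_len, hidden_size)
    q = q_proj(x)                        # -> (2, 10, 4096); the mismatch is fine
    print(q.shape)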
