
Commit b017eb4

format code
1 parent 69d4065 commit b017eb4

2 files changed: 4 additions, 5 deletions

2 files changed

+4
-5
lines changed

lmdeploy/pytorch/kernels/ascend/apply_rotary_pos_emb.py

1 addition & 1 deletion

@@ -26,7 +26,7 @@ def apply_rotary_pos_emb(
         cos = cos[position_ids_1d].view(1, bs, 1, -1)
         sin = sin[position_ids_1d].view(1, bs, 1, -1)
     else:
-        raise RuntimeError("Cannot handle cos/sin shape dims!")
+        raise RuntimeError('Cannot handle cos/sin shape dims!')
 
     if context:
         setattr(context, 'cos', cos)
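For context, apply_rotary_pos_emb gathers per-position cos/sin tables and reshapes them to (1, bs, 1, head_dim) before rotating the query/key tensors. Below is a minimal, runnable sketch of that pattern, assuming the common rotate-half convention; rotate_half, apply_rope, and the toy shapes are illustrative assumptions, not code from the Ascend kernel itself.

import torch

def rotate_half(x: torch.Tensor) -> torch.Tensor:
    # Swap the two halves of the last dim with a sign flip: (x1, x2) -> (-x2, x1).
    x1, x2 = x.chunk(2, dim=-1)
    return torch.cat((-x2, x1), dim=-1)

def apply_rope(x: torch.Tensor, cos: torch.Tensor, sin: torch.Tensor) -> torch.Tensor:
    # Standard rotary update: x * cos + rotate_half(x) * sin.
    return x * cos + rotate_half(x) * sin

bs, heads, head_dim = 2, 4, 8
q = torch.randn(1, bs, heads, head_dim)
position_ids_1d = torch.arange(bs)

# Precomputed tables indexed by position, as in the kernel's cos[position_ids_1d].
inv_freq = 1.0 / (10000 ** (torch.arange(0, head_dim, 2).float() / head_dim))
freqs = torch.outer(torch.arange(64).float(), inv_freq)  # (max_pos, head_dim // 2)
table = torch.cat((freqs, freqs), dim=-1)                # (max_pos, head_dim)
cos = table.cos()[position_ids_1d].view(1, bs, 1, -1)
sin = table.sin()[position_ids_1d].view(1, bs, 1, -1)

q_rot = apply_rope(q, cos, sin)  # cos/sin broadcast over the heads dim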

lmdeploy/pytorch/models/qwen2_moe.py

3 additions & 4 deletions

@@ -106,10 +106,9 @@ def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
         routing_weights /= routing_weights.sum(dim=-1, keepdim=True)
         routing_weights = routing_weights.to(hidden_states.dtype)
 
-        out_states = torch.zeros(
-            (batch_size * sequence_length, hidden_dim),
-            dtype=hidden_states.dtype,
-            device=hidden_states.device)
+        out_states = torch.zeros((batch_size * sequence_length, hidden_dim),
+                                 dtype=hidden_states.dtype,
+                                 device=hidden_states.device)
 
         expert_mask = torch.nn.functional.one_hot(
             selected_experts, num_classes=self.num_experts).permute(2, 1, 0)
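The out_states buffer being reformatted here is the accumulator for the usual top-k MoE dispatch: the one_hot(...).permute(2, 1, 0) mask maps each expert to the tokens routed to it. A runnable toy version of that loop follows, assuming hypothetical sizes and plain Linear layers as stand-in experts rather than the actual Qwen2-MoE modules.

import torch
import torch.nn.functional as F

num_experts, top_k = 4, 2
tokens, hidden_dim = 6, 8
hidden_states = torch.randn(tokens, hidden_dim)
experts = [torch.nn.Linear(hidden_dim, hidden_dim) for _ in range(num_experts)]

# Router: softmax over experts, keep top-k weights per token, renormalize.
router_logits = torch.randn(tokens, num_experts)
routing_weights = F.softmax(router_logits, dim=-1)
routing_weights, selected_experts = torch.topk(routing_weights, top_k, dim=-1)
routing_weights /= routing_weights.sum(dim=-1, keepdim=True)

out_states = torch.zeros((tokens, hidden_dim),
                         dtype=hidden_states.dtype,
                         device=hidden_states.device)

# one_hot: (tokens, top_k, num_experts) -> permute(2, 1, 0): (num_experts, top_k, tokens)
expert_mask = F.one_hot(selected_experts, num_classes=num_experts).permute(2, 1, 0)

for expert_idx in range(num_experts):
    slot, token_idx = torch.where(expert_mask[expert_idx])
    if token_idx.numel() == 0:
        continue  # no tokens routed to this expert
    expert_out = experts[expert_idx](hidden_states[token_idx])
    weighted = expert_out * routing_weights[token_idx, slot].unsqueeze(-1)
    # Scatter-accumulate each expert's weighted output back into the flat buffer.
    out_states.index_add_(0, token_idx, weighted)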

0 commit comments
