
Commit 58fbdfb

annotate input_
Signed-off-by: lucast2021 <[email protected]>
lucast2021 committed Jan 5, 2025
1 parent 0c6272d commit 58fbdfb
Showing 1 changed file with 6 additions and 6 deletions.
12 changes: 6 additions & 6 deletions vllm/lora/layers.py
@@ -406,8 +406,8 @@ def __init__(self, base_layer: ReplicatedLinear) -> None:
         self.n_slices = 1
 
     def forward(
-        self,
-        input_) -> Tuple[Optional[torch.Tensor], Optional[torch.Tensor]]:
+        self, input_: torch.Tensor
+    ) -> Tuple[Optional[torch.Tensor], Optional[torch.Tensor]]:
         """Forward of ReplicatedLinearWithLoRA
 
         Args:
@@ -499,8 +499,8 @@ def slice_bias(self, bias: torch.Tensor) -> torch.Tensor:
         return bias
 
     def forward(
-        self,
-        input_) -> Tuple[Optional[torch.Tensor], Optional[torch.Tensor]]:
+        self, input_: torch.Tensor
+    ) -> Tuple[Optional[torch.Tensor], Optional[torch.Tensor]]:
         """Forward of ColumnParallelLinear
 
         Args:
@@ -838,8 +838,8 @@ def slice_bias(self, bias: torch.Tensor) -> torch.Tensor:
         return bias
 
     def forward(
-        self,
-        input_) -> Tuple[Optional[torch.Tensor], Optional[torch.Tensor]]:
+        self, input_: torch.Tensor
+    ) -> Tuple[Optional[torch.Tensor], Optional[torch.Tensor]]:
         """Forward of RowParallelLinear
 
         Args:
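For reference, the sketch below shows how the annotated signature reads after this commit. The class name LinearWithLoRASketch and its body are placeholders invented for illustration; only the forward signature pattern is taken from the diff above.

# Hedged sketch: only the annotation pattern mirrors the diff above; the
# class and its return value are illustrative placeholders, not vLLM code.
from typing import Optional, Tuple

import torch


class LinearWithLoRASketch:

    def forward(
        self, input_: torch.Tensor
    ) -> Tuple[Optional[torch.Tensor], Optional[torch.Tensor]]:
        """Return (output, output_bias); either element may be None."""
        # Placeholder behavior: pass the input through and return no bias.
        return input_, None

Annotating input_ as torch.Tensor gives type checkers enough information to flag non-tensor arguments at call sites, and moving the closing parenthesis to its own line keeps the long Tuple return annotation within the line-length limit.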

