
Commit 832d253

[Code Simplification] fix init_distributed_environment() (#2982)
1 parent: bfeb664

2 files changed (+5 lines, -5 lines)


fastdeploy/model_executor/layers/linear.py

Lines changed: 1 addition & 1 deletion
@@ -500,7 +500,7 @@ def load_state_dict(self, state_dict: dict):
             k_bias = get_tensor(state_dict.pop(k_bias_key))
             v_bias = get_tensor(state_dict.pop(v_bias_key))
             qkv_bias = paddle.concat([q_bias, k_bias, v_bias], axis=-1)
-            self.bias.set_value(qkv_bias)
+            self.bias.set_value(qkv_bias)
 
 
 class RowParallelLinear(LinearBase):
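For context on the hunk above: `load_state_dict` pops the separate Q, K, and V bias tensors from the checkpoint and fuses them with `paddle.concat`, so the combined QKV projection carries a single bias tensor. A minimal runnable sketch of that fusion; the dimensions `q_dim` and `kv_dim` are illustrative, not FastDeploy's actual values:

```python
import paddle

# Illustrative sizes only; real models derive these from
# num_heads * head_dim and num_kv_heads * head_dim.
q_dim, kv_dim = 8, 4

q_bias = paddle.zeros([q_dim])
k_bias = paddle.ones([kv_dim])
v_bias = paddle.full([kv_dim], 2.0)

# Same fusion as in the diff: one bias vector for the combined
# QKV projection, laid out as [q | k | v] along the last axis.
qkv_bias = paddle.concat([q_bias, k_bias, v_bias], axis=-1)
print(qkv_bias.shape)  # [16] == q_dim + 2 * kv_dim
```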

fastdeploy/worker/worker_process.py

Lines changed: 4 additions & 4 deletions
@@ -17,7 +17,7 @@
 import argparse
 import json
 import time
-from typing import List
+from typing import Tuple
 
 import numpy as np
 import paddle
@@ -74,7 +74,7 @@ def get_worker(fd_config: FDConfig, local_rank: int, rank: int) -> WorkerBase:
         return GcuWorker(fd_config=fd_config, local_rank=local_rank, rank=rank)
 
 
-def init_distributed_environment(seed: int = 20) -> List[int]:
+def init_distributed_environment(seed: int = 20) -> Tuple[int, int]:
     """Initialize Paddle Fleet and get rank of worker"""
     # Global rank
     ranks = dist.get_world_size()
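The corrected annotation reflects that the function returns a fixed pair of scalars rather than a homogeneous list. A hedged sketch of the shape implied by the new signature; only the `def` line, the docstring, and the `ranks = dist.get_world_size()` line appear in the diff, so the local-rank lookup, the seeding, and the return value below are assumptions for illustration:

```python
from typing import Tuple

import paddle
import paddle.distributed as dist


def init_distributed_environment(seed: int = 20) -> Tuple[int, int]:
    """Initialize Paddle Fleet and get rank of worker"""
    # Global rank
    ranks = dist.get_world_size()
    # Assumed for illustration: the Tuple[int, int] annotation suggests
    # a (world_size, rank)-style pair is what the function hands back.
    local_rank = dist.get_rank()
    paddle.seed(seed)
    return ranks, local_rank
```

`Tuple[int, int]` is the idiomatic annotation for a fixed-length two-element return; `List[int]` would claim a variable-length sequence, which is not what callers unpack here.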
@@ -122,9 +122,9 @@ def update_fd_config_for_mm(fd_config: FDConfig) -> None:
 
 class PaddleDisWorkerProc:
     """
-    Paddle Distrubuted wrapper for fastdeploy.worker.Worker,
+    Paddle Distributed wrapper for fastdeploy.worker.Worker,
     for handling single-node multi-GPU tensor parallel.
-    The wrapper internally executea an event loop that continuously executes requests
+    The wrapper internally executes an event loop that continuously executes requests
     in the task queue. Control flow is transmitted by IPC.
     """
 
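The repaired docstring describes the worker's control flow: an event loop that drains a task queue fed over IPC. A generic standalone sketch of that pattern using `multiprocessing` queues; the request type, sentinel, and queue names here are illustrative and not FastDeploy's actual IPC layer:

```python
import multiprocessing as mp


def worker_loop(task_queue: mp.Queue, result_queue: mp.Queue) -> None:
    # Event loop: block on the IPC queue, execute each request in
    # arrival order, and exit on a None sentinel.
    while True:
        request = task_queue.get()
        if request is None:
            break
        result_queue.put(f"handled: {request}")


if __name__ == "__main__":
    tasks = mp.Queue()
    results = mp.Queue()
    proc = mp.Process(target=worker_loop, args=(tasks, results))
    proc.start()
    tasks.put("prefill request")
    tasks.put(None)  # sentinel: stop the loop
    print(results.get())  # handled: prefill request
    proc.join()
```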
