We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 7629e27 · commit 7fd0070 — Copy full SHA for 7fd0070
python/sglang/srt/models/seed_oss.py
@@ -15,6 +15,7 @@
15
get_tensor_model_parallel_world_size,
16
)
17
from sglang.srt.layers.activation import SiluAndMul
18
+from sglang.srt.layers.dp_attention import is_dp_attention_enabled
19
from sglang.srt.layers.layernorm import RMSNorm
20
from sglang.srt.layers.linear import (
21
MergedColumnParallelLinear,
@@ -258,7 +259,7 @@ def __init__(
258
259
config.vocab_size,
260
config.hidden_size,
261
quant_config=quant_config,
- enable_tp=not global_server_args_dict["enable_dp_attention"],
262
+ enable_tp=not is_dp_attention_enabled(),
263
prefix=add_prefix("embed_tokens", prefix),
264
265
else:
0 commit comments