Skip to content

Commit f18a3b1

Browse files
committed
Revert "modify condition"
This reverts commit 26430bd.
1 parent 26430bd commit f18a3b1

File tree

3 files changed

+30
-15
lines changed

3 files changed

+30
-15
lines changed

fastdeploy/input/ernie_processor.py

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -110,9 +110,12 @@ def process_request(self, request, max_model_len=None, **kwargs):
110110
task = request.to_dict()
111111
chat_template_kwargs = kwargs.get("chat_template_kwargs")
112112
if chat_template_kwargs:
113-
for k, v in chat_template_kwargs.items():
114-
if k not in task:
115-
task[k] = v
113+
if isinstance(chat_template_kwargs, dict):
114+
for k, v in chat_template_kwargs.items():
115+
if k not in task:
116+
task[k] = v
117+
else:
118+
raise ValueError("Invalid input: chat_template_kwargs must be a dict")
116119
request.prompt_token_ids = self.messages2ids(task)
117120

118121
if len(request.prompt_token_ids) == 0:
@@ -170,9 +173,12 @@ def process_request_dict(self, request, max_model_len=None):
170173
else:
171174
chat_template_kwargs = request.get("chat_template_kwargs")
172175
if chat_template_kwargs:
173-
for k, v in chat_template_kwargs.items():
174-
if k not in request:
175-
request[k] = v
176+
if isinstance(chat_template_kwargs, dict):
177+
for k, v in chat_template_kwargs.items():
178+
if k not in request:
179+
request[k] = v
180+
else:
181+
raise ValueError("Invalid input: chat_template_kwargs must be a dict")
176182
request["prompt_token_ids"] = self.messages2ids(request)
177183
if len(request["prompt_token_ids"]) == 0:
178184
raise ValueError("Invalid input: prompt_token_ids must be a non-empty sequence of token IDs")

fastdeploy/input/ernie_vl_processor.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -219,9 +219,12 @@ def process_request_dict(self, request, max_model_len=None):
219219
self._check_mm_limits(messages)
220220
chat_template_kwargs = request.get("chat_template_kwargs")
221221
if chat_template_kwargs:
222-
for k, v in chat_template_kwargs.items():
223-
if k not in request:
224-
request[k] = v
222+
if isinstance(chat_template_kwargs, dict):
223+
for k, v in chat_template_kwargs.items():
224+
if k not in request:
225+
request[k] = v
226+
else:
227+
raise ValueError("Invalid input: chat_template_kwargs must be a dict")
225228
request.setdefault("enable_thinking", True)
226229
outputs = self.ernie_processor.request2ids(request)
227230
else:

fastdeploy/input/text_processor.py

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -222,9 +222,12 @@ def process_request(self, request, max_model_len=None, **kwargs):
222222
task = request.to_dict()
223223
chat_template_kwargs = kwargs.get("chat_template_kwargs")
224224
if chat_template_kwargs:
225-
for k, v in chat_template_kwargs.items():
226-
if k not in task:
227-
task[k] = v
225+
if isinstance(chat_template_kwargs, dict):
226+
for k, v in chat_template_kwargs.items():
227+
if k not in task:
228+
task[k] = v
229+
else:
230+
raise ValueError("Invalid input: chat_template_kwargs must be a dict")
228231
task.setdefault("enable_thinking", True)
229232
request.prompt_token_ids = self.messages2ids(task)
230233
else:
@@ -277,9 +280,12 @@ def process_request_dict(self, request, max_model_len=None, **kwargs):
277280
raise ValueError("This model does not support chat_template.")
278281
chat_template_kwargs = request.get("chat_template_kwargs")
279282
if chat_template_kwargs:
280-
for k, v in chat_template_kwargs.items():
281-
if k not in request:
282-
request[k] = v
283+
if isinstance(chat_template_kwargs, dict):
284+
for k, v in chat_template_kwargs.items():
285+
if k not in request:
286+
request[k] = v
287+
else:
288+
raise ValueError("Invalid input: chat_template_kwargs must be a dict")
283289
request.setdefault("enable_thinking", True)
284290
request["prompt_token_ids"] = self.messages2ids(request)
285291
else:

0 commit comments

Comments (0)