Skip to content

Commit 26430bd

Browse files
committed
modify condition
1 parent af57373 commit 26430bd

File tree

3 files changed: +15 additions, −30 deletions

fastdeploy/input/ernie_processor.py

Lines changed: 6 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -110,12 +110,9 @@ def process_request(self, request, max_model_len=None, **kwargs):
110110
task = request.to_dict()
111111
chat_template_kwargs = kwargs.get("chat_template_kwargs")
112112
if chat_template_kwargs:
113-
if isinstance(chat_template_kwargs, dict):
114-
for k, v in chat_template_kwargs.items():
115-
if k not in task:
116-
task[k] = v
117-
else:
118-
raise ValueError("Invalid input: chat_template_kwargs must be a dict")
113+
for k, v in chat_template_kwargs.items():
114+
if k not in task:
115+
task[k] = v
119116
request.prompt_token_ids = self.messages2ids(task)
120117

121118
if len(request.prompt_token_ids) == 0:
@@ -173,12 +170,9 @@ def process_request_dict(self, request, max_model_len=None):
173170
else:
174171
chat_template_kwargs = request.get("chat_template_kwargs")
175172
if chat_template_kwargs:
176-
if isinstance(chat_template_kwargs, dict):
177-
for k, v in chat_template_kwargs.items():
178-
if k not in request:
179-
request[k] = v
180-
else:
181-
raise ValueError("Invalid input: chat_template_kwargs must be a dict")
173+
for k, v in chat_template_kwargs.items():
174+
if k not in request:
175+
request[k] = v
182176
request["prompt_token_ids"] = self.messages2ids(request)
183177
if len(request["prompt_token_ids"]) == 0:
184178
raise ValueError("Invalid input: prompt_token_ids must be a non-empty sequence of token IDs")

fastdeploy/input/ernie_vl_processor.py

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -219,12 +219,9 @@ def process_request_dict(self, request, max_model_len=None):
219219
self._check_mm_limits(messages)
220220
chat_template_kwargs = request.get("chat_template_kwargs")
221221
if chat_template_kwargs:
222-
if isinstance(chat_template_kwargs, dict):
223-
for k, v in chat_template_kwargs.items():
224-
if k not in request:
225-
request[k] = v
226-
else:
227-
raise ValueError("Invalid input: chat_template_kwargs must be a dict")
222+
for k, v in chat_template_kwargs.items():
223+
if k not in request:
224+
request[k] = v
228225
request.setdefault("enable_thinking", True)
229226
outputs = self.ernie_processor.request2ids(request)
230227
else:

fastdeploy/input/text_processor.py

Lines changed: 6 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -222,12 +222,9 @@ def process_request(self, request, max_model_len=None, **kwargs):
222222
task = request.to_dict()
223223
chat_template_kwargs = kwargs.get("chat_template_kwargs")
224224
if chat_template_kwargs:
225-
if isinstance(chat_template_kwargs, dict):
226-
for k, v in chat_template_kwargs.items():
227-
if k not in task:
228-
task[k] = v
229-
else:
230-
raise ValueError("Invalid input: chat_template_kwargs must be a dict")
225+
for k, v in chat_template_kwargs.items():
226+
if k not in task:
227+
task[k] = v
231228
task.setdefault("enable_thinking", True)
232229
request.prompt_token_ids = self.messages2ids(task)
233230
else:
@@ -280,12 +277,9 @@ def process_request_dict(self, request, max_model_len=None, **kwargs):
280277
raise ValueError("This model does not support chat_template.")
281278
chat_template_kwargs = request.get("chat_template_kwargs")
282279
if chat_template_kwargs:
283-
if isinstance(chat_template_kwargs, dict):
284-
for k, v in chat_template_kwargs.items():
285-
if k not in request:
286-
request[k] = v
287-
else:
288-
raise ValueError("Invalid input: chat_template_kwargs must be a dict")
280+
for k, v in chat_template_kwargs.items():
281+
if k not in request:
282+
request[k] = v
289283
request.setdefault("enable_thinking", True)
290284
request["prompt_token_ids"] = self.messages2ids(request)
291285
else:

0 commit comments

Comments (0)