Skip to content

Commit d339403

Browse files
authored
Chore: optimize the code of PromptTransform (langgenius#16143)
1 parent e0cf55f commit d339403

File tree

3 files changed

+8
-11
lines changed

3 files changed

+8
-11
lines changed

api/core/prompt/simple_prompt_transform.py

+8-8
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,7 @@ def get_prompt(
9393

9494
return prompt_messages, stops
9595

96-
def get_prompt_str_and_rules(
96+
def _get_prompt_str_and_rules(
9797
self,
9898
app_mode: AppMode,
9999
model_config: ModelConfigWithCredentialsEntity,
@@ -184,7 +184,7 @@ def _get_chat_model_prompt_messages(
184184
prompt_messages: list[PromptMessage] = []
185185

186186
# get prompt
187-
prompt, _ = self.get_prompt_str_and_rules(
187+
prompt, _ = self._get_prompt_str_and_rules(
188188
app_mode=app_mode,
189189
model_config=model_config,
190190
pre_prompt=pre_prompt,
@@ -209,9 +209,9 @@ def _get_chat_model_prompt_messages(
209209
)
210210

211211
if query:
212-
prompt_messages.append(self.get_last_user_message(query, files, image_detail_config))
212+
prompt_messages.append(self._get_last_user_message(query, files, image_detail_config))
213213
else:
214-
prompt_messages.append(self.get_last_user_message(prompt, files, image_detail_config))
214+
prompt_messages.append(self._get_last_user_message(prompt, files, image_detail_config))
215215

216216
return prompt_messages, None
217217

@@ -228,7 +228,7 @@ def _get_completion_model_prompt_messages(
228228
image_detail_config: Optional[ImagePromptMessageContent.DETAIL] = None,
229229
) -> tuple[list[PromptMessage], Optional[list[str]]]:
230230
# get prompt
231-
prompt, prompt_rules = self.get_prompt_str_and_rules(
231+
prompt, prompt_rules = self._get_prompt_str_and_rules(
232232
app_mode=app_mode,
233233
model_config=model_config,
234234
pre_prompt=pre_prompt,
@@ -254,7 +254,7 @@ def _get_completion_model_prompt_messages(
254254
)
255255

256256
# get prompt
257-
prompt, prompt_rules = self.get_prompt_str_and_rules(
257+
prompt, prompt_rules = self._get_prompt_str_and_rules(
258258
app_mode=app_mode,
259259
model_config=model_config,
260260
pre_prompt=pre_prompt,
@@ -268,9 +268,9 @@ def _get_completion_model_prompt_messages(
268268
if stops is not None and len(stops) == 0:
269269
stops = None
270270

271-
return [self.get_last_user_message(prompt, files, image_detail_config)], stops
271+
return [self._get_last_user_message(prompt, files, image_detail_config)], stops
272272

273-
def get_last_user_message(
273+
def _get_last_user_message(
274274
self,
275275
prompt: str,
276276
files: Sequence["File"],

api/tests/unit_tests/core/prompt/test_agent_history_prompt_transform.py

-2
Original file line numberDiff line numberDiff line change
@@ -64,12 +64,10 @@ def side_effect_get_num_tokens(*args):
6464
transform._calculate_rest_token = MagicMock(return_value=max_token_limit)
6565
result = transform.get_prompt()
6666

67-
assert len(result) <= max_token_limit
6867
assert len(result) == 4
6968

7069
max_token_limit = 20
7170
transform._calculate_rest_token = MagicMock(return_value=max_token_limit)
7271
result = transform.get_prompt()
7372

74-
assert len(result) <= max_token_limit
7573
assert len(result) == 12

api/tests/unit_tests/core/prompt/test_simple_prompt_transform.py

-1
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,6 @@ def test_get_baichuan_completion_app_prompt_template_with_pcq():
8484
query_in_prompt=True,
8585
with_memory_prompt=False,
8686
)
87-
print(prompt_template["prompt_template"].template)
8887
prompt_rules = prompt_template["prompt_rules"]
8988
assert prompt_template["prompt_template"].template == (
9089
prompt_rules["context_prompt"] + pre_prompt + "\n" + prompt_rules["query_prompt"]

0 commit comments

Comments (0)