Commit c2defc1

fix litellm log content (#1278)
1 parent 417766e commit c2defc1

File tree

1 file changed: +6 -4 lines changed


rdagent/oai/backend/litellm.py

Lines changed: 6 additions & 4 deletions
@@ -158,7 +158,8 @@ def _create_chat_completion_inner_function( # type: ignore[no-untyped-def] # no
             **complete_kwargs,
             **kwargs,
         )
-        logger.info(f"{LogColors.GREEN}Using chat model{LogColors.END} {model}", tag="llm_messages")
+        if LITELLM_SETTINGS.log_llm_chat_content:
+            logger.info(f"{LogColors.GREEN}Using chat model{LogColors.END} {model}", tag="llm_messages")
 
         if LITELLM_SETTINGS.chat_stream:
             if LITELLM_SETTINGS.log_llm_chat_content:
@@ -198,9 +199,10 @@ def _create_chat_completion_inner_function( # type: ignore[no-untyped-def] # no
             cost = np.nan
         else:
             ACC_COST += cost
-            logger.info(
-                f"Current Cost: ${float(cost):.10f}; Accumulated Cost: ${float(ACC_COST):.10f}; {finish_reason=}",
-            )
+            if LITELLM_SETTINGS.log_llm_chat_content:
+                logger.info(
+                    f"Current Cost: ${float(cost):.10f}; Accumulated Cost: ${float(ACC_COST):.10f}; {finish_reason=}",
+                )
 
         prompt_tokens = token_counter(model=model, messages=messages)
         completion_tokens = token_counter(model=model, text=content)
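The change follows one pattern: verbose chat and cost logging is emitted only when the backend's log_llm_chat_content setting is enabled. Below is a minimal, self-contained sketch of that gating pattern; the BackendSettings class, its default value, the logger wiring, and the log_chat_call helper are illustrative assumptions, not the actual rdagent/litellm code.

# Sketch of the gating pattern applied in this commit: verbose LLM chat
# logging only fires when a settings flag is on. Names below are assumptions
# for illustration, not the real rdagent implementation.
import logging
from dataclasses import dataclass

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("llm_backend")


@dataclass
class BackendSettings:
    # Plays the role of LITELLM_SETTINGS.log_llm_chat_content in the diff.
    log_llm_chat_content: bool = False


SETTINGS = BackendSettings()


def log_chat_call(model: str, cost: float, acc_cost: float, finish_reason: str) -> None:
    # Mirror the `if LITELLM_SETTINGS.log_llm_chat_content:` guards added here:
    # skip both the model banner and the cost line when the flag is off.
    if SETTINGS.log_llm_chat_content:
        logger.info("Using chat model %s", model)
        logger.info(
            "Current Cost: $%.10f; Accumulated Cost: $%.10f; finish_reason=%s",
            cost,
            acc_cost,
            finish_reason,
        )


if __name__ == "__main__":
    SETTINGS.log_llm_chat_content = True  # enable verbose chat logging for this demo run
    log_chat_call("gpt-4o", 0.0001234, 0.0004567, "stop")

With the flag left at its default, the calls above produce no output; flipping it on restores the same messages the patched code now emits conditionally.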
