Skip to content

Commit 43b47ed

Browse files
authored
Merge pull request #102 from ansible/vulnerability/AAP-47388
AAP-47388: upgrade setuptools
2 parents c9a648b + 7011e01 commit 43b47ed

File tree

11 files changed

+351
-261
lines changed

11 files changed

+351
-261
lines changed
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
"""Workaround for the compatibility issue between max_tokens and max_completion_tokens."""
2+
3+
from typing import Any, List, Optional
4+
5+
from langchain_core.language_models import LanguageModelInput
6+
from langchain_openai import ChatOpenAI
7+
8+
9+
class ChatRHOAI(ChatOpenAI):
    """ChatOpenAI variant that renames ``max_completion_tokens`` back to ``max_tokens``.

    Newer langchain-openai releases emit ``max_completion_tokens`` in the
    request payload, but some OpenAI-compatible servers (e.g. vLLM-backed
    RHOAI deployments) only accept the legacy ``max_tokens`` field.  This
    subclass rewrites the payload just before it is sent.
    """

    _MISSING = object()  # sentinel: distinguishes "key absent" from "key is None"

    def _get_request_payload(
        self,
        input_: LanguageModelInput,
        *,
        stop: Optional[List[str]] = None,
        **kwargs: Any,
    ) -> dict:
        """Build the request payload, translating the token-limit key name.

        Delegates to ``ChatOpenAI._get_request_payload`` and, when the result
        contains ``max_completion_tokens``, moves that value (whatever it is,
        including ``None``) under the legacy ``max_tokens`` key.
        """
        request = super()._get_request_payload(input_, stop=stop, **kwargs)
        limit = request.pop("max_completion_tokens", self._MISSING)
        if limit is not self._MISSING:
            request["max_tokens"] = limit
        return request

ols/src/llms/providers/rhelai_vllm.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,9 +6,9 @@
66
from langchain.llms.base import LLM
77

88
from ols import constants
9+
from ols.src.llms.providers.chat_rhoai import ChatRHOAI
910
from ols.src.llms.providers.provider import LLMProvider
1011
from ols.src.llms.providers.registry import register_llm_provider_as
11-
from ols.src.llms.providers.rhoai_vllm import ChatRHOAI
1212

1313
logger = logging.getLogger(__name__)
1414

ols/src/llms/providers/rhoai_vllm.py

Lines changed: 2 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,12 @@
11
"""Red Hat OpenShift VLLM provider implementation."""
22

33
import logging
4-
from typing import Any, List, Optional
4+
from typing import Any, Optional
55

66
from langchain.llms.base import LLM
7-
from langchain_core.language_models import LanguageModelInput
8-
from langchain_openai import ChatOpenAI
97

108
from ols import constants
9+
from ols.src.llms.providers.chat_rhoai import ChatRHOAI
1110
from ols.src.llms.providers.provider import LLMProvider
1211
from ols.src.llms.providers.registry import register_llm_provider_as
1312

@@ -51,19 +50,3 @@ def default_params(self) -> dict[str, Any]:
5150
def load(self) -> LLM:
5251
"""Load LLM."""
5352
return ChatRHOAI(**self.params)
54-
55-
56-
class ChatRHOAI(ChatOpenAI):
57-
"""Workaround for the compatibility issue between max_tokens and max_completion_tokens."""
58-
59-
def _get_request_payload(
60-
self,
61-
input_: LanguageModelInput,
62-
*,
63-
stop: Optional[List[str]] = None,
64-
**kwargs: Any,
65-
) -> dict:
66-
payload = super()._get_request_payload(input_, stop=stop, **kwargs)
67-
if "max_completion_tokens" in payload:
68-
payload["max_tokens"] = payload.pop("max_completion_tokens")
69-
return payload

ols/utils/ssl.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,5 @@
11
"""Utility function for retrieving SSL version and list of ciphers for TLS security profile."""
22

3-
# ruff: noqa:A005
4-
53
import logging
64
from typing import Optional
75

0 commit comments

Comments
 (0)