@@ -1,11 +1,8 @@
-import decimal
 import logging
-from functools import wraps
 from typing import List, Optional, Any
 
 import anthropic
 from langchain.callbacks.manager import Callbacks
-from langchain.chat_models import ChatAnthropic
 from langchain.schema import LLMResult
 
 from core.model_providers.error import LLMBadRequestError, LLMAPIConnectionError, LLMAPIUnavailableError, \
@@ -13,6 +10,7 @@ from core.model_providers.error import LLMBadRequestError, LLMAPIConnectionError
 from core.model_providers.models.llm.base import BaseLLM
 from core.model_providers.models.entity.message import PromptMessage, MessageType
 from core.model_providers.models.entity.model_params import ModelMode, ModelKwargs
+from core.third_party.langchain.llms.anthropic_llm import AnthropicLLM
 
 
 class AnthropicModel(BaseLLM):
@@ -20,7 +18,7 @@ class AnthropicModel(BaseLLM):
 
     def _init_client(self) -> Any:
         provider_model_kwargs = self._to_model_kwargs_input(self.model_rules, self.model_kwargs)
-        return ChatAnthropic(
+        return AnthropicLLM(
             model=self.name,
             streaming=self.streaming,
             callbacks=self.callbacks,
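
The vendored wrapper imported above (`core/third_party/langchain/llms/anthropic_llm.py`) is not part of this hunk. A minimal sketch of what such a drop-in wrapper could look like, assuming it subclasses LangChain's `ChatAnthropic` so the `_init_client` call site keeps the same constructor arguments (`model`, `streaming`, `callbacks`, plus the provider model kwargs); the class body shown here is illustrative, not the actual file contents:

```python
# Hypothetical sketch only: the real core/third_party/langchain/llms/anthropic_llm.py
# is not shown in this diff. Assumes the wrapper is a thin subclass of LangChain's
# ChatAnthropic, so AnthropicLLM(model=..., streaming=..., callbacks=..., **kwargs)
# accepts the same arguments the removed ChatAnthropic call used.
from langchain.chat_models import ChatAnthropic


class AnthropicLLM(ChatAnthropic):
    """Drop-in replacement for ChatAnthropic.

    Subclassing keeps the call site in AnthropicModel._init_client unchanged
    while giving the project a single place to pin Anthropic-specific defaults
    or override request/response handling later.
    """
    pass
```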