from collections.abc import Generator
from typing import Optional, Union

from dify_plugin import OAICompatLargeLanguageModel
from dify_plugin.entities.model.llm import LLMMode, LLMResult
from dify_plugin.entities.model.message import PromptMessage, PromptMessageTool
from yarl import URL


class DeepseekLargeLanguageModel(OAICompatLargeLanguageModel):
    """OpenAI-compatible LLM wrapper for a Deepseek model behind a TI gateway.

    Before every invocation the fixed gateway endpoint, chat mode, tool-call
    capabilities, and required gateway headers are injected into the
    ``credentials`` dict, then the request is delegated to the
    OpenAI-compatible base implementation.
    """

    def _invoke(
        self,
        model: str,
        credentials: dict,
        prompt_messages: list[PromptMessage],
        model_parameters: dict,
        tools: Optional[list[PromptMessageTool]] = None,
        stop: Optional[list[str]] = None,
        stream: bool = True,
        user: Optional[str] = None,
    ) -> Union[LLMResult, Generator]:
        """Invoke the model, returning a full result or a streaming generator.

        :param model: model name passed through to the gateway
        :param credentials: mutated in place with the fixed gateway settings
        :param prompt_messages: conversation history / prompt
        :param model_parameters: sampling parameters (temperature, etc.)
        :param tools: optional tool definitions for function calling
        :param stop: optional stop sequences
        :param stream: stream the response when True (default)
        :param user: optional end-user identifier
        :return: ``LLMResult`` when not streaming, otherwise a generator
        """
        self._add_custom_parameters(credentials)
        # FIX: `user` was previously accepted but silently dropped; forward it
        # so the base implementation can include it in the request.
        return super()._invoke(
            model,
            credentials,
            prompt_messages,
            model_parameters,
            tools,
            stop,
            stream,
            user,
        )

    # NOTE(review): credential validation is intentionally disabled — all
    # connection settings are hard-coded in _add_custom_parameters, so there
    # is nothing user-supplied to validate. Re-enable if credentials become
    # configurable.
    # def validate_credentials(self, model: str, credentials: dict) -> None:
    #     self._add_custom_parameters(credentials)
    #     super().validate_credentials(model, credentials)

    @staticmethod
    def _add_custom_parameters(credentials: dict) -> None:
        """Overwrite ``credentials`` in place with the fixed gateway settings.

        Sets the endpoint URL, chat mode, function-calling capability flags,
        and the extra HTTP headers the TI gateway requires.
        """
        # SECURITY: the endpoint URL and `szc-api-key` below are hard-coded
        # secrets committed to source. Move them to plugin configuration or a
        # secret store before shipping.
        credentials["endpoint_url"] = "http://10.132.200.185:30012/gateway/ti/v1/"
        credentials["mode"] = LLMMode.CHAT.value
        credentials["function_calling_type"] = "tool_call"
        credentials["stream_function_calling"] = "support"
        credentials["extra_headers"] = {
            "Content-type": "application/json",
            "X-TC-Project": "1",
            "X-TC-Service": "deepseek-r1-h800-master-sglang",
            "X-TC-Action": "/v1/chat/completions",
            "X-TC-Version": "2020-10-01",
            "szc-api-key": "f4eb2eb55b4f4a17a5",
        }