"""Dify LLM plugin routing requests to a self-hosted Qwen3-32B-H800 gateway."""
from collections.abc import Generator
from typing import Optional, Union

from dify_plugin import OAICompatLargeLanguageModel
from dify_plugin.entities.model.llm import LLMMode, LLMResult
from dify_plugin.entities.model.message import PromptMessage, PromptMessageTool
class DeepseekLargeLanguageModel(OAICompatLargeLanguageModel):
    """LLM plugin that routes requests to a self-hosted Qwen3-32B-H800
    deployment through the OpenAI-compatible base implementation.

    Before delegating to the base class, the fixed gateway configuration
    (endpoint URL, chat mode, tool-calling flags, auth header) is injected
    into ``credentials`` via :meth:`_add_custom_parameters`.
    """

    def _invoke(
        self,
        model: str,
        credentials: dict,
        prompt_messages: list[PromptMessage],
        model_parameters: dict,
        tools: Optional[list[PromptMessageTool]] = None,
        stop: Optional[list[str]] = None,
        stream: bool = True,
        user: Optional[str] = None,
    ) -> Union[LLMResult, Generator]:
        """Invoke the model through the OpenAI-compatible base class.

        :param model: model name requested by the caller (currently
            overridden — see NOTE below).
        :param credentials: credential dict, mutated in place with the
            gateway's fixed settings before delegation.
        :param prompt_messages: conversation messages to send.
        :param model_parameters: generation parameters (temperature, etc.).
        :param tools: optional tool definitions for function calling.
        :param stop: optional stop sequences.
        :param stream: whether to stream the response.
        :param user: optional end-user identifier, forwarded to the base
            implementation.
        :return: an ``LLMResult`` or, when streaming, a generator of chunks.
        """
        self._add_custom_parameters(credentials)
        # NOTE(review): the caller-supplied `model` is discarded and replaced
        # with the name this gateway serves under — confirm this is intentional
        # rather than debug leftover.
        model = "Qwen3-32B-H800"
        # Fix: forward `user` to the base implementation (it was previously
        # accepted but silently dropped).
        return super()._invoke(
            model,
            credentials,
            prompt_messages,
            model_parameters,
            tools,
            stop,
            stream,
            user,
        )

    def validate_credentials(self, model: str, credentials: dict) -> None:
        """Validate credentials after injecting the fixed gateway settings.

        Fix: this override was commented out; without it the base-class
        validation runs against credentials that lack ``endpoint_url`` and
        the required headers, so it cannot succeed.

        :raises: whatever the base-class validation raises on failure.
        """
        self._add_custom_parameters(credentials)
        super().validate_credentials(model, credentials)

    @staticmethod
    def _add_custom_parameters(credentials: dict) -> None:
        """Mutate ``credentials`` in place with the gateway's fixed settings."""
        # NOTE(review): this URL includes the full /chat/completions path;
        # OpenAI-compatible bases typically expect the base URL only — verify
        # against the gateway before relying on it.
        credentials["endpoint_url"] = (
            "http://10.132.200.185:30012/gateway/ti/qwen3-32b-h800/v1/chat/completions"
        )
        credentials["mode"] = LLMMode.CHAT.value
        credentials["function_calling_type"] = "tool_call"
        credentials["stream_function_calling"] = "support"
        # SECURITY: hard-coded API key committed to source — move this into
        # the plugin's credential configuration / a secret store.
        credentials["extra_headers"] = {
            "Content-type": "application/json",
            "szc-api-key": "f4eb2eb55b4f4a17a5",
        }