llm.py

from collections.abc import Generator
from typing import Optional, Union

from dify_plugin import OAICompatLargeLanguageModel
from dify_plugin.entities.model.llm import LLMMode, LLMResult
from dify_plugin.entities.model.message import PromptMessage, PromptMessageTool


class DeepseekLargeLanguageModel(OAICompatLargeLanguageModel):
    def _invoke(
        self,
        model: str,
        credentials: dict,
        prompt_messages: list[PromptMessage],
        model_parameters: dict,
        tools: Optional[list[PromptMessageTool]] = None,
        stop: Optional[list[str]] = None,
        stream: bool = True,
        user: Optional[str] = None,
    ) -> Union[LLMResult, Generator]:
        # Inject the gateway-specific settings, then delegate to the
        # OpenAI-compatible base implementation (forwarding `user` as well).
        self._add_custom_parameters(credentials)
        return super()._invoke(
            model, credentials, prompt_messages, model_parameters, tools, stop, stream, user
        )

    # def validate_credentials(self, model: str, credentials: dict) -> None:
    #     self._add_custom_parameters(credentials)
    #     super().validate_credentials(model, credentials)

    @staticmethod
    def _add_custom_parameters(credentials) -> None:
        # Hardcoded endpoint, chat mode, tool-calling flags, and headers
        # (including the API key) for the internal Tencent TI gateway.
        credentials["endpoint_url"] = "http://10.132.200.185:30012/gateway/ti/v1/"
        credentials["mode"] = LLMMode.CHAT.value
        credentials["function_calling_type"] = "tool_call"
        credentials["stream_function_calling"] = "support"
        credentials["extra_headers"] = {
            "Content-Type": "application/json",
            "X-TC-Project": "1",
            "X-TC-Service": "deepseek-r1-h800-master-sglang",
            "X-TC-Action": "/v1/chat/completions",
            "X-TC-Version": "2020-10-01",
            "szc-api-key": "f4eb2eb55b4f4a17a5",
        }
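
A minimal sketch of how the credential injection could be sanity-checked in isolation, assuming this file is importable as `llm` and `dify_plugin` is installed; the `api_key` placeholder value is illustrative, not part of the plugin:

# Hypothetical standalone check: _add_custom_parameters is a staticmethod
# with no network I/O, so it can be exercised without the Dify plugin daemon.
from llm import DeepseekLargeLanguageModel  # assumes this module is llm.py

credentials: dict = {"api_key": "placeholder"}  # illustrative placeholder
DeepseekLargeLanguageModel._add_custom_parameters(credentials)

print(credentials["mode"])          # expected: "chat" (LLMMode.CHAT.value)
print(credentials["endpoint_url"])  # the hardcoded gateway URL above
print(credentials["extra_headers"]["X-TC-Service"])

In actual use the Dify plugin runtime constructs the class and calls `_invoke`, which applies the same injection before handing off to the OpenAI-compatible base implementation.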