advanced_prompt_template_service.py

import copy

from core.prompt.prompt_transform import AppMode
from core.prompt.advanced_prompt_templates import (
    CHAT_APP_COMPLETION_PROMPT_CONFIG,
    CHAT_APP_CHAT_PROMPT_CONFIG,
    COMPLETION_APP_CHAT_PROMPT_CONFIG,
    COMPLETION_APP_COMPLETION_PROMPT_CONFIG,
    BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG,
    BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG,
    BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG,
    BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG,
    CONTEXT,
    BAICHUAN_CONTEXT,
)


class AdvancedPromptTemplateService:

    @classmethod
    def get_prompt(cls, args: dict) -> dict:
        """Return the default advanced prompt template for the given app mode, model mode and model."""
        app_mode = args['app_mode']
        model_mode = args['model_mode']
        model_name = args['model_name']
        has_context = args['has_context']

        # Baichuan models use dedicated prompt templates; all other models share the common ones.
        if 'baichuan' in model_name.lower():
            return cls.get_baichuan_prompt(app_mode, model_mode, has_context)
        else:
            return cls.get_common_prompt(app_mode, model_mode, has_context)

    @classmethod
    def get_common_prompt(cls, app_mode: str, model_mode: str, has_context: str) -> dict:
        context_prompt = copy.deepcopy(CONTEXT)

        if app_mode == AppMode.CHAT.value:
            if model_mode == "completion":
                return cls.get_completion_prompt(
                    copy.deepcopy(CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt)
            elif model_mode == "chat":
                return cls.get_chat_prompt(
                    copy.deepcopy(CHAT_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt)
        elif app_mode == AppMode.COMPLETION.value:
            if model_mode == "completion":
                return cls.get_completion_prompt(
                    copy.deepcopy(COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt)
            elif model_mode == "chat":
                return cls.get_chat_prompt(
                    copy.deepcopy(COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt)

    @classmethod
    def get_completion_prompt(cls, prompt_template: dict, has_context: str, context: str) -> dict:
        # Prepend the context snippet to the completion prompt text when context is enabled.
        if has_context == 'true':
            prompt_template['completion_prompt_config']['prompt']['text'] = (
                context + prompt_template['completion_prompt_config']['prompt']['text'])

        return prompt_template

    @classmethod
    def get_chat_prompt(cls, prompt_template: dict, has_context: str, context: str) -> dict:
        # Prepend the context snippet to the first chat message when context is enabled.
        if has_context == 'true':
            prompt_template['chat_prompt_config']['prompt'][0]['text'] = (
                context + prompt_template['chat_prompt_config']['prompt'][0]['text'])

        return prompt_template

    @classmethod
    def get_baichuan_prompt(cls, app_mode: str, model_mode: str, has_context: str) -> dict:
        baichuan_context_prompt = copy.deepcopy(BAICHUAN_CONTEXT)

        if app_mode == AppMode.CHAT.value:
            if model_mode == "completion":
                return cls.get_completion_prompt(
                    copy.deepcopy(BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, baichuan_context_prompt)
            elif model_mode == "chat":
                return cls.get_chat_prompt(
                    copy.deepcopy(BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt)
        elif app_mode == AppMode.COMPLETION.value:
            if model_mode == "completion":
                return cls.get_completion_prompt(
                    copy.deepcopy(BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, baichuan_context_prompt)
            elif model_mode == "chat":
                return cls.get_chat_prompt(
                    copy.deepcopy(BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt)
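

# Usage sketch: a minimal illustration of how the service selects a template,
# written as a __main__ guard so the module itself is unchanged. The argument
# values below are hypothetical examples, not taken from the original file,
# and running it assumes the project's core.prompt package is importable.
if __name__ == '__main__':
    example = AdvancedPromptTemplateService.get_prompt({
        'app_mode': 'chat',
        'model_mode': 'completion',
        'model_name': 'baichuan2-13b-chat',  # any name containing "baichuan" picks the Baichuan templates
        'has_context': 'true',               # 'true' prepends the context snippet to the prompt text
    })
    # Expected: a deep copy of BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG with
    # BAICHUAN_CONTEXT prepended to its completion prompt text.
    print(example)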