@@ -6,51 +6,58 @@ from core.prompt.advanced_prompt_templates import CHAT_APP_COMPLETION_PROMPT_CON
 
 
 class AdvancedPromptTemplateService:
 
-    def get_prompt(self, args: dict) -> dict:
+    @classmethod
+    def get_prompt(cls, args: dict) -> dict:
         app_mode = args['app_mode']
         model_mode = args['model_mode']
         model_name = args['model_name']
         has_context = args['has_context']
 
         if 'baichuan' in model_name:
-            return self.get_baichuan_prompt(app_mode, model_mode, has_context)
+            return cls.get_baichuan_prompt(app_mode, model_mode, has_context)
         else:
-            return self.get_common_prompt(app_mode, model_mode, has_context)
+            return cls.get_common_prompt(app_mode, model_mode, has_context)
+
+    @classmethod
+    def get_common_prompt(cls, app_mode: str, model_mode:str, has_context: str) -> dict:
+        context_prompt = copy.deepcopy(CONTEXT)
 
-    def get_common_prompt(self, app_mode: str, model_mode:str, has_context: bool) -> dict:
         if app_mode == 'chat':
             if model_mode == 'completion':
-                return self.get_completion_prompt(copy.deepcopy(CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, CONTEXT)
+                return cls.get_completion_prompt(copy.deepcopy(CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt)
             elif model_mode == 'chat':
-                return self.get_chat_prompt(copy.deepcopy(CHAT_APP_CHAT_PROMPT_CONFIG), has_context, CONTEXT)
+                return cls.get_chat_prompt(copy.deepcopy(CHAT_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt)
         elif app_mode == 'completion':
             if model_mode == 'completion':
-                return self.get_completion_prompt(copy.deepcopy(COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, CONTEXT)
+                return cls.get_completion_prompt(copy.deepcopy(COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt)
             elif model_mode == 'chat':
-                return self.get_chat_prompt(copy.deepcopy(COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, CONTEXT)
+                return cls.get_chat_prompt(copy.deepcopy(COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt)
 
-    def get_completion_prompt(self, prompt_template: str, has_context: bool, context: str) -> dict:
+    @classmethod
+    def get_completion_prompt(cls, prompt_template: dict, has_context: str, context: str) -> dict:
         if has_context == 'true':
             prompt_template['completion_prompt_config']['prompt']['text'] = context + prompt_template['completion_prompt_config']['prompt']['text']
 
         return prompt_template
-
-    def get_chat_prompt(self, prompt_template: str, has_context: bool, context: str) -> dict:
+    @classmethod
+    def get_chat_prompt(cls, prompt_template: dict, has_context: str, context: str) -> dict:
         if has_context == 'true':
             prompt_template['chat_prompt_config']['prompt'][0]['text'] = context + prompt_template['chat_prompt_config']['prompt'][0]['text']
 
         return prompt_template
 
+    @classmethod
+    def get_baichuan_prompt(cls, app_mode: str, model_mode:str, has_context: str) -> dict:
+        baichuan_context_prompt = copy.deepcopy(BAICHUAN_CONTEXT)
 
-    def get_baichuan_prompt(self, app_mode: str, model_mode:str, has_context: bool) -> dict:
         if app_mode == 'chat':
             if model_mode == 'completion':
-                return self.get_completion_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
+                return cls.get_completion_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, baichuan_context_prompt)
             elif model_mode == 'chat':
-                return self.get_chat_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
+                return cls.get_chat_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt)
         elif app_mode == 'completion':
             if model_mode == 'completion':
-                return self.get_completion_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
+                return cls.get_completion_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, baichuan_context_prompt)
             elif model_mode == 'chat':
-                return self.get_chat_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
+                return cls.get_chat_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt)
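
For reference, a minimal call-site sketch after this change. The import path and model name below are placeholders (not part of this diff); the `args` keys and the `'true'`/`'false'` string convention for `has_context` are taken from the code above.

```python
# Assumed call site: with the methods converted to classmethods, no
# AdvancedPromptTemplateService() instance is needed, and has_context
# is passed as the string 'true'/'false' rather than a bool.
from services.advanced_prompt_template_service import AdvancedPromptTemplateService  # path assumed

prompt_template = AdvancedPromptTemplateService.get_prompt({
    'app_mode': 'chat',             # 'chat' or 'completion'
    'model_mode': 'completion',     # 'completion' or 'chat'
    'model_name': 'gpt-3.5-turbo',  # any name without 'baichuan' routes to get_common_prompt
    'has_context': 'true',          # string flag; prepends the deep-copied CONTEXT prompt
})
```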