Feat/advanced prompt enhancement (#1340)

Garfield Dai 2023-10-13 16:47:01 +08:00 committed by GitHub
parent 3efaa713da
commit 695841a3cf
4 changed files with 35 additions and 22 deletions

View File

@@ -20,7 +20,6 @@ class AdvancedPromptTemplateList(Resource):
         parser.add_argument('model_name', type=str, required=True, location='args')
         args = parser.parse_args()
 
-        service = AdvancedPromptTemplateService()
-        return service.get_prompt(args)
+        return AdvancedPromptTemplateService.get_prompt(args)
 
 api.add_resource(AdvancedPromptTemplateList, '/app/prompt-templates')
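
Since get_prompt is now a classmethod, the controller no longer constructs the service. A minimal usage sketch, not part of the diff; the argument values are illustrative only:

args = {
    'app_mode': 'chat',               # 'chat' or 'completion'
    'model_mode': 'completion',       # 'completion' or 'chat'
    'model_name': 'gpt-3.5-turbo',    # illustrative; any name without 'baichuan' uses the common templates
    'has_context': 'true'             # passed through as a string flag, not a bool
}

# No instantiation needed any more: the template is resolved on the class itself.
prompt_template = AdvancedPromptTemplateService.get_prompt(args)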

View File

@@ -11,7 +11,8 @@ CHAT_APP_COMPLETION_PROMPT_CONFIG = {
             "user_prefix": "Human",
             "assistant_prefix": "Assistant"
         }
-    }
+    },
+    "stop": ["Human:"]
 }
 
 CHAT_APP_CHAT_PROMPT_CONFIG = {
@@ -37,7 +38,8 @@ COMPLETION_APP_COMPLETION_PROMPT_CONFIG = {
         "prompt": {
             "text": "{{#pre_prompt#}}"
         }
-    }
+    },
+    "stop": ["Human:"]
 }
 
 BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG = {
@@ -49,7 +51,8 @@ BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG = {
             "user_prefix": "用户",
             "assistant_prefix": "助手"
         }
-    }
+    },
+    "stop": ["用户:"]
 }
 
 BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG = {
@@ -75,5 +78,6 @@ BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG = {
         "prompt": {
             "text": "{{#pre_prompt#}}"
         }
-    }
+    },
+    "stop": ["用户:"]
 }
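
Reassembled from the hunk above and the keys the service references, the completion-app template now carries its stop sequence next to the prompt config rather than inside it; the same move is applied to each completion-style template touched in this file:

COMPLETION_APP_COMPLETION_PROMPT_CONFIG = {
    "completion_prompt_config": {
        "prompt": {
            "text": "{{#pre_prompt#}}"
        }
    },
    "stop": ["Human:"]
}

Keeping "stop" at the top level lets the service prepend context to prompt.text without having to touch or re-derive the stop list.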

View File

@@ -6,51 +6,58 @@ from core.prompt.advanced_prompt_templates import CHAT_APP_COMPLETION_PROMPT_CON
 
 class AdvancedPromptTemplateService:
 
-    def get_prompt(self, args: dict) -> dict:
+    @classmethod
+    def get_prompt(cls, args: dict) -> dict:
         app_mode = args['app_mode']
         model_mode = args['model_mode']
         model_name = args['model_name']
         has_context = args['has_context']
 
         if 'baichuan' in model_name:
-            return self.get_baichuan_prompt(app_mode, model_mode, has_context)
+            return cls.get_baichuan_prompt(app_mode, model_mode, has_context)
         else:
-            return self.get_common_prompt(app_mode, model_mode, has_context)
+            return cls.get_common_prompt(app_mode, model_mode, has_context)
 
-    def get_common_prompt(self, app_mode: str, model_mode:str, has_context: bool) -> dict:
+    @classmethod
+    def get_common_prompt(cls, app_mode: str, model_mode:str, has_context: str) -> dict:
+        context_prompt = copy.deepcopy(CONTEXT)
+
         if app_mode == 'chat':
             if model_mode == 'completion':
-                return self.get_completion_prompt(copy.deepcopy(CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, CONTEXT)
+                return cls.get_completion_prompt(copy.deepcopy(CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt)
             elif model_mode == 'chat':
-                return self.get_chat_prompt(copy.deepcopy(CHAT_APP_CHAT_PROMPT_CONFIG), has_context, CONTEXT)
+                return cls.get_chat_prompt(copy.deepcopy(CHAT_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt)
         elif app_mode == 'completion':
             if model_mode == 'completion':
-                return self.get_completion_prompt(copy.deepcopy(COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, CONTEXT)
+                return cls.get_completion_prompt(copy.deepcopy(COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, context_prompt)
             elif model_mode == 'chat':
-                return self.get_chat_prompt(copy.deepcopy(COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, CONTEXT)
+                return cls.get_chat_prompt(copy.deepcopy(COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, context_prompt)
 
-    def get_completion_prompt(self, prompt_template: str, has_context: bool, context: str) -> dict:
+    @classmethod
+    def get_completion_prompt(cls, prompt_template: dict, has_context: str, context: str) -> dict:
         if has_context == 'true':
             prompt_template['completion_prompt_config']['prompt']['text'] = context + prompt_template['completion_prompt_config']['prompt']['text']
 
         return prompt_template
 
-    def get_chat_prompt(self, prompt_template: str, has_context: bool, context: str) -> dict:
+    @classmethod
+    def get_chat_prompt(cls, prompt_template: dict, has_context: str, context: str) -> dict:
         if has_context == 'true':
             prompt_template['chat_prompt_config']['prompt'][0]['text'] = context + prompt_template['chat_prompt_config']['prompt'][0]['text']
 
         return prompt_template
 
-    def get_baichuan_prompt(self, app_mode: str, model_mode:str, has_context: bool) -> dict:
+    @classmethod
+    def get_baichuan_prompt(cls, app_mode: str, model_mode:str, has_context: str) -> dict:
+        baichuan_context_prompt = copy.deepcopy(BAICHUAN_CONTEXT)
+
         if app_mode == 'chat':
             if model_mode == 'completion':
-                return self.get_completion_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
+                return cls.get_completion_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_COMPLETION_PROMPT_CONFIG), has_context, baichuan_context_prompt)
             elif model_mode == 'chat':
-                return self.get_chat_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
+                return cls.get_chat_prompt(copy.deepcopy(BAICHUAN_CHAT_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt)
         elif app_mode == 'completion':
             if model_mode == 'completion':
-                return self.get_completion_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
+                return cls.get_completion_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_COMPLETION_PROMPT_CONFIG), has_context, baichuan_context_prompt)
             elif model_mode == 'chat':
-                return self.get_chat_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, BAICHUAN_CONTEXT)
+                return cls.get_chat_prompt(copy.deepcopy(BAICHUAN_COMPLETION_APP_CHAT_PROMPT_CONFIG), has_context, baichuan_context_prompt)
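
A short sketch of the resulting dispatch, assuming illustrative model names and that CONTEXT / BAICHUAN_CONTEXT are the context prompt strings defined alongside the templates (their exact contents are not part of this diff):

# Illustrative calls only; model names are placeholders.
common = AdvancedPromptTemplateService.get_prompt({
    'app_mode': 'completion',
    'model_mode': 'completion',
    'model_name': 'gpt-4',       # no 'baichuan' substring -> get_common_prompt
    'has_context': 'true'        # only the literal string 'true' triggers context injection
})
# The template dict is deep-copied before the context is prepended,
# so COMPLETION_APP_COMPLETION_PROMPT_CONFIG itself is never mutated.
assert common['completion_prompt_config']['prompt']['text'].endswith('{{#pre_prompt#}}')
assert common['stop'] == ['Human:']

baichuan = AdvancedPromptTemplateService.get_prompt({
    'app_mode': 'chat',
    'model_mode': 'chat',
    'model_name': 'baichuan-13b-chat',  # 'baichuan' substring -> get_baichuan_prompt
    'has_context': 'false'              # anything other than 'true' leaves the prompt untouched
})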

View File

@@ -57,6 +57,9 @@ class AppModelConfigService:
         elif not isinstance(cp["stop"], list):
             raise ValueError("stop in model.completion_params must be of list type")
 
+        if len(cp["stop"]) > 4:
+            raise ValueError("stop sequences must be less than 4")
+
         # Filter out extra parameters
         filtered_cp = {
             "max_tokens": cp["max_tokens"],