"""Set of default prompts."""
from llama_index.core.prompts.base import PromptTemplate
from llama_index.core.prompts.prompt_type import PromptType
############################################
# Tree
############################################
DEFAULT_SUMMARY_PROMPT_TMPL = (
    "Write a summary of the following. Try to use only the "
    "information provided. "
    "Try to include as many key details as possible.\n"
    "\n"
    "\n"
    "{context_str}\n"
    "\n"
    "\n"
    'SUMMARY:"""\n'
)
DEFAULT_SUMMARY_PROMPT = PromptTemplate(
    DEFAULT_SUMMARY_PROMPT_TMPL, prompt_type=PromptType.SUMMARY
)
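
# Usage sketch, not part of the module itself: ``PromptTemplate.format`` fills
# in the template variables and returns the final prompt string. The context
# passed below is a made-up placeholder.
example_summary_prompt = DEFAULT_SUMMARY_PROMPT.format(
    context_str="LlamaIndex builds a tree of summaries over document chunks."
)
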
# insert prompts
DEFAULT_INSERT_PROMPT_TMPL = (
    "Context information is below. It is provided in a numbered list "
    "(1 to {num_chunks}), "
    "where each item in the list corresponds to a summary.\n"
    "---------------------\n"
    "{context_list}"
    "---------------------\n"
    "Given the context information, here is a new piece of "
    "information: {new_chunk_text}\n"
    "Answer with the number corresponding to the summary that should be updated. "
    "The answer should be the number corresponding to the "
    "summary that is most relevant to the question.\n"
)
DEFAULT_INSERT_PROMPT = PromptTemplate(
    DEFAULT_INSERT_PROMPT_TMPL, prompt_type=PromptType.TREE_INSERT
)
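
# Illustrative sketch only: the tree index numbers its existing node summaries
# and passes them in as ``context_list``. The summaries and the new chunk below
# are invented placeholders showing how the template variables line up.
example_summaries = ["Summary of the indexing chapter.", "Summary of the query chapter."]
example_insert_prompt = DEFAULT_INSERT_PROMPT.format(
    num_chunks=len(example_summaries),
    context_list="".join(
        f"({i}) {summary}\n" for i, summary in enumerate(example_summaries, start=1)
    ),
    new_chunk_text="A new section describing response synthesis.",
)
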
# single choice
DEFAULT_QUERY_PROMPT_TMPL = (
    "Some choices are given below. It is provided in a numbered list "
    "(1 to {num_chunks}), "
    "where each item in the list corresponds to a summary.\n"
    "---------------------\n"
    "{context_list}"
    "\n---------------------\n"
    "Using only the choices above and not prior knowledge, return "
    "the choice that is most relevant to the question: '{query_str}'\n"
    "Provide choice in the following format: 'ANSWER: <number>' and explain why "
    "this summary was selected in relation to the question.\n"
)
DEFAULT_QUERY_PROMPT = PromptTemplate(
    DEFAULT_QUERY_PROMPT_TMPL, prompt_type=PromptType.TREE_SELECT
)
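
# Side note (an assumption about the PromptTemplate API, verify against your
# installed version): each template records its format fields, which is handy
# when wiring choices into the selection prompts programmatically.
query_prompt_vars = DEFAULT_QUERY_PROMPT.template_vars
# expected to be ["num_chunks", "context_list", "query_str"]
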
# multiple choice
DEFAULT_QUERY_PROMPT_MULTIPLE_TMPL = (
    "Some choices are given below. It is provided in a numbered "
    "list (1 to {num_chunks}), "
    "where each item in the list corresponds to a summary.\n"
    "---------------------\n"
    "{context_list}"
    "\n---------------------\n"
    "Using only the choices above and not prior knowledge, return the top choices "
    "(no more than {branching_factor}, ranked by most relevant to least) that "
    "are most relevant to the question: '{query_str}'\n"
    "Provide choices in the following format: 'ANSWER: <numbers>' and explain why "
    "these summaries were selected in relation to the question.\n"
)
DEFAULT_QUERY_PROMPT_MULTIPLE = PromptTemplate(
    DEFAULT_QUERY_PROMPT_MULTIPLE_TMPL, prompt_type=PromptType.TREE_SELECT_MULTIPLE
)
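
# Both selection prompts ask the model to reply with 'ANSWER: <numbers>'. A
# minimal, hypothetical parser for pulling those numbers back out of a raw
# response string; it is illustrative and not the extraction logic the library
# itself ships.
import re
from typing import List


def parse_answer_numbers(response: str) -> List[int]:
    """Return the choice numbers listed after 'ANSWER:' in a model response."""
    match = re.search(r"ANSWER:\s*([\d,\s]+)", response)
    if match is None:
        return []
    return [int(part) for part in match.group(1).replace(",", " ").split()]
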
DEFAULT_REFINE_PROMPT_TMPL = (
    "The original query is as follows: {query_str}\n"
    "We have provided an existing answer: {existing_answer}\n"
    "We have the opportunity to refine the existing answer "
    "(only if needed) with some more context below.\n"
    "------------\n"
    "{context_msg}\n"
    "------------\n"
    "Given the new context, refine the original answer to better "
    "answer the query. "
    "If the context isn't useful, return the original answer.\n"
    "Refined Answer: "
)
DEFAULT_REFINE_PROMPT = PromptTemplate(
    DEFAULT_REFINE_PROMPT_TMPL, prompt_type=PromptType.REFINE
)
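
# Usage sketch with invented values: the refine prompt feeds an earlier answer
# back in together with fresh context so the model can revise it if needed.
example_refine_prompt = DEFAULT_REFINE_PROMPT.format(
    query_str="What does this module contain?",
    existing_answer="It contains default prompt templates.",
    context_msg="Each template is also tagged with a PromptType.",
)
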
DEFAULT_TEXT_QA_PROMPT_TMPL = (
    "Context information is below.\n"
    "---------------------\n"
    "{context_str}\n"
    "---------------------\n"
    "Given the context information and not prior knowledge, "
    "answer the query.\n"
    "Query: {query_str}\n"
    "Answer: "
)
DEFAULT_TEXT_QA_PROMPT = PromptTemplate(
    DEFAULT_TEXT_QA_PROMPT_TMPL, prompt_type=PromptType.QUESTION_ANSWER
)