from typing import List


def format_as_chat(message: str, history: List[List[str]]) -> str:
    """
    Given a message and a history of previous messages, returns a string that
    formats the conversation as a chat following Llama 3's prompt template.

    :param message: A string containing the user's most recent message
    :param history: A list of lists of previous messages, where each sublist is a conversation turn:
                    [[user_message1, assistant_reply1], [user_message2, assistant_reply2], ...]
    """
    formatted_prompt = "<|begin_of_text|>"
    if len(history) == 0:
        return formatted_prompt + f"<|start_header_id|>user<|end_header_id|>\n\n{message}<|eot_id|>"
    for conv_turn in history:
        prompt = f"<|start_header_id|>user<|end_header_id|>\n\n{conv_turn[0]}<|eot_id|>"
        reply = f"<|start_header_id|>assistant<|end_header_id|>\n\n{conv_turn[1]}<|eot_id|>"
        # Add conversation turn to full prompt
        formatted_prompt += prompt + reply
    # Add last message
    formatted_prompt += f"<|start_header_id|>user<|end_header_id|>\n\n{message}<|eot_id|>"
    return formatted_prompt
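To illustrate, calling the helper with a single previous turn produces the flattened Llama 3 prompt shown in the comments below (the messages are made-up examples, not from this post):

# Illustrative usage of format_as_chat
history = [["Hello!", "Hi there, how can I help?"]]
prompt = format_as_chat("What is the capital of France?", history)
print(prompt)
# <|begin_of_text|><|start_header_id|>user<|end_header_id|>
#
# Hello!<|eot_id|><|start_header_id|>assistant<|end_header_id|>
#
# Hi there, how can I help?<|eot_id|><|start_header_id|>user<|end_header_id|>
#
# What is the capital of France?<|eot_id|>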
def generate_payload(prompt: str):
    """
    Given a formatted prompt, returns an object to be used when posting to the API

    :param prompt: A formatted prompt following Llama 3's multi-turn conversation structure
    """
    payload = {
        "inputs": prompt,
        "parameters": {
            "do_sample": False
        }
    }
    return payload
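As a rough sketch of how the two helpers fit together, the payload can be posted with requests. The endpoint URL, auth header, and the generated_text response field below are assumptions (typical of TGI-style text-generation APIs), not something defined in this post:

# Minimal sketch, assuming a TGI-style endpoint; URL and token are placeholders
import requests

API_URL = "https://your-endpoint.example.com"  # hypothetical endpoint
HEADERS = {"Authorization": "Bearer YOUR_TOKEN", "Content-Type": "application/json"}

def query(message: str, history: List[List[str]]) -> str:
    prompt = format_as_chat(message, history)
    payload = generate_payload(prompt)
    response = requests.post(API_URL, headers=HEADERS, json=payload)
    response.raise_for_status()
    # TGI-style endpoints typically return [{"generated_text": "..."}]
    return response.json()[0]["generated_text"]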
def format_for_translator(sentence: str, trg_lang: str):
    """
    Given a sentence and a target language, returns a single-turn prompt that instructs the model to translate it.

    :param sentence: The sentence to translate
    :param trg_lang: The target language for the translation
    """
    # Wrap sentence with translation instruction
    instruction = (
        f'Translate the following sentence into {trg_lang} using the native script: "{sentence}". '
        "Only output the translated sentence in the native script."
    )
    # Create the prompt structure
    formatted_prompt = "<|begin_of_text|>"
    return formatted_prompt + f"<|start_header_id|>user<|end_header_id|>\n\n{instruction}<|eot_id|>"
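The translation prompt plugs into the same payload helper. Here is a small illustrative example (the sentence and target language are made up):

# Illustrative usage of format_for_translator with generate_payload
prompt = format_for_translator("The weather is nice today.", "Spanish")
payload = generate_payload(prompt)
# payload["inputs"] now holds a single-turn Llama 3 prompt:
# <|begin_of_text|><|start_header_id|>user<|end_header_id|>
#
# Translate the following sentence into Spanish using the native script: "The weather is nice today.". Only output the translated sentence in the native script.<|eot_id|>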