move back to root

Bruce MacDonald
2023-06-27 12:12:44 -04:00
parent e07d81c7f1
commit 6599268d78
5 changed files with 0 additions and 0 deletions

lib/model_prompts.json (new file, 12 lines)

@@ -0,0 +1,12 @@
{
"alpaca": "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n\n### Instruction:\n{prompt}\n\n### Response:\n\n",
"ggml": "A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.\n\n### Human: Hello, Assistant.\n### Assistant: Hello. How may I help you today?\n### Human: ${prompt}",
"gpt4": "### Instruction:\n{prompt}\n\n### Response:\n",
"hermes": "### Instruction:\n{prompt}\n\n### Response:\n",
"oasst": "{prompt}",
"orca": "### System:\nYou are an AI assistant that follows instruction extremely well. Help as much as you can.\n\n### User:\n{prompt}\n\n### Response:",
"qlora": "### Human: {prompt}\n### Assistant:",
"tulu": "\n{prompt}\n\n(include newline)",
"vicuna": "A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.\n\nUSER: {prompt}\nASSISTANT:",
"wizardlm": "{prompt}\n\n### Response:"
}
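
Each entry maps a model-family name to a prompt template containing a {prompt} placeholder, which lib/template.py (below) fills in with the user's input via Python's str.format. A minimal sketch of how one of these templates renders, assuming the JSON file is read from lib/model_prompts.json as added in this commit:

import json

# Load the prompt map added in this commit (path assumed relative to the repo root).
with open("lib/model_prompts.json", "r") as f:
    model_prompts = json.load(f)

# Fill the {prompt} placeholder the same way template.py does, via str.format.
rendered = model_prompts["vicuna"].format(prompt="Why is the sky blue?")
print(rendered)
# A chat between a curious user and an artificial intelligence assistant. ...
#
# USER: Why is the sky blue?
# ASSISTANT: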

lib/template.py (new file, 20 lines)

@@ -0,0 +1,20 @@
from difflib import SequenceMatcher
import json

with open("model_prompts.json", "r") as f:
    model_prompts = json.load(f)


def template(model, prompt):
    max_ratio = 0
    closest_key = ""
    model_name = model.lower()
    # Find the specialized prompt with the closest name match
    for key in model_prompts.keys():
        ratio = SequenceMatcher(None, model_name, key).ratio()
        if ratio > max_ratio:
            max_ratio = ratio
            closest_key = key
    # Return the value of the closest match
    p = model_prompts.get(closest_key)  # TODO: provide a better default template
    return p.format(prompt=prompt)
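
Because real model names rarely match the JSON keys exactly, template() picks the key whose SequenceMatcher similarity ratio to the lowercased model name is highest, then formats that template with the prompt. A usage sketch; the model names here are illustrative, not taken from the commit:

# Assumes lib/ is the working directory so template.py can find
# model_prompts.json via its relative path.
from template import template

# "vicuna-7b" is closest to the "vicuna" key, so its chat template is used.
print(template("vicuna-7b", "Why is the sky blue?"))

# "alpaca-native" maps to the "alpaca" instruction template.
print(template("alpaca-native", "Summarize this paragraph."))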