Add Gryphe/MythoMax-L2-13b (#173)

commit 733d53625b (parent a5e59e4235)
Author: arcticfly
Date: 2023-08-18 00:37:16 -07:00
Committed by: GitHub
5 changed files with 62 additions and 0 deletions

View File

@@ -7,6 +7,7 @@ import {
  // templateSystemUserAssistantPrompt,
  templateInstructionInputResponsePrompt,
  templateAiroborosPrompt,
  templateGryphePrompt,
  templateVicunaPrompt,
} from "./templatePrompt";
@@ -69,6 +70,15 @@ const frontendModelProvider: FrontendModelProvider<SupportedModel, OpenpipeChatO
    learnMoreUrl: "https://huggingface.co/lmsys/vicuna-13b-v1.5",
    templatePrompt: templateVicunaPrompt,
  },
  "Gryphe/MythoMax-L2-13b": {
    name: "MythoMax-L2-13b",
    contextWindow: 4096,
    pricePerSecond: 0.0003,
    speed: "medium",
    provider: "openpipe/Chat",
    learnMoreUrl: "https://huggingface.co/Gryphe/MythoMax-L2-13b",
    templatePrompt: templateGryphePrompt,
  },
  "NousResearch/Nous-Hermes-llama-2-7b": {
    name: "Nous-Hermes-llama-2-7b",
    contextWindow: 4096,

View File

@@ -13,6 +13,7 @@ const modelEndpoints: Record<OpenpipeChatInput["model"], string> = {
"NousResearch/Nous-Hermes-Llama2-13b": "https://ncv8pw3u0vb8j2-8000.proxy.runpod.net/v1",
"jondurbin/airoboros-l2-13b-gpt4-2.0": "https://9nrbx7oph4btou-8000.proxy.runpod.net/v1",
"lmsys/vicuna-13b-v1.5": "https://h88hkt3ux73rb7-8000.proxy.runpod.net/v1",
"Gryphe/MythoMax-L2-13b": "https://3l5jvhnxdgky3v-8000.proxy.runpod.net/v1",
"NousResearch/Nous-Hermes-llama-2-7b": "https://ua1bpc6kv3dgge-8000.proxy.runpod.net/v1",
};
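The trailing /v1 in each URL suggests these RunPod hosts expose an OpenAI-compatible completions API, though the calling code is not part of this diff. As a rough sketch under that assumption (the helper name, request shape, and sampling parameters below are illustrative, not taken from this PR), querying the new MythoMax endpoint might look like:

// Rough sketch, not from this PR: assumes the RunPod host serves an
// OpenAI-compatible /v1/completions route (the trailing /v1 suggests so).
const MYTHOMAX_ENDPOINT = "https://3l5jvhnxdgky3v-8000.proxy.runpod.net/v1";

async function completeWithMythoMax(prompt: string): Promise<string> {
  const response = await fetch(`${MYTHOMAX_ENDPOINT}/completions`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      model: "Gryphe/MythoMax-L2-13b",
      prompt,
      max_tokens: 256, // illustrative sampling parameters
      temperature: 0.7,
    }),
  });
  const json = await response.json();
  // OpenAI-style completion responses return an array of choices with a text field.
  return json.choices[0].text;
}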

View File

@@ -11,6 +11,7 @@ const supportedModels = [
"NousResearch/Nous-Hermes-Llama2-13b",
"jondurbin/airoboros-l2-13b-gpt4-2.0",
"lmsys/vicuna-13b-v1.5",
"Gryphe/MythoMax-L2-13b",
"NousResearch/Nous-Hermes-llama-2-7b",
] as const;
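Listing the new model in this as const tuple is presumably what feeds the SupportedModel and OpenpipeChatInput["model"] types referenced in the other files. A minimal sketch of that common pattern (the exact derivation is not shown in this diff):

// Hypothetical sketch, not shown in this diff: one common way the model union
// could be derived from the tuple above.
type SupportedModel = (typeof supportedModels)[number];
// "Gryphe/MythoMax-L2-13b" is now part of SupportedModel, so the new
// frontendModelProvider entry and modelEndpoints key both type-check.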

View File

@@ -11,6 +11,7 @@
"NousResearch/Nous-Hermes-Llama2-13b",
"jondurbin/airoboros-l2-13b-gpt4-2.0",
"lmsys/vicuna-13b-v1.5",
"Gryphe/MythoMax-L2-13b",
"NousResearch/Nous-Hermes-llama-2-7b"
]
},

View File

@@ -223,3 +223,52 @@ export const templateVicunaPrompt = (messages: OpenpipeChatInput["messages"]) =>
  return prompt.trim();
};
// <System prompt/Character Card>
// ### Instruction:
// Your instruction or question here.
// For roleplay purposes, I suggest the following - Write <CHAR NAME>'s next reply in a chat between <YOUR NAME> and <CHAR NAME>. Write a single reply only.
// ### Response:
export const templateGryphePrompt = (messages: OpenpipeChatInput["messages"]) => {
  const splitter = "\n\n";
  const instructionTag = "### Instruction:\n";
  const responseTag = "### Response:\n";
  let combinedSystemMessage = "";
  const conversationMessages = [];
  for (const message of messages) {
    if (message.role === "system") {
      combinedSystemMessage += message.content;
    } else if (message.role === "user") {
      conversationMessages.push(instructionTag + message.content);
    } else {
      conversationMessages.push(responseTag + message.content);
    }
  }
  let systemMessage = "";
  if (combinedSystemMessage) {
    // If the conversation already contains a user message, prepend the system message as plain
    // text; otherwise wrap it in an instruction tag so the prompt still opens with one.
    if (conversationMessages.find((message) => message.startsWith(instructionTag))) {
      systemMessage = `${combinedSystemMessage}\n\n`;
    } else {
      conversationMessages.unshift(instructionTag + combinedSystemMessage);
    }
  }
  let prompt = `${systemMessage}${conversationMessages.join(splitter)}`;
  // Ensure the prompt ends with a response tag so the model generates the assistant's reply
  const lastInstructionIndex = prompt.lastIndexOf(instructionTag);
  const lastAssistantIndex = prompt.lastIndexOf(responseTag);
  if (lastInstructionIndex > lastAssistantIndex) {
    prompt += splitter + responseTag;
  }
  return prompt;
};
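To make the template's output concrete, here is a hypothetical call; the messages and character name are invented for illustration and do not appear in the commit.

// Illustrative usage only; not part of this commit.
const prompt = templateGryphePrompt([
  { role: "system", content: "You are Aria, a helpful storyteller." },
  { role: "user", content: "Tell me a short tale about a fox." },
]);
// prompt now contains:
// You are Aria, a helpful storyteller.
//
// ### Instruction:
// Tell me a short tale about a fox.
//
// ### Response: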