Continue polling VariantStats while LLM retrieval is in progress, minor UI fixes (#54)

* Prevent zoom in on iOS

* Expand function return code background to fill cell

* Keep OutputStats on far right of cells

* Continue polling prompt stats while cells are retrieving from LLM

* Add comment to _document.tsx

* Fix prettier
This commit is contained in:
arcticfly
2023-07-17 18:04:38 -07:00
committed by GitHub
parent 8e7a6d3ae2
commit 4131aa67d0
6 changed files with 85 additions and 18 deletions

View File

@@ -31,7 +31,7 @@ export const OutputStats = ({
const cost = promptCost + completionCost;
return (
<HStack align="center" color="gray.500" fontSize="2xs" mt={{ base: 0, md: 1 }}>
<HStack w="full" align="center" color="gray.500" fontSize="2xs" mt={{ base: 0, md: 1 }}>
<HStack flex={1}>
{modelOutput.outputEvaluation.map((evaluation) => {
const passed = evaluation.result > 0.5;