Better streaming

- Always stream the visible scenarios, if the modelProvider supports it
- Never stream the invisible scenarios

Also actually runs our query tasks in a background worker, which we weren't quite doing before.
Kyle Corbitt
2023-07-24 18:34:30 -07:00
parent d6b97b29f7
commit e1cbeccb90
25 changed files with 152 additions and 153 deletions
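
A minimal sketch of how a caller could apply the visible/invisible rule from the commit message via the new `stream` option on `generateNewCell`. The caller name and the `isVisible` flag are assumptions for illustration, not code from this commit:

```ts
import { generateNewCell } from "./generateNewCell";

// Hypothetical caller: only the scenarios currently on screen ask for streaming.
async function generateCellsForVariant(
  variantId: string,
  scenarios: { id: string; isVisible: boolean }[],
): Promise<void> {
  await Promise.all(
    scenarios.map((scenario) =>
      // Visible scenarios request streaming; whether tokens actually stream
      // still depends on the model provider. generateNewCell defaults the
      // option to false when it is omitted.
      generateNewCell(variantId, scenario.id, { stream: scenario.isVisible }),
    ),
  );
}
```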


@@ -1,12 +1,18 @@
 import { type Prisma } from "@prisma/client";
 import { prisma } from "../db";
-import { queueLLMRetrievalTask } from "./queueLLMRetrievalTask";
 import parseConstructFn from "./parseConstructFn";
 import { type JsonObject } from "type-fest";
 import hashPrompt from "./hashPrompt";
 import { omit } from "lodash-es";
+import { queueQueryModel } from "../tasks/queryModel.task";
 
-export const generateNewCell = async (variantId: string, scenarioId: string): Promise<void> => {
+export const generateNewCell = async (
+  variantId: string,
+  scenarioId: string,
+  options?: { stream?: boolean },
+): Promise<void> => {
+  const stream = options?.stream ?? false;
+
   const variant = await prisma.promptVariant.findUnique({
     where: {
       id: variantId,
@@ -98,6 +104,6 @@ export const generateNewCell = async (variantId: string, scenarioId: string): Pr
       }),
     );
   } else {
-    cell = await queueLLMRetrievalTask(cell.id);
+    await queueQueryModel(cell.id, stream);
   }
 };
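
The replacement helper, `queueQueryModel`, lives in `queryModel.task.ts` and isn't shown in this excerpt. A rough sketch of the shape such a helper could take, assuming a generic `enqueueJob` function from whatever task runner the project uses (that function and its payload shape are assumptions, not code from this commit):

```ts
import { prisma } from "../db";
// Stand-in for the project's real task runner; the actual queueing call in
// queryModel.task.ts may look different.
import { enqueueJob } from "./taskRunner";

export const queueQueryModel = async (cellId: string, stream: boolean): Promise<void> => {
  // Mark the cell as pending so the UI can show progress right away.
  await prisma.scenarioVariantCell.update({
    where: { id: cellId },
    data: { retrievalStatus: "PENDING", errorMessage: null },
  });

  // Hand the model call to a background worker instead of running it inline.
  await enqueueJob("queryModel", { scenarioVariantCellId: cellId, stream });
};
```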


@@ -1,22 +0,0 @@
-import { prisma } from "../db";
-import { queryLLM } from "../tasks/queryLLM.task";
-
-export const queueLLMRetrievalTask = async (cellId: string) => {
-  const updatedCell = await prisma.scenarioVariantCell.update({
-    where: {
-      id: cellId,
-    },
-    data: {
-      retrievalStatus: "PENDING",
-      errorMessage: null,
-    },
-    include: {
-      modelOutput: true,
-    },
-  });
-
-  // @ts-expect-error we aren't passing the helpers but that's ok
-  void queryLLM.task.handler({ scenarioVariantCellId: cellId }, { logger: console });
-
-  return updatedCell;
-};
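
This deleted helper is what the "actually runs our query tasks in a background worker" line in the commit message refers to: it marked the cell `PENDING` but then fired `queryLLM.task.handler` directly (unawaited) in the same process, so the query never actually left the web server. With a queued task, a separate worker process picks the job up, roughly along these lines; the `dequeueJob`/`runTask` names and the polling loop are illustrative assumptions, not part of this commit:

```ts
// Hypothetical worker entry point: drain queued jobs outside the web process.
import { dequeueJob, runTask } from "./taskRunner";

async function runWorker(): Promise<void> {
  for (;;) {
    const job = await dequeueJob();
    if (!job) {
      // Nothing queued; wait briefly before polling again.
      await new Promise((resolve) => setTimeout(resolve, 1_000));
      continue;
    }
    // Dispatch to the registered handler, e.g. the queryModel task queued above.
    await runTask(job.name, job.payload);
  }
}
```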