Commit 6a87668 (parent: 4e44e80)
src/types/llmService/LLMService.ts
@@ -12,12 +12,14 @@ export interface LLMServiceInterface {
   /**
    * Calls the LLM with the provided engineered prompt, prompt ID, and output token limit.
    * @param engineeredPrompt - The prompt that has been engineered for the LLM.
+   * @param jsonSchema - A JSON schema string used to guide the structure of the LLM's response (optional).
    * @param promptId - The ID of the prompt (optional).
    * @param outputTokenLimit - The limit on the number of output tokens (optional).
    * @returns A promise that resolves to the LLM's response as a string.
    */
   callLLM(
     engineeredPrompt: string,
+    jsonSchema?: string,
     promptId?: string,
     outputTokenLimit?: number
   ): Promise<string>;
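For context, here is a minimal TypeScript sketch of how a caller might use the extended signature. The import path, the getStructuredSummary helper, the responseSchema value, the prompt ID, and the token limit are illustrative assumptions; only the callLLM signature comes from the diff above.

// Sketch only: the import path, helper name, and argument values below are
// illustrative; the callLLM signature itself is taken from the diff above.
import { LLMServiceInterface } from "./types/llmService/LLMService";

// A JSON schema string describing the expected shape of the response.
const responseSchema: string = JSON.stringify({
  type: "object",
  properties: { summary: { type: "string" } },
  required: ["summary"],
});

// Any LLMServiceInterface implementation can now be asked for schema-guided
// output by passing the schema string as the second argument.
async function getStructuredSummary(
  service: LLMServiceInterface
): Promise<{ summary: string }> {
  const raw = await service.callLLM(
    "Summarize the design document in one sentence.", // engineeredPrompt
    responseSchema, // jsonSchema (the parameter added in this commit)
    "summarize-v1", // promptId (illustrative)
    256 // outputTokenLimit (illustrative)
  );
  return JSON.parse(raw) as { summary: string };
}

Because jsonSchema is an optional string, existing implementations and call sites that omit it continue to compile unchanged.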