Commit cb5de6d

chore: output tokens not input
1 parent 63bb681

File tree

1 file changed: +2 -2 lines changed

src/types/llmService/LLMService.ts

+2 -2
@@ -13,12 +13,12 @@ export interface LLMServiceInterface {
    * Calls the LLM with the provided engineered prompt, prompt ID, and input token limit.
    * @param engineeredPrompt - The prompt that has been engineered for the LLM.
    * @param promptId - The ID of the prompt (optional).
-   * @param inputTokenLimit - The limit on the number of input tokens (optional).
+   * @param outputTokenLimit - The limit on the number of output tokens (optional).
    * @returns A promise that resolves to the LLM's response as a string.
    */
   callLLM(
     engineeredPrompt: string,
     promptId?: string,
-    inputTokenLimit?: number
+    outputTokenLimit?: number
   ): Promise<string>;
 }
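
For context, a minimal sketch of how an implementation might honor the renamed parameter. Only the LLMServiceInterface shape comes from src/types/llmService/LLMService.ts above; the StubLLMService class, the default limit of 256, and the word-based truncation are illustrative assumptions, not part of this commit or repository.

// Hypothetical implementation sketch (not part of this commit).
export interface LLMServiceInterface {
  callLLM(
    engineeredPrompt: string,
    promptId?: string,
    outputTokenLimit?: number
  ): Promise<string>;
}

export class StubLLMService implements LLMServiceInterface {
  async callLLM(
    engineeredPrompt: string,
    promptId?: string,
    outputTokenLimit?: number
  ): Promise<string> {
    // The limit caps the length of the generated completion, not the size of
    // the prompt, which is the reason for the rename in this commit.
    const limit = outputTokenLimit ?? 256; // assumed default, not from the source

    // A real service would forward `limit` to its provider as the maximum
    // number of tokens to generate; this stub just echoes a truncated prompt,
    // using whitespace-separated words as a crude stand-in for tokens.
    const words = engineeredPrompt.split(/\s+/).slice(0, limit);
    return `[stub${promptId ? ` ${promptId}` : ""}] ${words.join(" ")}`;
  }
}

Usage: callers that previously passed a value intended to bound the prompt size should now treat the third argument as a cap on the response length.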
