From 8143e5700f53279a5a18d21b7c5466f3b9bb6ce6 Mon Sep 17 00:00:00 2001
From: KernelDeimos
Date: Wed, 22 Jan 2025 17:28:18 -0500
Subject: [PATCH] fix: remove hard-coded token limit for OpenAI

---
 .../src/modules/puterai/OpenAICompletionService.js | 11 -----------
 1 file changed, 11 deletions(-)

diff --git a/src/backend/src/modules/puterai/OpenAICompletionService.js b/src/backend/src/modules/puterai/OpenAICompletionService.js
index 96cd385b..d69837be 100644
--- a/src/backend/src/modules/puterai/OpenAICompletionService.js
+++ b/src/backend/src/modules/puterai/OpenAICompletionService.js
@@ -354,17 +354,6 @@ class OpenAICompletionService extends BaseService {
             }
         }
 
-        const max_tokens = 4096 - token_count;
-        console.log('MAX TOKENS ???', max_tokens);
-
-        const svc_apiErrpr = this.services.get('api-error');
-        if ( max_tokens <= 8 ) {
-            throw svc_apiErrpr.create('max_tokens_exceeded', {
-                input_tokens: token_count,
-                max_tokens: 4096 - 8,
-            });
-        }
-
         const completion = await this.openai.chat.completions.create({
             user: user_private_uid,
             messages: messages,