From aa5c6bbfd96af9294e6ad8f9d46dd8dcb85cb263 Mon Sep 17 00:00:00 2001
From: 0xMRTT <0xMRTT@proton.me>
Date: Sat, 6 May 2023 00:42:49 +0200
Subject: [PATCH] provider(oa): add warning for chunking (#13)

---
 src/provider/openai.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/provider/openai.py b/src/provider/openai.py
index 23d3977..7acd180 100644
--- a/src/provider/openai.py
+++ b/src/provider/openai.py
@@ -22,6 +22,9 @@ class BaseOpenAIProvider(BavarderProvider):
         prompt = self.chunk(prompt)
         try:
             if isinstance(prompt, list):
+                self.win.banner.props.title = "Prompt too long, splitting into chunks."
+                self.win.banner.props.button_label = ""
+                self.win.banner.set_revealed(True)
                 response = ""
                 for chunk in prompt:
                     response += self.chat.create(
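
For context, a minimal, self-contained sketch of the chunking path this diff touches. The chunk-size constant, the FakeBanner stub, and the helper names below are illustrative assumptions, not code from the repository; only the banner title, the cleared button label, and the set_revealed(True) call mirror what the patch adds to BaseOpenAIProvider.

MAX_PROMPT_CHARS = 4000  # assumed per-request limit; not taken from the patch


class FakeBanner:
    """Stand-in for the Adw.Banner the window exposes as self.win.banner."""

    def __init__(self):
        self.title = ""
        self.button_label = ""
        self.revealed = False

    def set_revealed(self, revealed: bool) -> None:
        self.revealed = revealed


def chunk_prompt(prompt: str, limit: int = MAX_PROMPT_CHARS):
    """Return the prompt unchanged if it fits, else a list of slices."""
    if len(prompt) <= limit:
        return prompt
    return [prompt[i:i + limit] for i in range(0, len(prompt), limit)]


def ask(prompt: str, banner: FakeBanner, send) -> str:
    """Mirror the patched flow: reveal the warning banner, then send each chunk."""
    prompt = chunk_prompt(prompt)
    if isinstance(prompt, list):
        banner.title = "Prompt too long, splitting into chunks."
        banner.button_label = ""
        banner.set_revealed(True)
        return "".join(send(chunk) for chunk in prompt)
    return send(prompt)


if __name__ == "__main__":
    banner = FakeBanner()
    out = ask("x" * 9000, banner, send=lambda chunk: f"[{len(chunk)} chars]")
    print(banner.revealed, out)  # True [4000 chars][4000 chars][1000 chars]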