@@ -175,7 +175,7 @@ def chat(self, query, lc_agent, history):
         history_length = len(completion_enc.encode(history))
 
         max_context_len = max_comp_model_tokens - query_length - MAX_OUTPUT_TOKENS - empty_prompt_length - history_length - 1
-
+
         print("max_context_len", max_context_len)
         search_results = completion_enc.decode(completion_enc.encode(search_results)[:max_context_len])
 
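The hunk above budgets the retrieved context in tokens: everything reserved for the query, the history, the prompt scaffolding, and the model's answer is subtracted from the model's window, and search_results is cut at whatever remains. A minimal sketch of the same idea, assuming tiktoken with the cl100k_base encoding and illustrative budget constants (the real code derives these from the chosen completion model and the empty prompt template):

import tiktoken

# Illustrative values; in the actual code these come from the chosen model
# and from encoding the empty prompt template.
MAX_COMP_MODEL_TOKENS = 8192
MAX_OUTPUT_TOKENS = 750
EMPTY_PROMPT_TOKENS = 200

def truncate_context(search_results, query, history):
    enc = tiktoken.get_encoding("cl100k_base")
    query_length = len(enc.encode(query))
    history_length = len(enc.encode(history))
    # Tokens left for retrieved passages after reserving room for everything else.
    max_context_len = (MAX_COMP_MODEL_TOKENS - query_length - MAX_OUTPUT_TOKENS
                       - EMPTY_PROMPT_TOKENS - history_length - 1)
    # Encode, cut at the budget, and decode back to text.
    return enc.decode(enc.encode(search_results)[:max_context_len])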
@@ -197,7 +197,7 @@ def chat(self, query, lc_agent, history):
 
         messages.append({"role": "user", "content": body.format(history=history, context=search_results, query=query)})
 
-        print("search_results", len(search_results), search_results)
+        print("search_results", len(completion_enc.encode(search_results)), search_results)
         answer = openai_helpers.contact_openai(messages, completion_model=CHOSEN_COMP_MODEL, functions=intent_functions)
         answer = answer['choices'][0]['message']['content']
 
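The logging change above reports the context size in tokens rather than characters: len(search_results) counts characters, while len(completion_enc.encode(search_results)) counts model tokens, the unit the budget in the previous hunk is expressed in. A quick illustration of the difference, again assuming the cl100k_base encoding:

import tiktoken

enc = tiktoken.get_encoding("cl100k_base")
text = "Azure OpenAI answers questions over the retrieved search results."
print("characters:", len(text))          # character count of the raw string
print("tokens:", len(enc.encode(text)))  # token count seen by the model, usually smaller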
@@ -209,11 +209,7 @@ def chat(self, query, lc_agent, history):
 
 
     def run(self, query, lc_agent=None, history=None):
-
-        print("history", history)
-        answer = self.chat(query, lc_agent, history)
-        print(answer)
 
-        # self.update_history(query, answer)
+        answer = self.chat(query, lc_agent, history)
 
         return answer