monoai.chat

The Chat module is responsible for handling the chat interface and message history.

1"""
2Chat module is responsible for handling the chat interface and messages history.
3"""
4
5from monoai.chat.chat import Chat
6
7__all__ = ["Chat"]
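
The public entry point is the Chat class re-exported here. A minimal round trip, assuming the provider API key is configured where monoai expects it:

```
from monoai.chat import Chat

chat = Chat(provider="openai", model="gpt-4o-mini")
print(chat.ask("2+2"))  # 4
```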
class Chat:
import base64
import json
import logging
import os
from pathlib import Path
from typing import Any, AsyncGenerator, Dict, List, Optional, Union

# Assumed imports: completion/acompletion follow litellm's interface, and the
# remaining monoai-internal names (HistorySummarizer, Model, Conf, load_key,
# ChatError) are imported from their respective monoai modules.
from litellm import acompletion, completion
from monoai.chat.history import BaseHistory, JSONHistory, MongoDBHistory, SQLiteHistory
from monoai.prompts import Prompt

logger = logging.getLogger(__name__)


class Chat:
    """
    The Chat class is responsible for handling the chat interface and message history.

    Examples
    --------
    Basic usage:
    ```
    chat = Chat(provider="openai", model="gpt-4o-mini")
    response = chat.ask("2+2") # 4
    response = chat.ask("+2") # 6
    ```

    With history:
    ```
    # Create a new chat
    chat = Chat(provider="openai", model="gpt-4o-mini", history_type="json")
    print(chat.chat_id) # 8cc2bfa3-e9a0-4b82-b46e-3376cd220dd3
    response = chat.ask("Hello! I'm Giuseppe") # Hello!

    # Load a chat
    chat = Chat(provider="openai", model="gpt-4o-mini", history_type="json", chat_id="8cc2bfa3-e9a0-4b82-b46e-3376cd220dd3")
    response = chat.ask("What's my name?") # Your name is Giuseppe
    ```

    With history summarizer:
    ```
    chat = Chat(provider="openai",
                model="gpt-4o-mini",
                history_type="json",
                history_summarizer_provider="openai",
                history_summarizer_model="gpt-4o-mini",
                history_summarizer_max_tokens=100)

    response = chat.ask("Hello! I'm Giuseppe") # Hello!
    response = chat.ask("What's my name?") # Your name is Giuseppe
    ```
    """

    _HISTORY_MAP = {
        "json": JSONHistory,
        "sqlite": SQLiteHistory,
        "mongodb": MongoDBHistory
    }

    def __init__(self,
                 provider: str,
                 model: str,
                 system_prompt: Optional[Union[Prompt, str]] = None,
                 max_tokens: Optional[int] = None,
                 history_type: Union[str, BaseHistory] = "json",
                 history_last_n: Optional[int] = None,
                 history_path: Optional[str] = None,
                 history_summarizer_provider: Optional[str] = None,
                 history_summarizer_model: Optional[str] = None,
                 history_summarizer_max_tokens: Optional[int] = None,
                 chat_id: Optional[str] = None) -> None:
        """
        Initialize a new Chat instance.

        Parameters
        ----------
        provider : str
            Name of the provider (e.g., 'openai', 'anthropic')
        model : str
            Name of the model (e.g., 'gpt-4', 'claude-3')
        system_prompt : str | Prompt, optional
            System prompt or Prompt object
        max_tokens : int, optional
            Maximum number of tokens for each request
        history_type : str | BaseHistory, optional
            The type of history to use for the chat: "json", "sqlite", "mongodb",
            or a BaseHistory instance
        history_last_n : int, optional
            The number of most recent messages to keep in the history
        history_path : str, optional
            The path to the history storage
        history_summarizer_provider : str, optional
            The provider of the history summarizer
        history_summarizer_model : str, optional
            The model of the history summarizer
        history_summarizer_max_tokens : int, optional
            The maximum number of tokens for the history summarizer
        chat_id : str, optional
            The ID of the chat to load; if not provided, a new chat is created

        Raises
        ------
        ChatError
            If invalid parameters are provided or initialization fails
        """
        try:
            self._validate_parameters(provider, model, max_tokens, history_type)

            self._model = f"{provider}/{model}"
            self._max_tokens = max_tokens
            self._provider = provider
            self._model_name = model

            load_key(provider)
            self._history_summarizer = None

            # Initialize history
            self._initialize_history(history_type, history_last_n, history_path)

            # Initialize history summarizer
            self._initialize_history_summarizer(
                history_summarizer_provider,
                history_summarizer_model,
                history_summarizer_max_tokens
            )

            # Process system prompt
            processed_system_prompt = self._process_system_prompt(system_prompt)

            # Initialize chat
            if chat_id is None:
                self.chat_id = self._history.new(processed_system_prompt)
            else:
                self.chat_id = chat_id

        except Exception as e:
            logger.error(f"Failed to initialize Chat: {e}")
            raise ChatError(f"Chat initialization failed: {e}")

    def _validate_parameters(self, provider: str, model: str, max_tokens: Optional[int], history_type: Union[str, BaseHistory]) -> None:
        """Validate input parameters."""
        if not provider or not isinstance(provider, str):
            raise ChatError("Provider must be a non-empty string")

        if not model or not isinstance(model, str):
            raise ChatError("Model must be a non-empty string")

        if max_tokens is not None and (not isinstance(max_tokens, int) or max_tokens <= 0):
            raise ChatError("max_tokens must be a positive integer")

        if isinstance(history_type, str) and history_type not in self._HISTORY_MAP:
            raise ChatError(f"Unsupported history type: {history_type}. Supported types: {list(self._HISTORY_MAP.keys())}")

    def _initialize_history(self, history_type: Union[str, BaseHistory], history_last_n: Optional[int], history_path: Optional[str]) -> None:
        """Initialize the history system."""
        try:
            if isinstance(history_type, str):
                self._history = self._HISTORY_MAP[history_type](last_n=history_last_n, db_path=history_path)
            else:
                self._history = history_type
        except Exception as e:
            raise ChatError(f"Failed to initialize history: {e}")

    def _initialize_history_summarizer(self, provider: Optional[str], model: Optional[str], max_tokens: Optional[int]) -> None:
        """Initialize the history summarizer."""
        if provider is not None and model is not None:
            try:
                self._history_summarizer = HistorySummarizer(
                    Model(provider=provider, model=model, max_tokens=max_tokens)
                )
            except Exception as e:
                logger.warning(f"Failed to initialize history summarizer: {e}")

    def _process_system_prompt(self, system_prompt: Optional[Union[Prompt, str]]) -> str:
        """Process and load the system prompt."""
        try:
            if system_prompt is None:
                return self._load_default_system_prompt()
            elif isinstance(system_prompt, str) and system_prompt.endswith(".prompt"):
                return self._load_prompt_file(system_prompt)
            elif isinstance(system_prompt, Prompt):
                return str(system_prompt)
            else:
                return system_prompt
        except Exception as e:
            logger.warning(f"Failed to process system prompt: {e}")
            return ""

    def _load_default_system_prompt(self) -> str:
        """Load the default system prompt from file."""
        try:
            prompt_path = Conf()["prompts_path"]
            system_prompt_path = Path(prompt_path) / "system.prompt"

            if system_prompt_path.exists():
                with open(system_prompt_path, "r", encoding="utf-8") as f:
                    return f.read()
            else:
                logger.info("Default system prompt file not found, using empty prompt")
                return ""
        except Exception as e:
            logger.warning(f"Failed to load default system prompt: {e}")
            return ""

    def _load_prompt_file(self, prompt_file: str) -> str:
        """Load a prompt from a file."""
        try:
            prompt_path = Conf()["prompts_path"]
            full_path = Path(prompt_path) / prompt_file

            if not full_path.exists():
                raise FileNotFoundError(f"Prompt file not found: {full_path}")

            with open(full_path, "r", encoding="utf-8") as f:
                return f.read()
        except Exception as e:
            raise ChatError(f"Failed to load prompt file {prompt_file}: {e}")

    def _process_file_attachment(self, prompt: str, file: Optional[Union[str, bytes]], file_type: Optional[str]) -> Union[str, List[Dict[str, Any]]]:
        """Process file attachment and return modified prompt."""
        if file is None:
            return prompt

        try:
            # Infer the file type from the extension if not given
            if file_type is None and isinstance(file, str):
                file_type = file.split(".")[-1].lower()

            if not file_type:
                raise ChatError("File type could not be determined")

            conf = Conf()

            # Handle text files
            if file_type in conf["supported_files"]["text"]:
                return self._process_text_file(prompt, file)
            # Handle image files
            elif file_type in conf["supported_files"]["image"]:
                return self._process_image_file(prompt, file)
            else:
                raise ChatError(f"Unsupported file type: {file_type}")

        except Exception as e:
            raise ChatError(f"Failed to process file attachment: {e}")

    def _process_text_file(self, prompt: str, file: Union[str, bytes]) -> str:
        """Process text file attachment."""
        try:
            if isinstance(file, str):
                if os.path.exists(file):
                    # Local path: read the file contents
                    with open(file, "r", encoding="utf-8") as f:
                        file_content = f.read()
                else:
                    # Not a local path: pass the string through as-is (e.g., a URL)
                    file_content = file
            else:
                # Bytes: decode as UTF-8 text
                file_content = file.decode("utf-8")

            return prompt + Conf()["default_prompt"]["file"] + file_content

        except Exception as e:
            raise ChatError(f"Failed to process text file: {e}")

    def _process_image_file(self, prompt: str, file: Union[str, bytes]) -> List[Dict[str, Any]]:
        """Process image file attachment."""
        try:
            if isinstance(file, str):
                if os.path.exists(file):
                    # Local file: embed as a base64 data URL
                    # (note: the MIME type is hardcoded to image/jpeg)
                    with open(file, "rb") as f:
                        file_bytes = f.read()
                    image_url = f"data:image/jpeg;base64,{base64.b64encode(file_bytes).decode('utf-8')}"
                else:
                    # Assume it's already a URL
                    image_url = file
            else:
                # Bytes: embed as a base64 data URL
                image_url = f"data:image/jpeg;base64,{base64.b64encode(file).decode('utf-8')}"

            return [
                {"type": "text", "text": prompt},
                {"type": "image_url", "image_url": image_url}
            ]

        except Exception as e:
            raise ChatError(f"Failed to process image file: {e}")

    def _prepare_messages(self, prompt: Union[str, List[Dict[str, Any]]]) -> List[Dict[str, Any]]:
        """Prepare messages for the API call."""
        try:
            messages = self._history.load(self.chat_id)
            messages = [{'role': d.get('role'), 'content': d.get('content')} for d in messages]

            # Add the user message (plain text or multimodal content)
            messages.append({"role": "user", "content": prompt})

            # Apply history summarization if enabled
            if self._history_summarizer is not None:
                return self._apply_history_summarization(messages)
            return messages

        except Exception as e:
            raise ChatError(f"Failed to prepare messages: {e}")

    def _apply_history_summarization(self, messages: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Apply history summarization to messages."""
        try:
            # Keep only the system prompt and the latest user message,
            # appending a summary of the rest to the system prompt
            summarized = self._history_summarizer.summarize(messages)
            ask_messages = [messages[0], messages[-1]]
            ask_messages[0]["content"] += Conf()["default_prompt"]["summary"] + summarized
            return ask_messages
        except Exception as e:
            logger.warning(f"History summarization failed, using full history: {e}")
            return messages

    def _store_messages(self, messages: List[Dict[str, Any]], response_content: str) -> None:
        """Store messages in history."""
        try:
            # Persist only the new user/assistant pair
            messages.append({"role": "assistant", "content": response_content})
            self._history.store(self.chat_id, messages[-2:])
        except Exception as e:
            logger.error(f"Failed to store messages: {e}")

    def ask(self, prompt: str, file: Optional[Union[str, bytes]] = None, file_type: Optional[str] = None, return_history: bool = False) -> Union[str, List[Dict[str, Any]]]:
        """
        Ask the model a question.

        Parameters
        ----------
        prompt : str
            The question to ask the model
        file : str | bytes, optional
            The file to attach to the message; if it is a string, it is treated
            as a local path or remote URL
        file_type : str, optional
            The type of the file
        return_history : bool, optional
            Whether to return the full history of the chat or only the response

        Returns
        -------
        str | List[Dict[str, Any]]
            The response from the model, or the full history if return_history is True

        Raises
        ------
        ChatError
            If the request fails or parameters are invalid
        """
        try:
            # Process file attachment
            processed_prompt = self._process_file_attachment(prompt, file, file_type)

            # Prepare messages
            messages = self._prepare_messages(processed_prompt)

            # Make API call
            response = completion(
                model=self._model,
                messages=messages,
                max_tokens=self._max_tokens
            )

            response_content = response["choices"][0]["message"]["content"]

            # Store messages
            self._store_messages(messages, response_content)

            if return_history:
                return messages
            return response_content

        except Exception as e:
            logger.error(f"Ask request failed: {e}")
            raise ChatError(f"Ask request failed: {e}")

    async def ask_stream(self, prompt: str, file: Optional[Union[str, bytes]] = None, file_type: Optional[str] = None) -> AsyncGenerator[str, None]:
        """
        Ask the model a question and stream the response.

        Parameters
        ----------
        prompt : str
            The question to ask the model
        file : str | bytes, optional
            The file to attach to the message
        file_type : str, optional
            The type of the file

        Yields
        ------
        str
            JSON string containing streaming response chunks

        Raises
        ------
        ChatError
            If the request fails or parameters are invalid
        """
        try:
            # Process file attachment
            processed_prompt = self._process_file_attachment(prompt, file, file_type)

            # Prepare messages
            messages = self._prepare_messages(processed_prompt)

            # Make streaming API call
            response = await acompletion(
                model=self._model,
                messages=messages,
                max_tokens=self._max_tokens,
                stream=True
            )

            response_text = ""
            async for chunk in response:
                part = chunk["choices"][0]["delta"]["content"] or ""
                response_text += part
                yield json.dumps({"answer": part})

            # Store messages
            self._store_messages(messages, response_text)

        except Exception as e:
            logger.error(f"Stream request failed: {e}")
            raise ChatError(f"Stream request failed: {e}")

    async def ask_async(self, prompt: str, file: Optional[Union[str, bytes]] = None, file_type: Optional[str] = None) -> AsyncGenerator[str, None]:
        """
        Ask the model a question and stream the response as plain text chunks.

        Parameters
        ----------
        prompt : str
            The question to ask the model
        file : str | bytes, optional
            The file to attach to the message
        file_type : str, optional
            The type of the file

        Yields
        ------
        str
            Response chunks as strings

        Raises
        ------
        ChatError
            If the request fails or parameters are invalid
        """
        try:
            # Process file attachment
            processed_prompt = self._process_file_attachment(prompt, file, file_type)

            # Prepare messages
            messages = self._prepare_messages(processed_prompt)

            # Make streaming API call
            response = await acompletion(
                model=self._model,
                messages=messages,
                stream=True,
                max_tokens=self._max_tokens
            )

            response_text = ""
            async for part in response:
                content = part["choices"][0]["delta"]["content"] or ""
                response_text += content
                yield content

            # Store messages
            self._store_messages(messages, response_text)

        except Exception as e:
            logger.error(f"Async request failed: {e}")
            raise ChatError(f"Async request failed: {e}")

    def get_chat_info(self) -> Dict[str, Any]:
        """
        Get information about the current chat.

        Returns
        -------
        Dict[str, Any]
            Dictionary containing chat information
        """
        return {
            "chat_id": self.chat_id,
            "provider": self._provider,
            "model": self._model_name,
            "max_tokens": self._max_tokens,
            "has_history_summarizer": self._history_summarizer is not None
        }

    def clear_history(self) -> None:
        """
        Clear the chat history.

        Raises
        ------
        ChatError
            If clearing history fails
        """
        try:
            self._history.clear(self.chat_id)
            logger.info(f"Cleared history for chat {self.chat_id}")
        except Exception as e:
            logger.error(f"Failed to clear history: {e}")
            raise ChatError(f"Failed to clear history: {e}")

The Chat class is responsible for handling the chat interface and message history.

Examples

Basic usage:

```
chat = Chat(provider="openai", model="gpt-4o-mini")
response = chat.ask("2+2") # 4
response = chat.ask("+2") # 6
```

With history:

```
# Create a new chat
chat = Chat(provider="openai", model="gpt-4o-mini", history_type="json")
print(chat.chat_id) # 8cc2bfa3-e9a0-4b82-b46e-3376cd220dd3
response = chat.ask("Hello! I'm Giuseppe") # Hello!

# Load a chat
chat = Chat(provider="openai", model="gpt-4o-mini", history_type="json", chat_id="8cc2bfa3-e9a0-4b82-b46e-3376cd220dd3")
response = chat.ask("What's my name?") # Your name is Giuseppe
```

With history summarizer:

```
chat = Chat(provider="openai",
            model="gpt-4o-mini",
            history_type="json",
            history_summarizer_provider="openai",
            history_summarizer_model="gpt-4o-mini",
            history_summarizer_max_tokens=100)

response = chat.ask("Hello! I'm Giuseppe") # Hello!
response = chat.ask("What's my name?") # Your name is Giuseppe
```
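
With SQLite-backed history, a sketch: the database path and last-n value below are illustrative, and "sqlite" and "mongodb" are accepted alongside "json":

```
chat = Chat(provider="openai",
            model="gpt-4o-mini",
            history_type="sqlite",
            history_path="chats.db",   # illustrative path
            history_last_n=6)          # keep only the most recent messages
response = chat.ask("Hello! I'm Giuseppe")
```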
Chat(provider: str, model: str, system_prompt: Optional[Union[monoai.prompts.Prompt, str]] = None, max_tokens: Optional[int] = None, history_type: Union[str, monoai.chat.history.BaseHistory] = 'json', history_last_n: Optional[int] = None, history_path: Optional[str] = None, history_summarizer_provider: Optional[str] = None, history_summarizer_model: Optional[str] = None, history_summarizer_max_tokens: Optional[int] = None, chat_id: Optional[str] = None)

Initialize a new Chat instance.

Parameters
  • provider (str): Name of the provider (e.g., 'openai', 'anthropic')
  • model (str): Name of the model (e.g., 'gpt-4', 'claude-3')
  • system_prompt (str | Prompt, optional): System prompt or Prompt object
  • max_tokens (int, optional): Maximum number of tokens for each request
  • history_type (str | BaseHistory, optional): The type of history to use for the chat: "json", "sqlite", "mongodb", or a BaseHistory instance.
  • history_last_n (int, optional): The number of most recent messages to keep in the history.
  • history_path (str, optional): The path to the history storage.
  • history_summarizer_provider (str, optional): The provider of the history summarizer.
  • history_summarizer_model (str, optional): The model of the history summarizer.
  • history_summarizer_max_tokens (int, optional): The maximum number of tokens for the history summarizer.
  • chat_id (str, optional): The ID of the chat to load; if not provided, a new chat is created.
Raises
  • ChatError: If invalid parameters are provided or initialization fails
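
When none of the built-in backends fit, history_type also accepts a BaseHistory instance. A minimal in-memory sketch, assuming BaseHistory expects the new/load/store/clear methods that Chat calls internally; the class below is illustrative, not part of monoai:

```
import uuid
from typing import Any, Dict, List

from monoai.chat.history import BaseHistory

class InMemoryHistory(BaseHistory):
    """Illustrative volatile backend: conversations are lost on exit."""

    def __init__(self):
        self._chats: Dict[str, List[Dict[str, Any]]] = {}

    def new(self, system_prompt: str) -> str:
        chat_id = str(uuid.uuid4())
        self._chats[chat_id] = [{"role": "system", "content": system_prompt}]
        return chat_id

    def load(self, chat_id: str) -> List[Dict[str, Any]]:
        return list(self._chats.get(chat_id, []))

    def store(self, chat_id: str, messages: List[Dict[str, Any]]) -> None:
        self._chats.setdefault(chat_id, []).extend(messages)

    def clear(self, chat_id: str) -> None:
        self._chats[chat_id] = []

chat = Chat(provider="openai", model="gpt-4o-mini", history_type=InMemoryHistory())
```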
def ask(self, prompt: str, file: Optional[Union[str, bytes]] = None, file_type: Optional[str] = None, return_history: bool = False) -> Union[str, List[Dict[str, Any]]]:

Ask the model a question.

Parameters
  • prompt (str): The question to ask the model
  • file (str | bytes, optional): The file to attach to the message; if it is a string, it is treated as a local path or remote URL
  • file_type (str, optional): The type of the file
  • return_history (bool, optional): Whether to return the full history of the chat or only the response
Returns
  • str | List[Dict[str, Any]]: The response from the model or the full history if return_history is True
Raises
  • ChatError: If the request fails or parameters are invalid
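
For example, attaching files; the paths and URL here are illustrative, and which extensions count as text or image comes from the project configuration:

```
response = chat.ask("What's in this picture?", file="photo.jpg")                 # local image
response = chat.ask("Summarize this document", file="notes.txt")                 # local text file
response = chat.ask("Describe this image", file="https://example.com/cat.png")   # remote URL
```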
async def ask_stream(self, prompt: str, file: Optional[Union[str, bytes]] = None, file_type: Optional[str] = None) -> AsyncGenerator[str, None]:

Ask the model a question and stream the response.

Parameters
  • prompt (str): The question to ask the model
  • file (str | bytes, optional): The file to attach to the message
  • file_type (str, optional): The type of the file
Yields
  • str: JSON string containing streaming response chunks
Raises
  • ChatError: If the request fails or parameters are invalid
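
Each yielded item is a JSON string of the form {"answer": "<chunk>"}, so a consumer decodes and concatenates the pieces; the asyncio driver below is illustrative:

```
import asyncio
import json

async def main():
    chat = Chat(provider="openai", model="gpt-4o-mini")
    async for item in chat.ask_stream("Tell me a short story"):
        print(json.loads(item)["answer"], end="", flush=True)

asyncio.run(main())
```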
async def ask_async(self, prompt: str, file: Optional[Union[str, bytes]] = None, file_type: Optional[str] = None) -> AsyncGenerator[str, None]:

Ask the model a question and stream the response as plain text chunks.

Parameters
  • prompt (str): The question to ask the model
  • file (str | bytes, optional): The file to attach to the message
  • file_type (str, optional): The type of the file
Yields
  • str: Response chunks as strings
Raises
  • ChatError: If the request fails or parameters are invalid
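
Unlike ask_stream, ask_async yields the text chunks directly, so no JSON decoding is needed; again, the driver is illustrative:

```
import asyncio

async def main():
    chat = Chat(provider="openai", model="gpt-4o-mini")
    async for chunk in chat.ask_async("Explain streaming in one paragraph"):
        print(chunk, end="", flush=True)

asyncio.run(main())
```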
def get_chat_info(self) -> Dict[str, Any]:

Get information about the current chat.

Returns
  • Dict[str, Any]: Dictionary containing chat information
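
The returned dictionary mirrors the constructor arguments plus the generated chat ID, for example:

```
info = chat.get_chat_info()
# {'chat_id': '8cc2bfa3-e9a0-4b82-b46e-3376cd220dd3',
#  'provider': 'openai',
#  'model': 'gpt-4o-mini',
#  'max_tokens': None,
#  'has_history_summarizer': False}
```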
def clear_history(self) -> None:

Clear the chat history.

Raises
  • ChatError: If clearing history fails
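
Clearing affects only the stored messages for the current chat_id; the Chat object itself stays usable:

```
chat.clear_history()                     # wipe the stored messages for this chat
response = chat.ask("What's my name?")   # earlier context is no longer available
```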