diff --git a/ovos_plugin_manager/templates/agents.py b/ovos_plugin_manager/templates/agents.py index 0f763545..59d2908c 100644 --- a/ovos_plugin_manager/templates/agents.py +++ b/ovos_plugin_manager/templates/agents.py @@ -225,12 +225,16 @@ def continue_chat(self, messages: List[AgentMessage], """ raise NotImplementedError() - def stream_chat(self, messages: List[AgentMessage], + def stream_tokens(self, messages: List[AgentMessage], session_id: str = "default", lang: Optional[str] = None, - units: Optional[str] = None) -> Iterable[AgentMessage]: + units: Optional[str] = None) -> Iterable[str]: """ - Stream back response messages as they are generated. + Stream back response tokens as they are generated. + + Returns partial sentences and is not suitable for direct TTS. + + Once merged the output corresponds to the content of an AgentMessage with MessageRole.ASSISTANT Note: Default implementation yields the full response from continue_chat. @@ -243,9 +247,35 @@ def stream_chat(self, messages: List[AgentMessage], units (str, optional): Unit system. Returns: - Iterable[AgentMessage]: A stream of response messages. + Iterable[str]: A stream of tokens/partial text. """ - yield self.continue_chat(messages, session_id, lang, units) + yield from self.continue_chat(messages, session_id, lang, units).content.split() + + def stream_sentences(self, messages: List[AgentMessage], + session_id: str = "default", + lang: Optional[str] = None, + units: Optional[str] = None) -> Iterable[str]: + """ + Stream back response sentences as they are generated. + + Returns full sentences only, suitable for direct TTS. + + Once merged the output corresponds to the content of an AgentMessage with MessageRole.ASSISTANT + + Note: + Default implementation yields the full response from continue_chat. + Subclasses should override this for real-time sentence streaming. + + Args: + messages (List[AgentMessage]): Full list of messages. + session_id (str): Identifier for the session.
+ lang (str, optional): Language code. + units (str, optional): Unit system. + + Returns: + Iterable[str]: A stream of complete sentences. + """ + yield from self.continue_chat(messages, session_id, lang, units).content.split("\n") def get_response(self, utterance: str, session_id: str = "default", diff --git a/ovos_plugin_manager/thirdparty/solvers.py b/ovos_plugin_manager/thirdparty/solvers.py index db20a78b..7556b796 100644 --- a/ovos_plugin_manager/thirdparty/solvers.py +++ b/ovos_plugin_manager/thirdparty/solvers.py @@ -107,7 +107,7 @@ def sentence_split(text: str, max_sentences: int = 25) -> List[str]: for t in text.split("\n")])[:max_sentences] except Exception as e: LOG.exception(f"Error in sentence_split: {e}") - return [text] + return text.split("\n") @lru_cache(maxsize=128) def detect_language(self, text: str) -> str: