
Commit a15848c

prep code for langchain interrupt streaming fix
thehunmonkgroup committed May 9, 2023
1 parent b22739a commit a15848c
Showing 1 changed file with 6 additions and 0 deletions.
chatgpt_wrapper/core/backend.py (6 additions, 0 deletions)
@@ -1,6 +1,9 @@
 from abc import ABC, abstractmethod
 from typing import Any
 
+# TODO: Uncomment after https://github.com/hwchase17/langchain/pull/4403 is fixed.
+# from langchain.callbacks.manager import CallbackManager, StreamInterruption
+# TODO: Remove after https://github.com/hwchase17/langchain/pull/4403 is fixed.
 from langchain.callbacks.manager import CallbackManager
 from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
 
@@ -22,6 +25,9 @@ def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
             util.print_status_message(False, "\n\nWARNING:\nStream interruption on the API backend is not currently working properly, and may not properly store information on an interrupted stream.\nIf you'd like to help fix this error, see https://github.com/mmabrouk/chatgpt-wrapper/issues/274")
             message = "Request to interrupt streaming"
             backend.log.info(message)
+            # TODO: Uncomment after https://github.com/hwchase17/langchain/pull/4403 is fixed.
+            # raise StreamInterruption(message)
+            # TODO: Remove after https://github.com/hwchase17/langchain/pull/4403 is fixed.
             raise EOFError(message)
     return InterruptStreamingCallbackHandler()
