Standard ChatGPT
def generate_random_hex(length: int = 17) -> str
Generate a random hex string
Arguments:
- length (int, optional) - Length of the hex string. Defaults to 17.
Returns:
- str - Random hex string
def random_int(min: int, max: int) -> int
Generate a random integer
Arguments:
- min (int) - Minimum value
- max (int) - Maximum value
Returns:
- int - Random integer
def logger(is_timed: bool)
Logger decorator
Arguments:
- is_timed (bool) - Whether to include the function's running time in the exit log
Returns:
- Callable - The decorated function
class Chatbot()
Chatbot class for ChatGPT
@logger(is_timed=True)
def __init__(config: dict[str, str],
conversation_id: str | None = None,
parent_id: str | None = None,
lazy_loading: bool = True,
base_url: str | None = None) -> None
Initialize a chatbot
Arguments:
- config (dict[str, str]) - Login and proxy info. Example:
  {
    "access_token": "<access_token>",
    "proxy": "<proxy_url_string>",
    "model": "<model_name>",
    "plugin": "<plugin_id>",
  }
  More details on these are available at https://github.com/acheong08/ChatGPT#configuration
- conversation_id (str | None, optional) - Id of the conversation to continue on. Defaults to None.
- parent_id (str | None, optional) - Id of the previous response message to continue on. Defaults to None.
- lazy_loading (bool, optional) - Whether to load conversation history lazily. Defaults to True.
- base_url (str | None, optional) - Base URL of the ChatGPT backend, overriding the default. Defaults to None.
- session_client (type, optional) - Custom session client to use for requests. Defaults to None.
Raises:
- Exception - If the configuration is invalid or authentication fails
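For orientation, a minimal construction sketch. The import path revChatGPT.V1 and the single access_token key are assumptions taken from the repository linked above; every value shown is a placeholder.

```python
from revChatGPT.V1 import Chatbot  # import path assumed from the linked repository

chatbot = Chatbot(
    config={
        "access_token": "<access_token>",  # placeholder; supply a real token
        # "proxy": "<proxy_url_string>",   # optional
        # "model": "<model_name>",         # optional
    },
)
```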
@logger(is_timed=False)
def set_access_token(access_token: str) -> None
Set access token in request header and self.config, then cache it to file.
Arguments:
- access_token (str) - The access token
@logger(is_timed=True)
def login() -> None
Login to OpenAI by email and password
@logger(is_timed=True)
def post_messages(messages: list[dict],
conversation_id: str | None = None,
parent_id: str | None = None,
plugin_ids: list = [],
model: str | None = None,
auto_continue: bool = False,
timeout: float = 360,
**kwargs) -> Generator[dict, None, None]
Post messages to the chatbot
Arguments:
- messages (list[dict]) - The messages to send
- conversation_id (str | None, optional) - UUID for the conversation to continue on. Defaults to None.
- parent_id (str | None, optional) - UUID for the message to continue on. Defaults to None.
- plugin_ids (list, optional) - IDs of the plugins to use. Defaults to [].
- model (str | None, optional) - The model to use. Defaults to None.
- auto_continue (bool, optional) - Whether to continue the conversation automatically. Defaults to False.
- timeout (float, optional) - Timeout for getting the full response, in seconds. Defaults to 360.
Yields:
- Generator[dict, None, None] - The response from the chatbot:
  {
    "message": str,
    "conversation_id": str,
    "parent_id": str,
    "model": str,
    "finish_details": str,  # "max_tokens" or "stop"
    "end_turn": bool,
    "recipient": str,
    "citations": list[dict],
  }
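A usage sketch, not a definitive recipe: the message-dict layout below mirrors what ask() appears to build internally and should be treated as an assumption; verify it against the repository before relying on it.

```python
import uuid

from revChatGPT.V1 import Chatbot  # import path assumed

chatbot = Chatbot(config={"access_token": "<access_token>"})  # placeholder token

# Assumed message layout; each entry is one user message.
messages = [
    {
        "id": str(uuid.uuid4()),
        "author": {"role": "user"},
        "content": {"content_type": "text", "parts": ["Hello there!"]},
    },
]

last = None
for data in chatbot.post_messages(messages):
    last = data  # each yielded dict holds the message streamed so far
if last:
    print(last["message"])
    print(last["conversation_id"])
```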
@logger(is_timed=True)
def ask(prompt: str,
conversation_id: str | None = None,
parent_id: str = "",
model: str = "",
plugin_ids: list = [],
auto_continue: bool = False,
timeout: float = 360,
**kwargs) -> Generator[dict, None, None]
Ask a question to the chatbot
Arguments:
- prompt (str) - The question
- conversation_id (str, optional) - UUID for the conversation to continue on. Defaults to None.
- parent_id (str, optional) - UUID for the message to continue on. Defaults to "".
- model (str, optional) - The model to use. Defaults to "".
- plugin_ids (list, optional) - IDs of the plugins to use. Defaults to [].
- auto_continue (bool, optional) - Whether to continue the conversation automatically. Defaults to False.
- timeout (float, optional) - Timeout for getting the full response, in seconds. Defaults to 360.
Yields:
- dict - The response from the chatbot:
  {
    "message": str,
    "conversation_id": str,
    "parent_id": str,
    "model": str,
    "finish_details": str,  # "max_tokens" or "stop"
    "end_turn": bool,
    "recipient": str,
  }
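A minimal sketch of consuming the ask() generator, assuming the streamed "message" field is cumulative (each yield contains the full reply so far). The import path and token are placeholders as before.

```python
from revChatGPT.V1 import Chatbot  # import path assumed

chatbot = Chatbot(config={"access_token": "<access_token>"})  # placeholder token

prev_text = ""
for data in chatbot.ask("What is the capital of France?"):
    # "message" holds the full reply so far, so print only the newly streamed part.
    print(data["message"][len(prev_text):], end="", flush=True)
    prev_text = data["message"]
print()
```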
@logger(is_timed=True)
def continue_write(conversation_id: str | None = None,
parent_id: str = "",
model: str = "",
auto_continue: bool = False,
timeout: float = 360) -> Generator[dict, None, None]
Let the chatbot continue to write.
Arguments:
- conversation_id (str | None, optional) - UUID for the conversation to continue on. Defaults to None.
- parent_id (str, optional) - UUID for the message to continue on. Defaults to "".
- model (str, optional) - The model to use. Defaults to "".
- auto_continue (bool, optional) - Whether to continue the conversation automatically. Defaults to False.
- timeout (float, optional) - Timeout for getting the full response, in seconds. Defaults to 360.
Yields:
- dict - The response from the chatbot:
  {
    "message": str,
    "conversation_id": str,
    "parent_id": str,
    "model": str,
    "finish_details": str,  # "max_tokens" or "stop"
    "end_turn": bool,
    "recipient": str,
  }
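A sketch of resuming a cut-off reply: when the last yielded dict reports finish_details == "max_tokens", continue_write() can pick up from that conversation and message. Import path and token are placeholders.

```python
from revChatGPT.V1 import Chatbot  # import path assumed

chatbot = Chatbot(config={"access_token": "<access_token>"})  # placeholder token

# Capture the last response of an exchange.
last = None
for data in chatbot.ask("Write a long story about a lighthouse keeper."):
    last = data

# If the reply was cut off, ask the model to keep writing in the same thread.
if last and last["finish_details"] == "max_tokens":
    for data in chatbot.continue_write(conversation_id=last["conversation_id"],
                                       parent_id=last["parent_id"]):
        last = data

if last:
    print(last["message"])
```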
@logger(is_timed=True)
def get_conversations(offset: int = 0,
limit: int = 20,
encoding: str | None = None) -> list
Get conversations
Arguments:
- offset (int) - Offset into the conversation list
- limit (int) - Maximum number of conversations to return
- encoding (str | None, optional) - Character encoding for the response. Defaults to None.
@logger(is_timed=True)
def get_msg_history(convo_id: str, encoding: str | None = None) -> list
Get message history
Arguments:
- convo_id (str) - UUID of the conversation
- encoding (str | None, optional) - Character encoding for the response. Defaults to None.
def share_conversation(title: str = None,
convo_id: str = None,
node_id: str = None,
anonymous: bool = True) -> str
Creates a share link to a conversation
Arguments:
- convo_id (str) - UUID of the conversation
- node_id (str) - UUID of the node
- anonymous (bool) - Whether to share the conversation anonymously
- title (str) - Title of the shared conversation
Returns:
- str - A URL to the shared link
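A sketch of sharing a conversation; convo_id is taken from the "conversation_id" field of an earlier response, and the import path and token are placeholders as before.

```python
from revChatGPT.V1 import Chatbot  # import path assumed

chatbot = Chatbot(config={"access_token": "<access_token>"})  # placeholder token

last = None
for data in chatbot.ask("Give me three facts about octopuses."):
    last = data

if last:
    # The returned string is the public share URL.
    share_url = chatbot.share_conversation(title="Octopus facts",
                                           convo_id=last["conversation_id"])
    print(share_url)
```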
@logger(is_timed=True)
def gen_title(convo_id: str, message_id: str) -> str
Generate title for conversation
Arguments:
- convo_id (str) - UUID of the conversation
- message_id (str) - UUID of the message
@logger(is_timed=True)
def change_title(convo_id: str, title: str) -> None
Change title of conversation
Arguments:
- convo_id (str) - UUID of the conversation
- title (str) - New title
@logger(is_timed=True)
def delete_conversation(convo_id: str) -> None
Delete conversation
Arguments:
- convo_id (str) - UUID of the conversation
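A combined sketch of the conversation-management helpers above. The "id" and "title" keys on each listed conversation are assumptions about the backend's response format; inspect the returned list before relying on them.

```python
from revChatGPT.V1 import Chatbot  # import path assumed

chatbot = Chatbot(config={"access_token": "<access_token>"})  # placeholder token

conversations = chatbot.get_conversations(offset=0, limit=20)
for convo in conversations:
    # "id" and "title" keys are assumed from the backend's response format.
    print(convo.get("id"), convo.get("title"))

if conversations:
    first_id = conversations[0]["id"]
    history = chatbot.get_msg_history(first_id)
    chatbot.change_title(first_id, "Renamed conversation")
    # chatbot.delete_conversation(first_id)  # uncomment to actually delete it
```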
@logger(is_timed=True)
def clear_conversations() -> None
Delete all conversations
@logger(is_timed=False)
def reset_chat() -> None
Reset the conversation ID and parent ID.
Returns:
None
@logger(is_timed=False)
def rollback_conversation(num: int = 1) -> None
Rollback the conversation.
Arguments:
- num (int) - The number of messages to roll back. Defaults to 1.
Returns:
None
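A short sketch of the local-state helpers: rollback_conversation(n) rewinds the parent pointer by n messages, and reset_chat() starts a fresh conversation. Import path and token are placeholders.

```python
from revChatGPT.V1 import Chatbot  # import path assumed

chatbot = Chatbot(config={"access_token": "<access_token>"})  # placeholder token

for data in chatbot.ask("Hello!"):
    pass

chatbot.rollback_conversation(num=1)  # forget the last exchange locally
chatbot.reset_chat()                  # start an entirely new conversation
```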
@logger(is_timed=True)
def get_plugins(offset: int = 0, limit: int = 250, status: str = "approved")
Get plugins
Arguments:
- offset (int) - Offset (only 0 is supported)
- limit (int) - Limit (must be below 250)
- status (str) - Status of the plugin ("approved")
@logger(is_timed=True)
def install_plugin(plugin_id: str)
Install plugin by ID
Arguments:
- plugin_id (str) - ID of the plugin
@logger(is_timed=True)
def get_unverified_plugin(domain: str, install: bool = True) -> dict
Get unverified plugin by domain
Arguments:
- domain (str) - Domain of the plugin
- install (bool) - Whether to install the plugin if found
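A sketch of plugin discovery and installation. The layout of the get_plugins() response is not documented here, so the example only prints it; the plugin ID and domain shown are placeholders.

```python
from revChatGPT.V1 import Chatbot  # import path assumed

chatbot = Chatbot(config={"access_token": "<access_token>"})  # placeholder token

# List approved plugins; inspect the raw response before relying on specific keys.
plugins = chatbot.get_plugins(offset=0, limit=250, status="approved")
print(plugins)

# chatbot.install_plugin("<plugin_id>")  # install a plugin by its ID (placeholder)

# Look up (and optionally install) an unverified plugin by its domain (placeholder).
unverified = chatbot.get_unverified_plugin("example.com", install=False)
print(unverified)
```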
class AsyncChatbot(Chatbot)
Async Chatbot class for ChatGPT
def __init__(config: dict,
conversation_id: str | None = None,
parent_id: str | None = None,
base_url: str | None = None,
lazy_loading: bool = True) -> None
Same as Chatbot class, but with async methods.
async def post_messages(messages: list[dict],
conversation_id: str | None = None,
parent_id: str | None = None,
plugin_ids: list = [],
model: str | None = None,
auto_continue: bool = False,
timeout: float = 360,
**kwargs) -> AsyncGenerator[dict, None]
Post messages to the chatbot
Arguments:
- messages (list[dict]) - The messages to post
- conversation_id (str | None, optional) - UUID for the conversation to continue on. Defaults to None.
- parent_id (str | None, optional) - UUID for the message to continue on. Defaults to None.
- plugin_ids (list, optional) - IDs of the plugins to use. Defaults to [].
- model (str | None, optional) - The model to use. Defaults to None.
- auto_continue (bool, optional) - Whether to continue the conversation automatically. Defaults to False.
- timeout (float, optional) - Timeout for getting the full response, in seconds. Defaults to 360.
Yields:
- AsyncGenerator[dict, None] - The response from the chatbot:
  {
    "message": str,
    "conversation_id": str,
    "parent_id": str,
    "model": str,
    "finish_details": str,
    "end_turn": bool,
    "recipient": str,
    "citations": list[dict],
  }
async def ask(prompt: str,
conversation_id: str | None = None,
parent_id: str = "",
model: str = "",
plugin_ids: list = [],
auto_continue: bool = False,
timeout: int = 360,
**kwargs) -> AsyncGenerator[dict, None]
Ask a question to the chatbot
Arguments:
- prompt (str) - The question to ask
- conversation_id (str | None, optional) - UUID for the conversation to continue on. Defaults to None.
- parent_id (str, optional) - UUID for the message to continue on. Defaults to "".
- model (str, optional) - The model to use. Defaults to "".
- plugin_ids (list, optional) - IDs of the plugins to use. Defaults to [].
- auto_continue (bool, optional) - Whether to continue the conversation automatically. Defaults to False.
- timeout (float, optional) - Timeout for getting the full response, in seconds. Defaults to 360.
Yields:
- AsyncGenerator[dict, None] - The response from the chatbot:
  {
    "message": str,
    "conversation_id": str,
    "parent_id": str,
    "model": str,
    "finish_details": str,
    "end_turn": bool,
    "recipient": str,
  }
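The async variants are consumed with async for; a minimal sketch mirroring the synchronous ask() example, with the same assumptions about the import path, token placeholder, and cumulative "message" field.

```python
import asyncio

from revChatGPT.V1 import AsyncChatbot  # import path assumed


async def main() -> None:
    chatbot = AsyncChatbot(config={"access_token": "<access_token>"})  # placeholder
    prev_text = ""
    async for data in chatbot.ask("Summarize the plot of Hamlet in one sentence."):
        # Print only the newly streamed portion of the reply.
        print(data["message"][len(prev_text):], end="", flush=True)
        prev_text = data["message"]
    print()


asyncio.run(main())
```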
async def continue_write(conversation_id: str | None = None,
parent_id: str = "",
model: str = "",
auto_continue: bool = False,
timeout: float = 360) -> AsyncGenerator[dict, None]
Let the chatbot continue to write.
Arguments:
- conversation_id (str | None, optional) - UUID for the conversation to continue on. Defaults to None.
- parent_id (str, optional) - UUID for the message to continue on. Defaults to "".
- model (str, optional) - Model to use. Defaults to "".
- auto_continue (bool, optional) - Whether to continue writing automatically. Defaults to False.
- timeout (float, optional) - Timeout for getting the full response, in seconds. Defaults to 360.
Yields:
- AsyncGenerator[dict, None] - The response from the chatbot:
  {
    "message": str,
    "conversation_id": str,
    "parent_id": str,
    "model": str,
    "finish_details": str,
    "end_turn": bool,
    "recipient": str,
  }
async def get_conversations(offset: int = 0, limit: int = 20) -> list
Get conversations
Arguments:
- offset (int) - Offset into the conversation list
- limit (int) - Maximum number of conversations to return
async def get_msg_history(convo_id: str,
encoding: str | None = "utf-8") -> dict
Get message history
Arguments:
- convo_id (str) - UUID of the conversation
- encoding (str | None, optional) - Character encoding for the response. Defaults to "utf-8".
async def share_conversation(title: str = None,
convo_id: str = None,
node_id: str = None,
anonymous: bool = True) -> str
Creates a share link to a conversation
Arguments:
- convo_id (str) - UUID of the conversation
- node_id (str) - UUID of the node
Returns:
- str - A URL to the shared link
async def gen_title(convo_id: str, message_id: str) -> None
Generate title for conversation
async def change_title(convo_id: str, title: str) -> None
Change title of conversation
Arguments:
- convo_id (str) - UUID of the conversation
- title (str) - New title
async def delete_conversation(convo_id: str) -> None
Delete conversation
Arguments:
- convo_id (str) - UUID of the conversation
async def clear_conversations() -> None
Delete all conversations
@logger(is_timed=False)
def configure() -> dict
Looks for a config file in the standard locations and returns the parsed configuration as a dict.
@logger(is_timed=False)
def main(config: dict) -> NoReturn
Main function for the ChatGPT program.
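A hedged sketch of wiring these two functions together as documented; the import path is assumed, and whether the package also installs its own console entry point is not covered here.

```python
from revChatGPT.V1 import configure, main  # import path assumed

if __name__ == "__main__":
    # configure() loads the config dict; main() then runs the interactive loop.
    main(configure())
```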