Support chatting with ChatGPT web

hibobmaster 2023-05-28 21:38:28 +08:00
parent 5c736cf86d
commit 2e3fb4ae30
Signed by: bobmaster
SSH key fingerprint: SHA256:5ZYgd8fg+PcNZNy4SzcSKu5JtqZyBF8kUhY7/k2viDk
6 changed files with 223 additions and 6 deletions


@@ -5,3 +5,5 @@ OPENAI_API_KEY="sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
BING_API_ENDPOINT="http://api:3000/conversation"
BARD_TOKEN="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx."
BING_AUTH_COOKIE="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
PANDORA_API_ENDPOINT="http://127.0.0.1:8008"
PANDORA_API_MODEL="text-davinci-002-render-sha-mobile"


@@ -1,11 +1,12 @@
## Introduction
- This is a simple Mattermost Bot that uses OpenAI's GPT API and Bing AI and Google Bard to generate responses to user inputs. The bot responds to six types of prompts: `!gpt`, `!chat` and `!bing` and `!pic` and `!bard` and `!help` depending on the first word of the prompt.
+ This is a simple Mattermost Bot that uses OpenAI's GPT API, Bing AI and Google Bard to generate responses to user input. The bot responds to these commands, depending on the first word of the prompt: `!gpt`, `!chat`, `!bing`, `!pic`, `!bard`, `!talk`, `!goon`, `!new` and `!help`.
## Feature
- 1. Support Openai ChatGPT and Bing AI and Google Bard(US only at the moment)
+ 1. Support OpenAI ChatGPT, Bing AI and Google Bard
2. Support Bing Image Creator
3. Support ChatGPT web access through [pandora](https://github.com/pengzhile/pandora)
## Installation and Setup
@@ -17,6 +18,20 @@ Edit `config.json` or `.env` with proper values
docker compose up -d
```
## Commands
- `!help` help message
- `!gpt + [prompt]` generate a one-time response from ChatGPT
- `!chat + [prompt]` chat using the official ChatGPT API with conversation context
- `!bing + [prompt]` chat with Bing AI with conversation context
- `!bard + [prompt]` chat with Google's Bard
- `!pic + [prompt]` generate an image from Bing Image Creator

The following commands need the pandora HTTP API (see the example below): https://github.com/pengzhile/pandora/blob/master/doc/wiki_en.md#http-restful-api
- `!talk + [prompt]` chat using ChatGPT web with conversation context
- `!goon` ask ChatGPT to complete the unfinished reply from the previous conversation
- `!new` start a new conversation
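A minimal sketch of the request behind `!talk`, based on the payload built by `pandora.py` in this repository (the endpoint, model and prompt are example values taken from `.env`):

```python
import asyncio
import uuid

import aiohttp


async def demo():
    # first message of a conversation: no conversation_id yet
    payload = {
        "prompt": "Hello!",
        "model": "text-davinci-002-render-sha-mobile",
        "message_id": str(uuid.uuid4()),
        "parent_message_id": str(uuid.uuid4()),
        "stream": False,
    }
    async with aiohttp.ClientSession() as session:
        async with session.post(
            "http://127.0.0.1:8008/api/conversation/talk", json=payload
        ) as resp:
            data = await resp.json()
    # the reply text and the ids used to continue the conversation
    print(data["message"]["content"]["parts"][0])
    print(data["conversation_id"], data["message"]["id"])


asyncio.run(demo())
```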
## Demo
![demo1](https://i.imgur.com/XRAQB4B.jpg)

bot.py

@@ -11,6 +11,8 @@ from bing import BingBot
from bard import Bardbot
from BingImageGen import ImageGenAsync
from log import getlogger
from pandora import Pandora
import uuid
logger = getlogger()
@@ -26,6 +28,8 @@ class Bot:
openai_api_key: Optional[str] = None,
openai_api_endpoint: Optional[str] = None,
bing_api_endpoint: Optional[str] = None,
pandora_api_endpoint: Optional[str] = None,
pandora_api_model: Optional[str] = None,
bard_token: Optional[str] = None,
bing_auth_cookie: Optional[str] = None,
port: int = 443,
@@ -112,6 +116,18 @@ class Bot:
"bing_api_endpoint is not provided, !bing command will not work"
)
        # initialize pandora
        if pandora_api_endpoint is not None:
            self.pandora_api_endpoint = pandora_api_endpoint
            self.pandora = Pandora(
                api_endpoint=pandora_api_endpoint
            )
            self.pandora_init()
            if pandora_api_model is None:
                self.pandora_api_model = "text-davinci-002-render-sha-mobile"
            else:
                self.pandora_api_model = pandora_api_model
        else:
            # keep the attribute defined so the command dispatch below can test it safely
            self.pandora_api_endpoint = None
            logger.warning(
                "pandora_api_endpoint is not provided, !talk, !goon and !new commands will not work"
            )
        self.bard_token = bard_token
        # initialize bard
        if self.bard_token is not None:
@@ -128,24 +144,35 @@ class Bot:
"bing_auth_cookie is not provided, !pic command will not work"
)
- # regular expression to match keyword [!gpt {prompt}] [!chat {prompt}] [!bing {prompt}] [!pic {prompt}] [!bard {prompt}]
+ # regular expression to match keyword
self.gpt_prog = re.compile(r"^\s*!gpt\s*(.+)$")
self.chat_prog = re.compile(r"^\s*!chat\s*(.+)$")
self.bing_prog = re.compile(r"^\s*!bing\s*(.+)$")
self.bard_prog = re.compile(r"^\s*!bard\s*(.+)$")
self.pic_prog = re.compile(r"^\s*!pic\s*(.+)$")
self.help_prog = re.compile(r"^\s*!help\s*.*$")
self.talk_prog = re.compile(r"^\s*!talk\s*(.+)$")
self.goon_prog = re.compile(r"^\s*!goon\s*.*$")
self.new_prog = re.compile(r"^\s*!new\s*.*$")
# close session
def __del__(self) -> None:
self.driver.disconnect()
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.session.close()
def login(self) -> None:
self.driver.login()
# reset the ChatGPT web conversation state used by !talk / !goon / !new
def pandora_init(self) -> None:
self.conversation_id = None
self.parent_message_id = str(uuid.uuid4())
self.first_time = True
async def run(self) -> None:
await self.driver.init_websocket(self.websocket_handler)
@@ -162,6 +189,7 @@ class Bot:
channel_id = raw_data_dict["channel_id"]
sender_name = response["data"]["sender_name"]
raw_message = raw_data_dict["message"]
try:
asyncio.create_task(
self.message_callback(
@@ -217,6 +245,69 @@ class Bot:
logger.error(e, exc_info=True)
raise Exception(e)
        if self.pandora_api_endpoint is not None:
            # !talk command trigger handler
            if self.talk_prog.match(message):
                prompt = self.talk_prog.match(message).group(1)
                try:
                    if self.conversation_id is not None:
                        data = {
                            "prompt": prompt,
                            "model": self.pandora_api_model,
                            "parent_message_id": self.parent_message_id,
                            "conversation_id": self.conversation_id,
                            "stream": False,
                        }
                    else:
                        data = {
                            "prompt": prompt,
                            "model": self.pandora_api_model,
                            "parent_message_id": self.parent_message_id,
                            "stream": False,
                        }
                    response = await self.pandora.talk(data)
                    self.conversation_id = response['conversation_id']
                    self.parent_message_id = response['message']['id']
                    content = response['message']['content']['parts'][0]
                    if self.first_time:
                        self.first_time = False
                        data = {
                            "model": self.pandora_api_model,
                            "message_id": self.parent_message_id,
                        }
                        await self.pandora.gen_title(data, self.conversation_id)
                    await asyncio.to_thread(
                        self.send_message, channel_id, f"{content}"
                    )
                except Exception as e:
                    logger.error(e, exc_info=True)
                    raise Exception(e)
            # !goon command trigger handler
            if self.goon_prog.match(message) and self.conversation_id is not None:
                try:
                    data = {
                        "model": self.pandora_api_model,
                        "parent_message_id": self.parent_message_id,
                        "conversation_id": self.conversation_id,
                        "stream": False,
                    }
                    response = await self.pandora.goon(data)
                    self.conversation_id = response['conversation_id']
                    self.parent_message_id = response['message']['id']
                    content = response['message']['content']['parts'][0]
                    await asyncio.to_thread(
                        self.send_message, channel_id, f"{content}"
                    )
                except Exception as e:
                    logger.error(e, exc_info=True)
                    raise Exception(e)
            # !new command trigger handler
            if self.new_prog.match(message):
                self.pandora_init()
        if self.bard_token is not None:
            # !bard command trigger handler
            if self.bard_prog.match(message):
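For reference, the field accesses in the !talk and !goon handlers above assume a non-stream pandora response shaped roughly like the sketch below (only the fields the bot reads are shown; the values are made up):

```python
# illustrative (made-up) response from POST /api/conversation/talk or /goon with "stream": False
response = {
    "conversation_id": "8a35a7d0-6d3d-4f4e-9c1a-2f9d0b1c7e55",  # reused for follow-up turns
    "message": {
        "id": "f3b1c2d4-5e6f-4a7b-8c9d-0e1f2a3b4c5d",  # becomes the next parent_message_id
        "content": {"parts": ["The assistant's reply text."]},
    },
}
content = response["message"]["content"]["parts"][0]
```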
@@ -265,7 +356,7 @@ class Bot:
        self.driver.posts.create_post(
            options={
                "channel_id": channel_id,
-                "message": message,
+                "message": message
            }
        )
@@ -321,6 +412,9 @@ class Bot:
+ "!bing [content], chat with context conversation powered by Bing AI\n"
+ "!bard [content], chat with Google's Bard\n"
+ "!pic [prompt], Image generation by Microsoft Bing\n"
+ "!talk [content], talk using chatgpt web\n"
+ "!goon, continue the incomplete conversation\n"
+ "!new, start a new conversation\n"
+ "!help, help message"
)
return help_info


@@ -5,5 +5,7 @@
"openai_api_key": "sk-xxxxxxxxxxxxxxxxxxx",
"bing_api_endpoint": "http://api:3000/conversation",
"bard_token": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxx.",
"bing_auth_cookie": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
"bing_auth_cookie": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
"pandora_api_endpoint": "http://127.0.0.1:8008",
"pandora_api_model": "text-davinci-002-render-sha-mobile"
}


@@ -20,6 +20,8 @@ async def main():
bing_api_endpoint=config.get("bing_api_endpoint"),
bard_token=config.get("bard_token"),
bing_auth_cookie=config.get("bing_auth_cookie"),
pandora_api_endpoint=config.get("pandora_api_endpoint"),
pandora_api_model=config.get("pandora_api_model"),
port=config.get("port"),
timeout=config.get("timeout"),
)
@@ -36,6 +38,8 @@ async def main():
bing_api_endpoint=os.environ.get("BING_API_ENDPOINT"),
bard_token=os.environ.get("BARD_TOKEN"),
bing_auth_cookie=os.environ.get("BING_AUTH_COOKIE"),
pandora_api_endpoint=os.environ.get("PANDORA_API_ENDPOINT"),
pandora_api_model=os.environ.get("PANDORA_API_MODEL"),
port=os.environ.get("PORT"),
timeout=os.environ.get("TIMEOUT"),
)

pandora.py (new file)

@@ -0,0 +1,100 @@
# https://github.com/pengzhile/pandora/blob/master/doc/HTTP-API.md
import uuid
import aiohttp
import asyncio


class Pandora:
    def __init__(self, api_endpoint: str) -> None:
        self.api_endpoint = api_endpoint.rstrip('/')
        self.session = aiohttp.ClientSession()

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        await self.session.close()

    async def gen_title(self, data: dict, conversation_id: str) -> dict:
        """
        data = {
            "model": "",
            "message_id": "",
        }
        :param data: dict
        :param conversation_id: str
        :return: dict
        """
        api_endpoint = self.api_endpoint + f"/api/conversation/gen_title/{conversation_id}"
        async with self.session.post(api_endpoint, json=data) as resp:
            return await resp.json()

    async def talk(self, data: dict) -> dict:
        """
        data = {
            "prompt": "",
            "model": "",
            "parent_message_id": "",
            "conversation_id": "",  # omit for the first message of a conversation
            "stream": True,
        }
        :param data: dict
        :return: dict
        """
        api_endpoint = self.api_endpoint + "/api/conversation/talk"
        # every request needs a fresh client-side message id
        data['message_id'] = str(uuid.uuid4())
        async with self.session.post(api_endpoint, json=data) as resp:
            return await resp.json()

    async def goon(self, data: dict) -> dict:
        """
        data = {
            "model": "",
            "parent_message_id": "",
            "conversation_id": "",
            "stream": True,
        }
        """
        api_endpoint = self.api_endpoint + "/api/conversation/goon"
        async with self.session.post(api_endpoint, json=data) as resp:
            return await resp.json()


async def test():
    model = "text-davinci-002-render-sha-mobile"
    api_endpoint = "http://127.0.0.1:8008"
    client = Pandora(api_endpoint)
    conversation_id = None
    parent_message_id = str(uuid.uuid4())
    first_time = True
    async with client:
        while True:
            prompt = input("BobMaster: ")
            if conversation_id:
                data = {
                    "prompt": prompt,
                    "model": model,
                    "parent_message_id": parent_message_id,
                    "conversation_id": conversation_id,
                    "stream": False,
                }
            else:
                data = {
                    "prompt": prompt,
                    "model": model,
                    "parent_message_id": parent_message_id,
                    "stream": False,
                }
            response = await client.talk(data)
            conversation_id = response['conversation_id']
            parent_message_id = response['message']['id']
            content = response['message']['content']['parts'][0]
            print("ChatGPT: " + content + "\n")
            if first_time:
                # let the server generate a title for the new conversation
                first_time = False
                data = {
                    "model": model,
                    "message_id": parent_message_id,
                }
                response = await client.gen_title(data, conversation_id)


if __name__ == '__main__':
    asyncio.run(test())
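As a usage sketch (assuming a pandora HTTP server is listening on the endpoint below, as in `test()`), a `!goon`-style continuation simply threads the ids returned by `talk` into `goon`:

```python
import asyncio
import uuid

from pandora import Pandora


async def continue_demo():
    model = "text-davinci-002-render-sha-mobile"
    client = Pandora("http://127.0.0.1:8008")
    async with client:
        # first turn: start a conversation
        first = await client.talk({
            "prompt": "Tell me a long story about the sea",
            "model": model,
            "parent_message_id": str(uuid.uuid4()),
            "stream": False,
        })
        print(first["message"]["content"]["parts"][0])
        # second turn: ask the model to keep going in the same conversation
        follow = await client.goon({
            "model": model,
            "parent_message_id": first["message"]["id"],
            "conversation_id": first["conversation_id"],
            "stream": False,
        })
        print(follow["message"]["content"]["parts"][0])


if __name__ == "__main__":
    asyncio.run(continue_demo())
```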