Queue, Conversations

This commit is contained in:
parent 036ee268dd
commit 54a2236b6c
@@ -17,6 +17,21 @@
             ws = new WebSocket("ws://localhost:8000/");
             ws.onopen = function(event) {
                 console.log("Connected to WebSocket server.");
+
+                ws.onmessage = function (event) {
+                    if (lastMessageElement == null) {
+                        lastMessageElement = document.createElement('p');
+                        document.getElementById('messages').appendChild(lastMessageElement);
+                    }
+                    var data = JSON.parse(event.data);
+                    if (data.hasOwnProperty('content')) {
+                        if (data.type == 'code') {
+                            lastMessageElement.innerHTML += '<pre>' + data.content + '</pre>';
+                        } else {
+                            lastMessageElement.innerHTML += data.content;
+                        }
+                    }
+                };
             };
             ws.onerror = function(error) {
                 console.log("WebSocket error: ", error);
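Note: the new onmessage handler expects LMC-style JSON chunks rather than raw text (the old handler, deleted in the next hunk, appended event.data directly). A hypothetical pair of chunks, with field names taken from the handler above and values invented:

    # Hypothetical chunks as the browser receives them (values invented):
    message_chunk = {"role": "assistant", "type": "message", "content": "Hello"}
    code_chunk = {"role": "assistant", "type": "code", "format": "python", "content": "print('hi')"}
    # Message content is appended as text; code content is wrapped in <pre> tags.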
@@ -28,13 +43,7 @@
         }
         connectWebSocket();
         var lastMessageElement = null;
-        ws.onmessage = function (event) {
-            if (lastMessageElement == null) {
-                lastMessageElement = document.createElement('p');
-                document.getElementById('messages').appendChild(lastMessageElement);
-            }
-            lastMessageElement.innerHTML += event.data;
-        };
         function sendMessage(event) {
             event.preventDefault();
             var input = document.getElementById("messageInput");
Binary file not shown.
@@ -0,0 +1 @@
+[{"role": "user", "type": "message", "content": "uh"}, {"role": "assistant", "type": "message", "content": "Hello! How can I assist you today?"}, {"role": "user", "type": "message", "content": "hello"}, {"role": "assistant", "type": "message", "content": "Hi there! What can I help you with today? Can we start by planning your day or there's something specific you have in mind?"}]
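Note: this checked-in user.json shows the on-disk conversation format that save_conversation() and load_conversation() below read and write: a flat JSON list of LMC-style messages. A minimal sketch of reading it, assuming the interpreter/conversations/ path used in the next hunks:

    import json

    with open("interpreter/conversations/user.json") as f:
        messages = json.load(f)
    print(messages[0]["role"], messages[0]["content"])  # -> user uh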
@@ -14,7 +14,7 @@ import os
 import glob

 def check_queue():
-    queue_files = glob.glob("/queue/*.json")
+    queue_files = glob.glob("interpreter/queue/*.json")
     if queue_files:
         with open(queue_files[0], 'r') as file:
             data = json.load(file)
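Note: the switch from the absolute /queue/ path to the relative interpreter/queue/ path is what lets outside processes hand messages to the server by dropping JSON files into the repo tree. A minimal producer sketch, assuming the relative path above; the filename scheme is invented, and whether check_queue() deletes the file after reading isn't shown in this hunk:

    import json
    import os
    import time

    # Drop an LMC-style message into the queue directory for the server to pick up.
    os.makedirs("interpreter/queue", exist_ok=True)
    message = {"role": "computer", "type": "message", "content": "Your 10:00am alarm has gone off."}
    with open(f"interpreter/queue/{int(time.time())}.json", "w") as f:  # hypothetical filename scheme
        json.dump(message, f)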
@@ -24,15 +24,15 @@ def check_queue():
     return None

 def save_conversation(messages):
-    with open('/conversations/user.json', 'w') as file:
+    with open('interpreter/conversations/user.json', 'w') as file:
         json.dump(messages, file)

 def load_conversation():
     try:
-        with open('/conversations/user.json', 'r') as file:
+        with open('interpreter/conversations/user.json', 'r') as file:
             messages = json.load(file)
             return messages
-    except FileNotFoundError:
+    except (FileNotFoundError, json.JSONDecodeError):
         return []

 def main(interpreter):
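Note: widening the except clause means an empty or half-written user.json now falls back to a fresh history instead of crashing the loop. A quick round trip, assuming the interpreter/conversations/ directory already exists (save_conversation() does not create it):

    save_conversation([{"role": "user", "type": "message", "content": "hello"}])
    assert load_conversation()[0]["content"] == "hello"

    # Truncate the file to simulate a corrupt write: load now returns [] instead of raising.
    open('interpreter/conversations/user.json', 'w').close()
    assert load_conversation() == []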
@@ -42,55 +42,72 @@ def main(interpreter):
     @app.websocket("/")
     async def i_test(websocket: WebSocket):
         await websocket.accept()
+        data = None
+
         while True:
-            data = await websocket.receive_text()
-            while data.strip().lower() != "stop":  # Stop command
-                task = asyncio.create_task(websocket.receive_text())
-
-                # This would be terrible for production. Just for testing.
-                try:
-                    data_dict = json.loads(data)
-                    if set(data_dict.keys()) == {"role", "content", "type"} or set(
-                        data_dict.keys()
-                    ) == {"role", "content", "type", "format"}:
-                        data = data_dict
-                except json.JSONDecodeError:
-                    pass
-
-                for response in interpreter.chat(
-                    message=data, stream=True, display=False
-                ):
-                    # Check queue
-                    queued_message = check_queue()
-                    if queued_message:
-                        data = queued_message
-                        break
-
-                    if task.done():
-                        data = task.result()  # Get the new message
-                        break  # Break the loop and start processing the new message
-
-                    # Send out assistant message chunks
-                    if (
-                        response.get("type") == "message"
-                        and response["role"] == "assistant"
-                        and "content" in response
-                    ):
-                        await websocket.send_text(response["content"])
-                        await asyncio.sleep(0.01)  # Add a small delay
-
-                    # If it just finished sending an assistant message, send a newline. Otherwise it looks weird.
-                    if (
-                        response.get("type") == "message"
-                        and response["role"] == "assistant"
-                        and response.get("end") == True
-                    ):
-                        await websocket.send_text("\n")
-                        await asyncio.sleep(0.01)  # Add a small delay
-
-                if not task.done():
-                    data = (
-                        await task
-                    )  # Wait for the next message if it hasn't arrived yet
+            # This is the task for waiting for the user to send any message at all.
+            task = asyncio.create_task(websocket.receive_text())
+
+            if data == None:  # Data will have stuff in it if we interrupted it.
+                while True:
+                    # Has the user sent a message?
+                    if task.done():
+                        data = task.result()
+                        break
+
+                    # Has the queue received a message?
+                    queued_message = check_queue()
+                    if queued_message:
+                        data = queued_message
+                        break
+
+                    # Wait 0.2 seconds
+                    await asyncio.sleep(0.2)
+
+            ### FOR DEV ONLY: SIMULATE LMC MESSAGES
+            # This lets users simulate any kind of LMC message by passing a JSON into the textbox in index.html.
+            try:
+                data_dict = json.loads(data)
+                data = data_dict
+            except json.JSONDecodeError:
+                pass
+
+            ### CONVERSATION / DISC MANAGEMENT
+            user_message = {"role": "user", "type": "message", "content": data}
+            messages = load_conversation()
+            messages.append(user_message)
+            save_conversation(messages)
+
+            ### RESPONDING
+
+            # This is the task for waiting for user interruptions.
+            task = asyncio.create_task(websocket.receive_text())
+
+            for chunk in interpreter.chat(
+                messages, stream=True, display=True
+            ):
+                print(chunk)
+                # Check queue
+                queued_message = check_queue()
+                if queued_message:
+                    data = queued_message
+                    break
+
+                # Check for new user messages
+                if task.done():
+                    data = task.result()  # Get the new message
+                    break  # Break the loop and start processing the new message
+
+                # Send out chunks
+                await websocket.send_json(chunk)
+                await asyncio.sleep(0.01)  # Add a small delay
+
+                # If the interpreter just finished sending a message, save it
+                if "end" in chunk:
+                    save_conversation(interpreter.messages)
+                    data = None

     uvicorn.run(app, host="0.0.0.0", port=8000)
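Note: a minimal way to exercise this loop from outside the browser (a sketch, not part of the commit; assumes the third-party websockets package and the server above running on localhost:8000):

    import asyncio
    import json
    import websockets  # pip install websockets

    async def chat():
        async with websockets.connect("ws://localhost:8000/") as ws:
            await ws.send("hello")
            while True:
                chunk = json.loads(await ws.recv())  # server streams LMC chunks via send_json
                print(chunk)
                if "end" in chunk:  # stop once a chunk carries an "end" flag
                    break

    asyncio.run(chat())

Sending a second message while chunks are still streaming should exercise the interruption path (the task.done() check above).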
@@ -1,5 +0,0 @@
-{
-    "role": "computer",
-    "type": "message",
-    "content": "Your 10:00am alarm has gone off."
-}
@@ -50,7 +50,8 @@ interpreter.system_message = system_message

 for file in glob.glob('/tools/*.py'):
     with open(file, 'r') as f:
-        interpreter.computer.run("python", f.read())
+        for chunk in interpreter.computer.run("python", f.read()):
+            print(chunk)


 ### LLM SETTINGS
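Note: a plausible reason for this change (an assumption; the commit doesn't say) is that computer.run() yields its output lazily, so the old one-liner created a generator without ever executing the tool code. A generic illustration of the pitfall:

    def run(code):
        # Hypothetical lazy runner: nothing happens until the generator is consumed.
        yield {"type": "console", "content": f"ran: {code!r}"}

    run("print('hi')")  # generator created; the body never runs
    for chunk in run("print('hi')"):
        print(chunk)  # consuming the generator actually produces output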
@@ -1358,13 +1358,13 @@ files = [

 [[package]]
 name = "litellm"
-version = "1.20.0"
+version = "1.20.1"
 description = "Library to easily interface with LLM API providers"
 optional = false
 python-versions = ">=3.8, !=2.7.*, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*, !=3.7.*"
 files = [
-    {file = "litellm-1.20.0-py3-none-any.whl", hash = "sha256:c90bb88d30307f67849b9c234de2cfd082ab8a259cacb76079cebba6fa27ce06"},
-    {file = "litellm-1.20.0.tar.gz", hash = "sha256:91793c455d94c6999942765be2fe86ec5fe85615f110f499464e33bea15b82ac"},
+    {file = "litellm-1.20.1-py3-none-any.whl", hash = "sha256:83a63c2fde88d3cd11ba963da79ce18c22deb316bf9579fefea86b3116f743ba"},
+    {file = "litellm-1.20.1.tar.gz", hash = "sha256:cb14c567187e2e6fa06a396111701dfe5a4bd3a0ccebd8104a23d7b87c97b2c4"},
 ]

 [package.dependencies]
@@ -8,8 +8,8 @@ license = "AGPL"
 [tool.poetry.dependencies]
 python = "^3.11"
 open-interpreter = "^0.2.0"
-fastapi = "^0.109.0"
 uvicorn = {extras = ["standard"], version = "^0.27.0"}
+fastapi = "^0.109.0"


 [build-system]