resolve conflicts

This commit is contained in:
Alihan
2023-05-19 02:30:17 +03:00
15 changed files with 156 additions and 187 deletions

3
.gitignore vendored
View File

@@ -5,4 +5,5 @@ pids/
logs/
.ipynb_checkpoints
packages
*.ipynb
*.ipynb
.idea

View File

@@ -1,18 +1,23 @@
# Preparations before Flight
## Hardware setup
* turn uadis hotspot on
* plug xiaomi repeater "_plus" into power source
* connect rpi to power source
* wait for boot and ams vpn connection
* until connected devices @uadis increment +1 with device name "uad"
* manual setup:
* ssh into
* cd tello-commander
* source venv/bin/activate
* ./manage.sh connect-drone
* ifconfig: check if wlan1 is up
* ./manage.sh start-commander
* start drone
* until repeater lights blue without blinking
* ./manage.sh get-dhcp
* ./manage.sh start-talking
### manual cli-rpi setup:
* ssh into
* cd tello-commander
* source venv/bin/activate
* ./manage.sh connect-drone
* ifconfig: check if wlan1 is up
* ./manage.sh start-commander
* start drone
* until repeater lights blue without blinking
* ./manage.sh get-dhcp
* ./manage.sh start-clitalk
### auto chatui-rpi setup:
*

View File

@@ -1,7 +1,10 @@
import ast
import json
import os
from commander.commands import CommandHandler
from loguru import logger
from brain.commands import CommandHandler
class BaseBrain:
@@ -20,13 +23,48 @@ class BaseBrain:
with open(prompt_filepath, "r") as f:
return f.read()
def _is_valid_json(self, answer):
try:
response_json = json.loads(answer)
return True
except ValueError as e:
logger.error(f"chatgpt failed to return json obj: {answer}")
return False
def _gc(self):
self.cmd_prompt = None
self.response = None
def is_emergency(self, input):
if input == "q":
print("##### BASE BRAIN: EMERGENCY STOP DETECTED!!! #####")
msg = "##### BASE BRAIN: EMERGENCY STOP DETECTED!!! #####"
logger.warning(msg)
self.response_to_chatui = msg
self.command_handler.handle({"command": "emergency"})
return True
else:
return False
def listen(self):
self.cmd_prompt = input("\n\nwhat should I do now?\n(enter q for emergency)\n\t")
def listen(self, channel="cli", prompt=None):
if channel == "cli":
self.cmd_prompt = input("\n\nwhat should I do now?\n(enter q for emergency)\n\t")
elif channel == "api":
self.cmd_prompt = prompt
def command(self):
if self._is_valid_json(self.answer):
command = ast.literal_eval(self.answer)
if command == {}:
msg = f"I failed to understand: {command}"
logger.warning(msg)
self.response_to_chatui = msg
else:
msg = f"I will send this command: {command}"
logger.success(msg)
self.response_to_chatui = msg
self.command_handler.handle(command)
else:
msg = f"\tI will skip this:\n {self.answer}"
logger.warning(msg)
self.response_to_chatui = msg
self._gc()

View File

@@ -1,10 +1,9 @@
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import json
import ast
import openai
from loguru import logger
from brain_base import BaseBrain
from settings.config import settings
@@ -13,48 +12,25 @@ from settings.config import settings
class CloudSTTBrain:
def __init__(self):
print("not implemented")
logger.error("not implemented")
class CloudChatBrain(BaseBrain):
def __init__(self):
super().__init__()
openai.api_key = settings.OPENAI_API_KEY
def _is_valid_json(self, answer):
try:
response_json = json.loads(answer)
return True
except ValueError as e:
print(f"chatgpt failed to return json obj: {answer}")
return False
def _gc(self):
self.cmd_prompt = None
self.response = None
def understand(self):
## TODO: a try/except block is needed here (OpenAI API call can raise)
self.response = openai.ChatCompletion.create(
model="gpt-3.5-turbo",
temperature=0.2,
temperature=0.3,
messages=[
{"role": "system", "content": self.sys_prompt},
{"role": "user", "content": self.cmd_prompt}
])
def command(self):
answer = self.response.choices[0].message.content
if self._is_valid_json(answer):
command = ast.literal_eval(answer)
if command == {}:
print(f"I failed to understand: {command}")
else:
print(f"I will send this command: {command}")
self.command_handler.handle(command)
else:
print(f"\tI will skip this:\n {command}")
self._gc()
self.answer = self.response.choices[0].message.content

View File

@@ -5,7 +5,7 @@ brain = CloudChatBrain()
while True:
try:
brain.listen()
brain.listen(channel="cli")
if not brain.is_emergency(brain.cmd_prompt):
brain.understand()
brain.command()

33
brain/service.py Normal file
View File

@@ -0,0 +1,33 @@
import random
import gradio as gr
from loguru import logger
from brain.brain_openai import CloudChatBrain
def brain_commander(prompt, history):
brain.listen(channel="api", prompt=prompt)
brain.understand()
history.append({"role": "user", "content": prompt})
history.append({"role": "assistant", "content": brain.answer})
messages = [(history[i]["content"], history[i+1]["content"]) for i in range(0, len(history)-1, 2)]
brain.command()
return messages, history
brain = CloudChatBrain()
with gr.Blocks() as demo:
chatbot_ui = gr.Chatbot(label="drone flight with chatgpt as copilot")
state = gr.State([])
with gr.Row():
prompt = gr.Textbox(
show_label=True,
label="what should I do now? (enter q for emergency)",
placeholder="Enter flight command and press enter")\
.style(container=True)
prompt.submit(brain_commander, [prompt, state], [chatbot_ui, state])
demo.launch(server_name="0.0.0.0", server_port=8890, debug=True)

View File

@@ -1,34 +0,0 @@
import sys
from fastapi import FastAPI
import uvicorn
from brain_openai import CloudChatBrain
#def start_application():
# app = FastAPI()
# # app.include_router(api_router)
# # brain = CloudChatBrain()
# return app
if __name__ == "__main__":
if len(sys.argv) == 1:
port = 8890
else:
port = int(sys.argv[1])
app = FastAPI()
@app.post("/command/")
def post_command(payload: dict):
prompt = payload.get("prompt")
return {"response": prompt}
#app = start_application()
uvicorn.run(app, host="0.0.0.0", port=port, log_level="debug")

View File

@@ -1,33 +0,0 @@
import random
import gradio as gr
from loguru import logger
def make_completion(history):
return "ok"
def answer(input, history):
history.append({"role": "user", "content": input})
response = random.choice(["How are you?", "I love you", "I'm very hungry"])
history.append({"role": "assistant", "content": response})
messages = [(history[i]["content"], history[i+1]["content"]) for i in range(0, len(history)-1, 2)]
logger.debug(history)
return messages, history
with gr.Blocks() as demo:
chatbot = gr.Chatbot(label="Fly my drone with chatGPT as copilot")
state = gr.State([])
with gr.Row():
prompt = gr.Textbox(
show_label=True,
label="what should I do now?\n(enter q for emergency)",
placeholder="Enter text and press enter")\
.style(container=True)
prompt.submit(answer, [prompt, state], [chatbot, state])
demo.launch(server_name="0.0.0.0", server_port=8890, debug=True)

View File

@@ -1,37 +0,0 @@
import gradio as gr
import requests
conversation = []
def generate_response(prompt, params):
#response = requests.get(f"http://localhost:8000/command/{prompt}").json()
response = f"I got {prompt}"
conversation.append({"prompt": prompt, "response": response})
print(params)
#params["conversation_history"].description = f"Conversation History:\n\n{populate_chat_history(conversation)}"
return f"I got: {prompt}"
def populate_chat_history(conversation):
history = ""
for i, chat in enumerate(conversation):
history += f"User: {chat['prompt']}\nBot: {chat['response']}\n\n"
return history
iface = gr.Interface(
fn=generate_response,
inputs=["text"],
outputs=["text"],
parameters=[
{
"type": "textbox",
"key": "conversation_history",
"label": "Conversation:",
"default": ""
}
],
title="fly my drone by talking",
description="what should I do now?\n(enter q for emergency)"
)
iface.launch(server_name="0.0.0.0", server_port=8890, debug=True)

View File

@@ -35,3 +35,5 @@ cd ..
sudo apt-get install wireguard -y
curl https://sh.rustup.rs -sSf | sh

View File

@@ -5,11 +5,11 @@
######################
## SCRIPT INIT
######################
DRONE_INTERFACE=$(yq '.commander.drone_interface' < settings/admin.yml)
DRONE_WPA_SUPP_CONF=$(yq '.commander.drone_wpa_supp' < settings/admin.yml)
NET_INTERFACE=$(yq '.commander.net_interface' < settings/admin.yml)
NET_WPA_SUPP_CONF=$(yq '.commander.net_wpa_supp' < settings/admin.yml)
#ENV_FOR_DYNACONF=$(yq '.commander.env_for_dynaconf' < settings/admin.yml)
DRONE_INTERFACE=$(yq '.drone_interface' < settings/admin.yml)
DRONE_WPA_SUPP_CONF=$(yq '.drone_wpa_supp' < settings/admin.yml)
NET_INTERFACE=$(yq '.net_interface' < settings/admin.yml)
NET_WPA_SUPP_CONF=$(yq '.net_wpa_supp' < settings/admin.yml)
#ENV_FOR_DYNACONF=$(yq '.env_for_dynaconf' < settings/admin.yml)
pids_dir='./pids'
if [[ ! -d "$pids_dir" ]]; then
@@ -31,17 +31,12 @@ list_wifi_ssid(){
}
connect_using_wpa_supp() {
echo "########## log start: $(date) ##########"
sudo wpa_supplicant -D nl80211 -i $DRONE_INTERFACE -c network/$DRONE_WPA_SUPP_CONF
}
#wait_for_drone() {
#while ! ping -c1 192.168.10.1 &>/dev/null; do
# echo "Drone is offline. Waiting.."; sleep 2
#done
#echo "Drone is available, can ask for dhcp"; sleep 1
#}
get_dhcp_ip () {
echo "########## log start: $(date) ##########"
while true;
do
sudo dhclient $DRONE_INTERFACE
@@ -58,23 +53,29 @@ get_dhcp_ip () {
}
start_jupyter() {
echo "########## log start: $(date) ##########"
venv/bin/python -m jupyter lab --ip='0.0.0.0' --NotebookApp.token='' --NotebookApp.password='' --no-browser --port=8888
}
start_codeserver(){
echo "########## log start: $(date) ##########"
packages/code-server/code-server-4.12.0-linux-amd64/bin/code-server --config /home/uad/misc/tello-commander/packages/code-server/config.yaml --disable-getting-started-override --disable-workspace-trust --disable-telemetry ./
}
start_commander_service() {
venv/bin/python commander/commander.py $1
echo "########## log start: $(date) ##########"
venv/bin/python commander/service.py $1
}
start_brain_service() {
venv/bin/python brain/service.py $1
start_webui_service() {
echo "########## log start: $(date) ##########"
commander_host=$1
ENV_FOR_DYNACONF=$commander_host venv/bin/python brain/service.py
}
talk_to_drone() {
ENV_FOR_DYNACONF=$1 python brain/cli.py
talk_to_drone_via_cli() {
commander_host=$1
ENV_FOR_DYNACONF=$commander_host python brain/cli.py
}
kill_everything() {
@@ -131,7 +132,6 @@ elif [ "$1" == "start-jupyter" ]; then
jupyter_pid=$!
echo "started jupyter with PID $jupyter_pid"
echo $jupyter_pid > $pids_dir/jupyter_pid.txt
#tail -f logs/jupyter.log
elif [ "$1" == "stop-jupyter" ]; then
jupyter_pid_file="$pids_dir/jupyter_pid.txt"
@@ -145,7 +145,6 @@ elif [ "$1" == "start-cs" ]; then
codeserver_pid=$!
echo "started code server with PID $codeserver_pid"
echo $codeserver_pid > $pids_dir/codeserver_pid.txt
#tail -f logs/codeserver.log
elif [ "$1" == "stop-cs" ]; then
codeserver_pid_file="$pids_dir/codeserver_pid.txt"
@@ -167,16 +166,29 @@ elif [ "$1" == "start-commander" ]; then
commander_pid=$!
echo "started commander with PID $commander_pid"
echo $commander_pid > $pids_dir/commander_pid.txt
#tail -f logs/commander.log
elif [ "$1" == "stop-commander" ]; then
commander_pid_file="$pids_dir/commander_pid.txt"
if [ -f "$commander_pid_file" ]; then
sudo killport 8889
sudo pkill -15 -P $(cat $commander_pid_file)
sudo killport 8889
echo "stopped commander"
fi
elif [ "$1" == "start-webui" ]; then
start_webui_service $2 > logs/webui.log 2>&1 &
webui_pid=$!
echo "started webui service with PID $webui_pid"
echo $webui_pid > $pids_dir/webui_pid.txt
elif [ "$1" == "stop-webui" ]; then
webui_pid_file="$pids_dir/webui_pid.txt"
if [ -f "$webui_pid_file" ]; then
sudo pkill -15 -P $(cat $webui_pid_file)
sudo killport 8890
echo "stopped webui service"
fi
elif [ "$1" == "prepare-flight" ]; then
./manage.sh connect-drone
./manage.sh get-dhcp
@@ -189,8 +201,8 @@ elif [ "$1" == "finish-flight" ]; then
./manage.sh stop-commander
kill_everything
echo "flight finished"
elif [ "$1" == "start-talking" ]; then
talk_to_drone $2
elif [ "$1" == "start-clitalk" ]; then
talk_to_drone_via_cli $2
######################
## INFO
@@ -201,10 +213,11 @@ else
- list-wifis
- connect-/ disconnect-drone
- get-/ kill-dhcp
- start-/ stop-jupyter
- start-/ stop-cs
- start-/ stop-commander [port]
- start-talking tuncel / commander
- start-/ stop-jupyter (8888)
- start-/ stop-cs (8888)
- start-/ stop-commander (8889)
- start-/ stop-webui [commander_host (tuncel / commander)] (8890)
- start-/ stop-clitalk [commander_host (tuncel / commander_remote / commander_local)]
- turn-off
- prepare-/ finish-flight"
fi

View File

@@ -36,7 +36,10 @@ fonttools==4.39.4
fqdn==1.5.1
frozenlist==1.3.3
fsspec==2023.5.0
gpt-json==0.1.4
gradio==3.31.0
gradio_client==0.2.5
h11==0.14.0
@@ -124,7 +127,10 @@ sniffio==1.3.0
soupsieve==2.4.1
stack-data==0.6.2
starlette==0.26.1
tellopy @ file:///home/uad/misc/tello-commander/packages/TelloPy/dist/tellopy-0.7.0.dev0-py2.py3-none-any.whl
terminado==0.17.1
tiktoken==0.3.3
tinycss2==1.2.1

View File

@@ -1,9 +1,8 @@
commander:
drone_interface:
wlan1
net_interface:
wlan0
drone_wpa_supp:
wpa_supp_djituad0_plus.conf
net_wpa_supp:
wpa_supp_uadis.conf
drone_interface:
wlan1
net_interface:
wlan0
drone_wpa_supp:
wpa_supp_djituad0_plus.conf
net_wpa_supp:
wpa_supp_uadis.conf