Query improvement (#32)
* Testing memory usage
* Test
* Updating home file
* Small fixes
* Final fixes
* Adding fix for same states
* Small changes
* User id changes
* Config Checker and logging changes
@@ -56,7 +56,7 @@ class ModelExecutor(hass.Hass):
        self.log(f"Initialized rules engine DB", level="INFO")
        try:
            db_rules_engine.to_sql("rules_engine", con=con, if_exists="fail")
            db_rules_engine.to_sql("rules_engine", con=con, if_exists="replace")
        except:
            self.log(f"DB already exists. Skipping", level="INFO")
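For reference, the `if_exists` switch is what this hunk changes: `"fail"` aborts when the table already exists (hence the `except` fallback), while `"replace"` drops and recreates it. A minimal sketch of those semantics, using a hypothetical in-memory SQLite engine and toy data:

```python
# Minimal sketch of pandas' if_exists semantics; engine, table name,
# and data are illustrative, not from the commit.
import pandas as pd
from sqlalchemy import create_engine

engine = create_engine("sqlite://")
df = pd.DataFrame({"device": ["light.hallway_lights"], "state": ["on"]})

df.to_sql("rules_engine", con=engine, if_exists="fail")     # raises if the table exists
df.to_sql("rules_engine", con=engine, if_exists="replace")  # drops and recreates it
```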
@@ -97,7 +97,6 @@ class ModelExecutor(hass.Hass):
                1,
                2,
            ], " More than 2 matching rules. Please reach out in https://discord.gg/bCM2mX9S for assistance."
            self.log(f"matching_rule {matching_rule}")
            rules_state = matching_rule["state"].values

            if len(matching_rule) == 2:
@@ -237,7 +236,7 @@ class ModelExecutor(hass.Hass):
        sensors = tsh_config.sensors
        actuators = tsh_config.actuators
        float_sensors = tsh_config.float_sensors
        devices = actuators + sensors
        devices = tsh_config.devices
        now = datetime.datetime.now()

        if entity in devices:
@@ -262,15 +261,6 @@ class ModelExecutor(hass.Hass):
                    df_sen_states[sensor] = true_state

            last_states = self.last_states
            for device in devices:
                last_state = last_states[device]["state"]
                if device not in float_sensors:
                    if f"last_state_{device}_{last_state}" in df_sen_states.columns:
                        df_sen_states[f"last_state_{device}_{last_state}"] = 1
                elif device in float_sensors:
                    if (last_state) in df_sen_states.columns:
                        df_sen_states[f"last_state_{device}"] = last_state

            all_states = self.get_state()

            # Extract current date
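The loop being removed here one-hot encodes each device's previous state into `last_state_<device>_<state>` feature columns, while float sensors keep a raw `last_state_<device>` value. A hedged, self-contained sketch of that encoding (entity names and values are made up):

```python
# Hypothetical sketch of the last-state feature encoding: discrete devices
# get one-hot "last_state_<device>_<state>" columns, float sensors keep
# their raw value in a single "last_state_<device>" column.
import pandas as pd

df_sen_states = pd.DataFrame(
    0, index=[0],
    columns=["last_state_light.hallway_lights_on", "last_state_sensor.hall_lux"],
)
last_states = {"light.hallway_lights": "on", "sensor.hall_lux": 83.0}
float_sensors = ["sensor.hall_lux"]

for device, last_state in last_states.items():
    if device in float_sensors:
        df_sen_states[f"last_state_{device}"] = last_state  # raw numeric feature
    elif f"last_state_{device}_{last_state}" in df_sen_states.columns:
        df_sen_states[f"last_state_{device}_{last_state}"] = 1  # one-hot flag
print(df_sen_states)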
@@ -1,8 +1,9 @@
{
    "username": "XXXX",
    "password": "XXXX",
    "actuactors_id": [
        "light.bathroom_lights",
        "light.corridor_lights",
        "light.hallway_lights",
        "switch.livingroom_entrance_switch_right",
        "switch.livingroom_entrance_switch_center",
        "switch.livingroom_entrance_switch_left",
@@ -1,11 +1,11 @@
#!/bin/bash
echo "Starting to parse DB data"
python3 -m thesillyhome.model_creator.main


echo "Starting Appdaemon"
eval "echo \"$(</thesillyhome_src/appdaemon/appdaemon.yaml)\"" > /thesillyhome_src/appdaemon/appdaemon.yaml
nohup appdaemon -c /thesillyhome_src/appdaemon/ &

if python3 -m thesillyhome.model_creator.main; then
    echo "Starting Appdaemon"
    nohup appdaemon -c /thesillyhome_src/appdaemon/ &
else
    echo "Model generation failed."
fi
echo "Starting frontend on 0.0.0.0:2300"
PORT=2300 node /thesillyhome_src/frontend/build/index.js
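The revised script gates Appdaemon on the exit status of model generation instead of starting it unconditionally. A rough Python equivalent of that gating, assuming the same module and commands as the script:

```python
# Rough Python analogue of the script's exit-status gating: only start the
# next service when model generation exits with code 0. Commands assume the
# same container layout as the script.
import subprocess

result = subprocess.run(["python3", "-m", "thesillyhome.model_creator.main"])
if result.returncode == 0:
    print("Starting Appdaemon")
    subprocess.Popen(["appdaemon", "-c", "/thesillyhome_src/appdaemon/"])
else:
    print("Model generation failed.")
```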
@@ -0,0 +1,56 @@
import thesillyhome.model_creator.read_config_json as tsh_config
import logging


def base_config_checks():
    check_mandatory_fields(
        [
            ("username", tsh_config.username),
            ("password", tsh_config.password),
            ("actuators", tsh_config.actuators),
            ("sensors", tsh_config.sensors),
            ("db_options", tsh_config.db_options),
            ("db_password", tsh_config.db_password),
            ("db_database", tsh_config.db_database),
            ("db_username", tsh_config.db_username),
            ("db_type", tsh_config.db_type),
            ("db_host", tsh_config.db_host),
            ("db_port", tsh_config.db_port),
        ]
    )
    check_password(tsh_config.password)
    check_db(tsh_config.db_type)


def check_password(password):
    if len(password) < 8:
        raise Exception("Make sure your password is at least 8 characters.")


def check_db(db_type):
    if db_type not in ["mariadb", "postgres"]:
        raise Exception("Make sure your db_type is either `mariadb` or `postgres`.")


def check_mandatory_fields(mandatory_fields: list):
    for name, field in mandatory_fields:
        if field is None:
            raise KeyError(
                f"Missing Mandatory field {name}, please add this to the config file."
            )


def check_actuators_ids(actuators_id):
    if len(actuators_id) > 10:
        logging.warning(
            "In the current implementation, the suggestion is to use <= 10 actuators as it may cause throttling issues with Appdaemon."
        )


def check_device_ids(in_data_ids):
    invalid_actuators_ids = set(tsh_config.actuators) - set(in_data_ids)
    invalid_sensors_ids = set(tsh_config.sensors) - set(in_data_ids)
    if invalid_actuators_ids:
        raise Exception(f"Cannot find actuator cases for ids {invalid_actuators_ids}")
    if invalid_sensors_ids:
        raise Exception(f"Cannot find sensor cases for ids {invalid_sensors_ids}")
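A hedged usage sketch of the new checker, assuming the package is installed: any `None` field raises a `KeyError` that names the missing key, which is why `read_config_json` switches to `options.get(...)` later in this commit.

```python
# Usage sketch: a None value is turned into a readable error naming the key.
from thesillyhome.model_creator.config_checker import check_mandatory_fields

try:
    check_mandatory_fields([("username", "alice"), ("db_port", None)])
except KeyError as err:
    print(err)  # 'Missing Mandatory field db_port, please add this to the config file.'
```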
@@ -1,156 +1,192 @@
# Library imports
from datetime import datetime
import string
import mysql.connector
import psycopg2
import pandas as pd
import os.path
import os
import logging
import uuid
from sqlalchemy import create_engine


# Local application imports
import thesillyhome.model_creator.read_config_json as tsh_config


"""
Get data from DB and store locally
"""


class homedb:
    def __init__(self):
        self.host = tsh_config.db_host
        self.port = tsh_config.db_port
        self.username = tsh_config.db_username
        self.password = tsh_config.db_password
        self.database = tsh_config.db_database
        self.db_type = tsh_config.db_type
        self.share_data = tsh_config.share_data
        self.from_cache = False
        self.mydb = self.connect_internal_db()
        self.extdb = self.connect_external_db()

    def connect_internal_db(self):
        if not self.from_cache:
            if self.db_type == "mariadb":
                mydb = mysql.connector.connect(
                    host=self.host,
                    port=self.port,
                    user=self.username,
                    password=self.password,
                    database=self.database,
                )

            elif self.db_type == "postgres":
                mydb = psycopg2.connect(
                    host=self.host,
                    port=self.port,
                    user=self.username,
                    password=self.password,
                    database=self.database,
                )
            else:
                logging.info("DB type is mariadb or postgres.")
            return mydb
        else:
            return None

    def get_data(self):
        logging.info("Getting data from internal homeassistant db")

        if self.from_cache:
            logging.info("Using cached all_states.pkl")
            return pd.read_pickle(f"{tsh_config.data_dir}/parsed/all_states.pkl")

        query = f"SELECT \
                state_id,\
                entity_id ,\
                state ,\
                last_changed ,\
                last_updated ,\
                old_state_id \
                from states ORDER BY last_updated DESC;"
        mycursor = self.mydb.cursor()
        mycursor.execute(query)
        myresult = mycursor.fetchall()

        # Clean to DF
        col_names = []
        for elt in mycursor.description:
            col_names.append(elt[0])
        df = pd.DataFrame.from_dict(myresult)
        df.columns = col_names

        # Preprocessing
        df = df.set_index("state_id")

        df.to_pickle(f"{tsh_config.data_dir}/parsed/all_states.pkl")
        if self.share_data:
            logging.info("Uploading data to external db. Thanks for sharing!")

            self.upload_data(df)
        self.mydb.close()
        return df

    def connect_external_db(self):
        host = tsh_config.extdb_host
        port = tsh_config.extdb_port
        user = tsh_config.extdb_username
        password = tsh_config.extdb_password
        database = tsh_config.extdb_database
        extdb = create_engine(
            f"mysql+pymysql://{user}:{password}@{host}:{port}/{database}", echo=False
        )
        return extdb

    def upload_data(self, df: pd.DataFrame):

        user_id, last_update_time = self.get_user_info()
        df["user_id"] = user_id
        logging.info(last_update_time)
        df = df[df["last_updated"] > last_update_time]
        if not df.empty:
            df.to_sql(name="states", con=self.extdb, if_exists="append")
            logging.info(f"Data uploaded.")
            max_time = df["last_updated"].max()
            self.update_last_update_time(user_id, max_time)

    def get_user_info(self):
        # here we use the mac address as a dummy, this is used for now until an actual login system

        user_id = hex(uuid.getnode())
        logging.info(f"Using MAC address as user_id {user_id}")

        query = f"SELECT \
                last_update_time \
                from users where user_id = '{user_id}';"

        with self.extdb.connect() as connection:
            myresult = connection.execute(query).fetchall()

        assert len(myresult) in (0, 1)

        if len(myresult) == 1:
            last_update_time = myresult[0][0]
        else:
            # Add user if none

            last_update_time = datetime(1900, 1, 1, 0, 0, 0, 0)

            query = f"INSERT INTO thesillyhomedb.users (user_id,last_update_time)\
                    VALUES ('{user_id}','{last_update_time}');"
            with self.extdb.connect() as connection:
                connection.execute(query)

        return user_id, last_update_time

    def update_last_update_time(self, user_id: string, c_time: datetime):
        logging.info(f"Updating user table with last_update_time {c_time}")
        query = f"UPDATE thesillyhomedb.users \
                SET last_update_time = '{c_time}' \
                WHERE user_id = '{user_id}';"
        with self.extdb.connect() as connection:
            connection.execute(query)
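One caveat with both the old file above and the rewrite below: values are interpolated into SQL via f-strings. A safer pattern, offered here as a suggestion rather than what the commit does, is to bind parameters with SQLAlchemy's `text()` construct (the SQLite engine and table are illustrative):

```python
# Suggested alternative to f-string SQL: bound parameters via text(),
# which sidesteps quoting and injection issues. Engine URL and schema
# are illustrative stand-ins for the project's MySQL users table.
from sqlalchemy import create_engine, text

engine = create_engine("sqlite://")
with engine.begin() as conn:
    conn.execute(text("CREATE TABLE users (user_id TEXT, last_update_time TEXT)"))
    conn.execute(
        text("INSERT INTO users (user_id, last_update_time) VALUES (:uid, :ts)"),
        {"uid": "0xabc123", "ts": "1900-01-01 00:00:00"},
    )
    row = conn.execute(
        text("SELECT last_update_time FROM users WHERE user_id = :uid"),
        {"uid": "0xabc123"},
    ).fetchone()
print(row[0])
```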
# Library imports
from datetime import datetime
import mysql.connector
import psycopg2
import pandas as pd
import os.path
import os
import logging
from sqlalchemy import create_engine
import bcrypt
import json

# Local application imports
import thesillyhome.model_creator.read_config_json as tsh_config


"""
Get data from DB and store locally
"""


class homedb:
    def __init__(self):
        self.host = tsh_config.db_host
        self.port = tsh_config.db_port
        self.username = tsh_config.db_username
        self.password = tsh_config.db_password
        self.database = tsh_config.db_database
        self.db_type = tsh_config.db_type
        self.share_data = tsh_config.share_data
        self.from_cache = False
        self.mydb = self.connect_internal_db()
        self.extdb = self.connect_external_db()
        self.valid_user = self.verify_username()

    def connect_internal_db(self):
        if not self.from_cache:
            if self.db_type == "postgres":
                mydb = create_engine(
                    f"postgresql+psycopg2://{self.username}:{self.password}@{self.host}:{self.port}/{self.database}",
                    echo=False,
                )
            elif self.db_type == "mariadb":
                mydb = create_engine(
                    f"mysql+pymysql://{self.username}:{self.password}@{self.host}:{self.port}/{self.database}",
                    echo=False,
                )
            else:
                raise Exception(f"Invalid DB type : {self.db_type}.")
            return mydb
        else:
            return None

    def connect_external_db(self):
        host = tsh_config.extdb_host
        port = tsh_config.extdb_port
        user = tsh_config.extdb_username
        password = tsh_config.extdb_password
        database = tsh_config.extdb_database
        extdb = create_engine(
            f"mysql+pymysql://{user}:{password}@{host}:{port}/{database}", echo=False
        )
        return extdb

    def get_data(self) -> pd.DataFrame:
        logging.info("Getting data from internal homeassistant db")

        if self.from_cache:
            logging.info("Using cached all_states.pkl")
            return pd.read_pickle(f"{tsh_config.data_dir}/parsed/all_states.pkl")
        logging.info("Executing query")

        query = f"SELECT \
                state_id,\
                entity_id ,\
                state ,\
                last_changed ,\
                last_updated ,\
                old_state_id \
                from states ORDER BY last_updated DESC;"
        with self.mydb.connect() as con:
            con = con.execution_options(stream_results=True)
            list_df = [
                df
                for df in pd.read_sql(
                    query,
                    con=con,
                    index_col="state_id",
                    parse_dates=["last_changed", "last_updated"],
                    chunksize=1000,
                )
            ]
            df_output = pd.concat(list_df)
        df_output.to_pickle(f"{tsh_config.data_dir}/parsed/all_states.pkl")
        if self.share_data:
            logging.info("Uploading data to external db. *Thanks for sharing!*")
            try:
                self.upload_data(df_output)
            except:
                logging.warning("User info not saved.")
        return df_output

    def upload_data(self, df: pd.DataFrame):
        if self.valid_user is None:
            last_update_time = self.create_user()
        else:
            last_update_time = self.valid_user

        df["user_id"] = tsh_config.username
        df = df[df["last_updated"] > last_update_time]
        if not df.empty:
            max_time = df["last_updated"].max()
            self.update_user(max_time)
        else:
            logging.info(f"Latest data already uploaded.")

    def update_user(self, c_time: datetime):
        logging.info(f"Updating user table with last_update_time {c_time} and config")
        options_json = json.dumps(
            {
                k: tsh_config.options[k]
                for k in set(list(tsh_config.options.keys())) - set(["ha_options", "password"])
            }
        )
        query = f"UPDATE thesillyhomedb.users \
                SET last_update_time = '{c_time}',\
                config = '{options_json}' \
                WHERE user_id = '{tsh_config.username}';"

        with self.extdb.begin() as connection:
            connection.execute(query)

    def verify_username(self):
        query = f"SELECT \
                password, last_update_time \
                from users where user_id = '{tsh_config.username}';"
        with self.extdb.begin() as connection:
            myresult = connection.execute(query).fetchall()
        if len(myresult) == 1:
            found_pwd = myresult[0][0]
            if check_password(tsh_config.password, found_pwd):
                logging.info(
                    f"Username {tsh_config.username} exists, correct password. Proceeding..."
                )
                last_update_time = myresult[0][1]
                logging.info(f"Last updated time: {last_update_time}")
                return last_update_time
            else:
                raise ValueError(
                    f"User id {tsh_config.username} already exists. Please use a different username or try a different password."
                )
        elif len(myresult) == 0:
            return None

    def create_user(self):
        logging.info(
            f"Username {tsh_config.username} does not exist. Creating new user."
        )
        last_update_time = datetime(1900, 1, 1, 0, 0, 0, 0)
        logging.info(tsh_config.password)
        new_hashed_pwd = get_hashed_password(tsh_config.password).decode("utf-8")

        query = f"INSERT INTO thesillyhomedb.users (user_id,password,last_update_time) \
                VALUES ('{tsh_config.username}','{new_hashed_pwd}','{last_update_time}');"
        with self.extdb.begin() as connection:
            connection.execute(query)
        return last_update_time

    def log_error(self, exc_traceback):
        if self.valid_user is not None:
            logging.info(f"Logging errors to {tsh_config.username}")
            query = f"UPDATE thesillyhomedb.users \
                    SET log_error = '{exc_traceback}' \
                    WHERE user_id = '{tsh_config.username}';"

            with self.extdb.begin() as connection:
                connection.execute(query)


def get_hashed_password(plain_text_password):
    # Hash a password for the first time
    # (Using bcrypt, the salt is saved into the hash itself)
    return bcrypt.hashpw(plain_text_password.encode("utf-8"), bcrypt.gensalt(15))


def check_password(plain_text_password, hashed_password):
    # Check hashed password. Using bcrypt, the salt is saved into the hash itself
    return bcrypt.checkpw(
        plain_text_password.encode("utf-8"), hashed_password.encode("utf-8")
    )
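The core of the query improvement is in `get_data`: with `stream_results=True` and `chunksize=1000`, rows are fetched incrementally instead of materializing the whole `states` table through a single `fetchall()`. A self-contained sketch of the same pattern, with SQLite and toy data standing in for MariaDB/Postgres:

```python
# Sketch of the chunked-read pattern used in get_data: stream results and
# concatenate fixed-size chunks, so the full result set is never held twice.
# SQLite and the toy table stand in for the real recorder database.
import pandas as pd
from sqlalchemy import create_engine

engine = create_engine("sqlite://")
pd.DataFrame(
    {"state_id": range(5000), "state": ["on", "off"] * 2500}
).to_sql("states", con=engine, index=False)

with engine.connect() as con:
    con = con.execution_options(stream_results=True)  # server-side cursor where supported
    chunks = pd.read_sql("SELECT * FROM states", con=con,
                         index_col="state_id", chunksize=1000)
    df_output = pd.concat(chunks)
print(len(df_output))  # 5000
```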
@@ -90,6 +90,14 @@ def train_all_actuator_models():
            logging.info(f"No cases found for {actuator}")
            continue

        if len(df_act) < 100:
            logging.info("Samples less than 100. Skipping")
            continue

        if df_act["state"].nunique() == 1:
            logging.info(f"All cases for {actuator} have the same state. Skipping")
            continue

        """
        Setting output and feature vector
        """
@@ -119,10 +127,6 @@ def train_all_actuator_models():
        y_train = y_train.drop(columns="duplicate")
        y_test = y_test.drop(columns="duplicate")

        if len(y_train) < 100:
            logging.info("Training samples less than 100. Skipping")
            continue

        train_all_classifiers(
            model_types,
            actuator,
@@ -138,10 +142,14 @@ def train_all_actuator_models():
    df_metrics_matrix = pd.DataFrame(metrics_matrix)
    df_metrics_matrix.to_pickle(f"/thesillyhome_src/data/model/metrics.pkl")

    best_metrics_matrix = df_metrics_matrix.fillna(0)
    best_metrics_matrix = df_metrics_matrix.sort_values(
        "best_optimizer", ascending=False
    ).drop_duplicates(subset=["actuator"], keep="first")
    try:
        best_metrics_matrix = df_metrics_matrix.fillna(0)
        best_metrics_matrix = df_metrics_matrix.sort_values(
            "best_optimizer", ascending=False
        ).drop_duplicates(subset=["actuator"], keep="first")
    except:
        logging.warning("No metrics.")

    best_metrics_matrix.to_json(
        "/thesillyhome_src/frontend/static/data/metrics_matrix.json", orient="records"
    )
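The block above picks the best model per actuator by sorting on `best_optimizer` and keeping the first row for each actuator. A small pandas sketch of that selection with illustrative metrics:

```python
# Sketch of the best-model selection: sort by score descending, then keep
# the first (best) row per actuator. Actuator and model names are made up.
import pandas as pd

df_metrics_matrix = pd.DataFrame({
    "actuator": ["light.hallway_lights", "light.hallway_lights", "light.bathroom_lights"],
    "model": ["DecisionTree", "LogisticRegression", "DecisionTree"],
    "best_optimizer": [0.91, 0.87, 0.78],
})
best = (df_metrics_matrix.fillna(0)
        .sort_values("best_optimizer", ascending=False)
        .drop_duplicates(subset=["actuator"], keep="first"))
print(best)  # one row per actuator, highest best_optimizer wins
```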
@@ -186,6 +194,7 @@ def train_all_classifiers(
        # keep probabilities for the positive outcome only
        y_predictions_proba = y_predictions_proba[:, 1]

        # calculate precision-recall curve
        precision, recall, thresholds = precision_recall_curve(
            y_test, y_predictions_proba
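For context, `predict_proba`-style output has one column per class, so only the second column (the positive class) is passed on. A minimal sklearn sketch with toy labels:

```python
# Minimal sketch of the thresholding step: keep only positive-class
# probabilities, then compute the precision-recall curve. Data is toy.
import numpy as np
from sklearn.metrics import precision_recall_curve

y_test = np.array([0, 0, 1, 1])
y_predictions_proba = np.array([[0.9, 0.1], [0.6, 0.4], [0.35, 0.65], [0.2, 0.8]])
proba_pos = y_predictions_proba[:, 1]  # probability of the positive outcome
precision, recall, thresholds = precision_recall_curve(y_test, proba_pos)
print(precision, recall, thresholds)
```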
@@ -0,0 +1,47 @@
import logging
from logging.handlers import TimedRotatingFileHandler
import sys
from traceback import format_exception

# Local application imports
from thesillyhome.model_creator.home import homedb


def add_logger():
    FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    logname = "/thesillyhome_src/log/thesillyhome.log"

    # setup logging to file
    filelog = TimedRotatingFileHandler(
        logname, when="midnight", interval=1, backupCount=3
    )
    fileformatter = logging.Formatter(FORMAT)
    filelog.setLevel(logging.DEBUG)
    filelog.setFormatter(fileformatter)
    logger.addHandler(filelog)

    # setup logging to console
    console = logging.StreamHandler(sys.stdout)
    console.setLevel(logging.INFO)
    formatter = logging.Formatter(FORMAT)
    console.setFormatter(formatter)
    logger.addHandler(console)


def handle_exception(exc_type, exc_value, exc_traceback):
    if issubclass(exc_type, KeyboardInterrupt):
        sys.__excepthook__(exc_type, exc_value, exc_traceback)
        return
    print(exc_traceback)
    logging.error(
        "Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback)
    )

    homedb().log_error(
        str("".join(format_exception(exc_type, exc_value, exc_traceback))).replace(
            "'", '"'
        )
    )


sys.excepthook = handle_exception
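A hedged usage sketch of the new module: calling `add_logger()` installs both handlers, and because importing the module sets `sys.excepthook`, any uncaught exception is logged and forwarded to `homedb().log_error` (which needs the configured databases to actually succeed):

```python
# Usage sketch, assuming the package is installed and the log directory
# exists. The final raise demonstrates the excepthook path.
from thesillyhome.model_creator.logger import add_logger
import logging

add_logger()
logging.info("writes to console and /thesillyhome_src/log/thesillyhome.log")
raise RuntimeError("uncaught: logged via sys.excepthook, then sent to log_error")
```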
@@ -4,43 +4,14 @@ from thesillyhome.model_creator.read_config_json import replace_yaml
from thesillyhome.model_creator.read_config_json import run_cron
from thesillyhome.model_creator.parse_data import parse_data_from_db
from thesillyhome.model_creator.learning_model import train_all_actuator_models
import logging
from logging.handlers import TimedRotatingFileHandler
from thesillyhome.model_creator.logger import add_logger
from thesillyhome.model_creator.config_checker import base_config_checks

import sys


FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
logname = "/thesillyhome_src/log/thesillyhome.log"

# setup logging to file
filelog = TimedRotatingFileHandler(
    logname, when="midnight", interval=1, backupCount=3
)
fileformatter = logging.Formatter(FORMAT)
filelog.setLevel(logging.DEBUG)
filelog.setFormatter(fileformatter)
logger.addHandler(filelog)

# setup logging to console
console = logging.StreamHandler(sys.stdout)
console.setLevel(logging.INFO)
formatter = logging.Formatter(FORMAT)
console.setFormatter(formatter)
logger.addHandler(console)


def handle_exception(exc_type, exc_value, exc_traceback):
    if issubclass(exc_type, KeyboardInterrupt):
        sys.__excepthook__(exc_type, exc_value, exc_traceback)
        return

    logger.error("Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback))


sys.excepthook = handle_exception


if __name__ == "__main__":

    add_logger()
    base_config_checks()
    replace_yaml()
    parse_data_from_db()
    train_all_actuator_models()
@@ -1,19 +1,16 @@
# Library imports
import pandas as pd
import numpy as np
import copy
from joblib import Parallel, delayed
import tqdm
from tqdm import tqdm
import numpy as np
from multiprocessing import cpu_count
import logging

# Local application imports
from thesillyhome.model_creator.home import homedb
from thesillyhome.model_creator.logger import add_logger
from thesillyhome.model_creator.config_checker import check_device_ids
import thesillyhome.model_creator.read_config_json as tsh_config


def get_current_states(df_output: pd.DataFrame) -> pd.DataFrame:
    """
    Returns pivoted frame of each state id desc
@@ -63,9 +60,7 @@ def convert_unavailabe(df: pd.DataFrame) -> pd.DataFrame:
    ]

    choices = [0, "off"]
    df["state"] = np.select(conditions, choices, default=df["state"])

    return df["state"]
    return np.select(conditions, choices, default=df["state"])


def parse_data_from_db():
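`np.select` maps each condition to its paired choice and falls back to the existing state otherwise. The actual `conditions` list is elided in this hunk, so the ones below are illustrative:

```python
# Sketch of the np.select() mapping in convert_unavailabe; the real
# conditions are not shown in the hunk, these are stand-ins.
import numpy as np
import pandas as pd

df = pd.DataFrame({"state": ["unavailable", "unknown", "on", "off"]})
conditions = [df["state"] == "unavailable", df["state"] == "unknown"]
choices = [0, "off"]
print(np.select(conditions, choices, default=df["state"]))
```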
@@ -78,6 +73,8 @@ def parse_data_from_db():
    logging.info("Reading from homedb...")
    df_all = homedb().get_data()
    df_all = df_all[["entity_id", "state", "last_updated"]]

    check_device_ids(df_all["entity_id"].unique())

    df_all["state"] = convert_unavailabe(df_all)
    assert ~df_all["state"].isnull().values.any(), df_all[df_all["state"].isnull()]
@@ -140,4 +137,5 @@ def parse_data_from_db():


if __name__ == "__main__":
    add_logger()
    parse_data_from_db()
@@ -1,102 +1,103 @@
# Library imports
import subprocess
import json
import os
import logging
from cryptography.fernet import Fernet


data_dir = "/thesillyhome_src/data"

if os.environ.get("HA_ADDON") == "true":
    f = open(f"/data/options.json")
else:
    f = open(f"/thesillyhome_src/data/config/options.json")

options = json.load(f)

# Mandatory
actuators = options["actuactors_id"]
sensors = options["sensors_id"]
devices = actuators + sensors
db_options = options["db_options"][0]
db_password = db_options["db_password"]
db_database = db_options["db_database"]
db_username = db_options["db_username"]
db_type = db_options["db_type"]
db_host = db_options["db_host"]
db_port = db_options["db_port"]

# Defaults
share_data = options.get("share_data", True)
autotrain = options.get("autotrain", True)
autotrain_cadence = options.get("autotrain_cadence", "0 0 * * 0")

# Non-user config

f = Fernet(b"w2PWqacy0_e4XZ2Zb8BU6GauyRgiZXw12wbmi0A6CjQ=")
password = f.decrypt(
    b"gAAAAABi_2EebCwQSA3Lbk3MPCXvH3I6G-w8Ijt0oYiqfmUdzdrMjVRQuTqbpqK-DQCsyVliUWFsvd1NulF-WBsLKOpwmiCp-w=="
).decode("utf-8")
extdb_password = password
extdb_database = "thesillyhomedb"
extdb_username = "thesillyhome_general"
extdb_host = "thesillyhomedb.cluster-cdioawtidgpj.eu-west-2.rds.amazonaws.com"
extdb_port = 3306


# Other helpers
def extract_float_sensors(sensors: list):
    float_sensors_types = ["lux"]
    float_sensors = []
    for sensor in sensors:
        if sensor.split("_")[-1] in float_sensors_types:
            float_sensors.append(sensor)
    return float_sensors


float_sensors = extract_float_sensors(sensors)

output_list_og = ["entity_id", "state"]
output_list = ["entity_id", "state", "last_updated"]
output_list_dup = ["entity_id", "state", "last_updated", "duplicate"]


def replace_yaml():
    if os.environ.get("HA_ADDON") == "true":
        with open("/thesillyhome_src/appdaemon/appdaemon.yaml", "r") as f:
            content = f.read()
        content = content.replace("<ha_url>", "http://supervisor/core")
        content = content.replace("<ha_token>", "$SUPERVISOR_TOKEN")

        with open("/thesillyhome_src/appdaemon/appdaemon.yaml", "w") as file:
            file.write(content)
        return
    else:
        ha_options = options["ha_options"][0]
        ha_url = ha_options["ha_url"]
        ha_token = ha_options["ha_token"]

        with open("/thesillyhome_src/appdaemon/appdaemon.yaml", "r") as f:
            content = f.read()
        content = content.replace("<ha_url>", ha_url)
        content = content.replace("<ha_token>", ha_token)

        with open("/thesillyhome_src/appdaemon/appdaemon.yaml", "w") as file:
            file.write(content)
        return


def run_cron():
    if autotrain == True:
        with open("/thesillyhome_src/startup/crontab", "r") as f:
            content = f.read()
        content = content.replace("<autotrain_cadence>", autotrain_cadence)
        with open("/thesillyhome_src/startup/crontab", "w") as file:
            file.write(content)

        subprocess.run(["crontab", "/thesillyhome_src/startup/crontab"])
        logging.info(f"Running cron with cadence {autotrain_cadence}")
        return
    else:
        return

# Library imports
import subprocess
import json
import os
import logging
from cryptography.fernet import Fernet


data_dir = "/thesillyhome_src/data"

if os.environ.get("HA_ADDON") == "true":
    config_file = open(f"/data/options.json")
else:
    config_file = open(f"/thesillyhome_src/data/config/options.json")

options = json.load(config_file)

# Mandatory
username = options.get("username")
password = options.get("password")
actuators = options.get("actuactors_id")
sensors = options.get("sensors_id")
devices = actuators + sensors
db_options = options.get("db_options")[0]
db_password = db_options.get("db_password")
db_database = db_options.get("db_database")
db_username = db_options.get("db_username")
db_type = db_options.get("db_type")
db_host = db_options.get("db_host")
db_port = db_options.get("db_port")

# Defaults
share_data = options.get("share_data", True)
autotrain = options.get("autotrain", True)
autotrain_cadence = options.get("autotrain_cadence", "0 0 * * 0")

# Non-user config

f = Fernet(b"w2PWqacy0_e4XZ2Zb8BU6GauyRgiZXw12wbmi0A6CjQ=")
extdb_password = f.decrypt(
    b"gAAAAABi_2EebCwQSA3Lbk3MPCXvH3I6G-w8Ijt0oYiqfmUdzdrMjVRQuTqbpqK-DQCsyVliUWFsvd1NulF-WBsLKOpwmiCp-w=="
).decode("utf-8")
extdb_database = "thesillyhomedb"
extdb_username = "thesillyhome_general"
extdb_host = "thesillyhomedb.cluster-cdioawtidgpj.eu-west-2.rds.amazonaws.com"
extdb_port = 3306


# Other helpers
def extract_float_sensors(sensors: list):
    float_sensors_types = ["lux"]
    float_sensors = []
    for sensor in sensors:
        if sensor.split("_")[-1] in float_sensors_types:
            float_sensors.append(sensor)
    return float_sensors


float_sensors = extract_float_sensors(sensors)

output_list_og = ["entity_id", "state"]
output_list = ["entity_id", "state", "last_updated"]
output_list_dup = ["entity_id", "state", "last_updated", "duplicate"]


def replace_yaml():
    if os.environ.get("HA_ADDON") == "true":
        with open("/thesillyhome_src/appdaemon/appdaemon.yaml", "r") as f:
            content = f.read()
        content = content.replace("<ha_url>", "http://supervisor/core")
        content = content.replace("<ha_token>", "$SUPERVISOR_TOKEN")

        with open("/thesillyhome_src/appdaemon/appdaemon.yaml", "w") as file:
            file.write(content)
        return
    else:
        ha_options = options["ha_options"][0]
        ha_url = ha_options["ha_url"]
        ha_token = ha_options["ha_token"]

        with open("/thesillyhome_src/appdaemon/appdaemon.yaml", "r") as f:
            content = f.read()
        content = content.replace("<ha_url>", ha_url)
        content = content.replace("<ha_token>", ha_token)

        with open("/thesillyhome_src/appdaemon/appdaemon.yaml", "w") as file:
            file.write(content)
        return


def run_cron():
    if autotrain == True:
        with open("/thesillyhome_src/startup/crontab", "r") as f:
            content = f.read()
        content = content.replace("<autotrain_cadence>", autotrain_cadence)
        with open("/thesillyhome_src/startup/crontab", "w") as file:
            file.write(content)

        subprocess.run(["crontab", "/thesillyhome_src/startup/crontab"])
        logging.info(f"Running cron with cadence {autotrain_cadence}")
        return
    else:
        return
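The old module indexed `options["key"]` directly, so a missing key crashed at import time with a bare `KeyError`; the new `options.get("key")` yields `None`, which `check_mandatory_fields` then turns into a readable error. A tiny illustration with made-up config:

```python
# Why the switch to options.get(): a missing key now yields None (caught
# later by the config checker) instead of an immediate KeyError.
import json

options = json.loads('{"username": "alice"}')
print(options.get("username"))  # "alice"
print(options.get("password"))  # None -> reported by check_mandatory_fields
# options["password"] would raise KeyError right here instead
```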