Adding placeholder folders
@@ -204,9 +204,9 @@ class ModelExecutor(hass.Hass):
         actuators = tsh_config.actuators
         act_model_set = {}
         for act in actuators:
-            if os.path.isfile(f"/thesillyhome_src/data/model/{act}/best_model.pkl"):
+            if os.path.isfile(f"{tsh_config.data_dir}/model/{act}/best_model.pkl"):
                 with open(
-                    f"/thesillyhome_src/data/model/{act}/best_model.pkl",
+                    f"{tsh_config.data_dir}/model/{act}/best_model.pkl",
                     "rb",
                 ) as pickle_file:
                     content = pickle.load(pickle_file)
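Note: this hunk and the get_base_columns hunk below make the same substitution, replacing the hardcoded /thesillyhome_src/data prefix with tsh_config.data_dir so every path derives from one setting. A minimal sketch of how such a config value could be exposed; the env-var fallback and the model_path helper are illustrative assumptions, not the actual tsh_config code:

import os

# Hypothetical tsh_config excerpt: resolve the data directory once so
# all consumers (model loading, parsed pickles) build paths from it.
data_dir = os.environ.get("TSH_DATA_DIR", "/thesillyhome_src/data")

def model_path(actuator: str) -> str:
    # e.g. model_path("light.kitchen") -> ".../model/light.kitchen/best_model.pkl"
    return f"{data_dir}/model/{actuator}/best_model.pkl"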
@@ -218,7 +218,7 @@ class ModelExecutor(hass.Hass):
     def get_base_columns(self):
         # Get feature list from parsed data header, set all columns to 0
         base_columns = pd.read_pickle(
-            "/thesillyhome_src/data/parsed/act_states.pkl"
+            f"{tsh_config.data_dir}/parsed/act_states.pkl"
         ).columns
         base_columns = sorted(
             list(set(base_columns) - set(["entity_id", "state", "duplicate"]))
@@ -240,7 +240,6 @@ class ModelExecutor(hass.Hass):
         devices = actuators + sensors
         now = datetime.datetime.now()
-
         if entity in devices:
             self.log(f"\n")
             self.log(f"<--- {entity} is {new} --->")
@@ -291,7 +290,6 @@ class ModelExecutor(hass.Hass):
         )
         all_rules = all_rules.drop(columns=["index"])
-
         enabled_actuators = self.read_actuators()
         if entity in actuators:
             # Adding rules
@@ -321,17 +319,25 @@ class ModelExecutor(hass.Hass):

                 rule_to_verify = df_sen_states_less.copy()
                 rule_to_verify = rule_to_verify[
-                    self.unverified_features(rule_to_verify.columns.values.tolist())
+                    self.unverified_features(
+                        rule_to_verify.columns.values.tolist()
+                    )
                 ]
                 rule_to_verify["entity_id"] = act

-                if self.verify_rules(act, rule_to_verify, prediction, all_rules):
+                if self.verify_rules(
+                    act, rule_to_verify, prediction, all_rules
+                ):
                     # Execute actions
-                    self.log(f"---Predicted {act} as {prediction}", level="INFO")
+                    self.log(
+                        f"---Predicted {act} as {prediction}", level="INFO"
+                    )
                     if (prediction == 1) and (all_states[act]["state"] != "on"):
                         self.log(f"---Turn on {act}")
                         self.turn_on(act)
-                    elif (prediction == 0) and (all_states[act]["state"] != "off"):
+                    elif (prediction == 0) and (
+                        all_states[act]["state"] != "off"
+                    ):
                         self.log(f"---Turn off {act}")
                         self.turn_off(act)
                     else:
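Note: the hunk above only re-wraps long lines (black-style); the behavior is unchanged. For readers skimming the diff, a condensed sketch of the gate it reformats, where verify_rules, turn_on and turn_off are stand-ins for the real ModelExecutor/AppDaemon methods:

# Illustrative condensation of the prediction gate: the rule engine may
# veto the classifier, and an action fires only on an actual state change.
def apply_prediction(act, prediction, current_state, verify_rules, turn_on, turn_off):
    if not verify_rules(act, prediction):
        return  # vetoed: model output contradicts a learned rule
    if prediction == 1 and current_state != "on":
        turn_on(act)
    elif prediction == 0 and current_state != "off":
        turn_off(act)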
thesillyhome_src/data/config/.gitplaceholder | 0 (new file)
thesillyhome_src/data/model/.gitplaceholder  | 0 (new file)
thesillyhome_src/data/parsed/.gitplaceholder | 0 (new file)
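Note: git does not track empty directories, so these zero-byte .gitplaceholder files are what actually creates data/config, data/model and data/parsed in a fresh clone, the "placeholder folders" of the commit title. An equivalent runtime safeguard, if one preferred code over placeholder files (the directory layout is taken from the paths above):

import os

# Ensure the expected data subfolders exist even without placeholder files.
data_dir = "/thesillyhome_src/data"  # or tsh_config.data_dir after this commit
for sub in ("config", "model", "parsed"):
    os.makedirs(os.path.join(data_dir, sub), exist_ok=True)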
@@ -29,37 +29,39 @@ class homedb:
         self.database = tsh_config.db_database
         self.db_type = tsh_config.db_type
         self.share_data = tsh_config.share_data
+        self.from_cache = False
         self.mydb = self.connect_internal_db()
         self.extdb = self.connect_external_db()

     def connect_internal_db(self):
-        if self.db_type == "mariadb":
-            mydb = mysql.connector.connect(
-                host=self.host,
-                port=self.port,
-                user=self.username,
-                password=self.password,
-                database=self.database,
-            )
-        elif self.db_type == "postgres":
-            mydb = psycopg2.connect(
-                host=self.host,
-                port=self.port,
-                user=self.username,
-                password=self.password,
-                database=self.database,
-            )
-        else:
-            logging.info("DB type is mariadb or postgres.")
-        return mydb
+        if not self.from_cache:
+            if self.db_type == "mariadb":
+                mydb = mysql.connector.connect(
+                    host=self.host,
+                    port=self.port,
+                    user=self.username,
+                    password=self.password,
+                    database=self.database,
+                )
+            elif self.db_type == "postgres":
+                mydb = psycopg2.connect(
+                    host=self.host,
+                    port=self.port,
+                    user=self.username,
+                    password=self.password,
+                    database=self.database,
+                )
+            else:
+                logging.info("DB type is mariadb or postgres.")
+            return mydb
+        return None

-    def get_data(self, from_cache=False):
+    def get_data(self):
         logging.info("Getting data from internal homeassistant db")

-        if from_cache and os.path.exists(
-            f"{tsh_config.data_dir}/parsed/all_states.pkl"
-        ):
+        if self.from_cache:
             logging.info("Using cached all_states.pkl")
             return pd.read_pickle(f"{tsh_config.data_dir}/parsed/all_states.pkl")
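Note: this hunk threads a single self.from_cache flag through homedb: when it is true, connect_internal_db returns None instead of opening a MariaDB/Postgres connection, and get_data short-circuits to the pickled snapshot. A minimal sketch of that flow, assuming only the class shape visible in the diff (everything else here is illustrative):

import pandas as pd

class HomeDBSketch:
    def __init__(self, data_dir: str, from_cache: bool = False):
        self.data_dir = data_dir
        self.from_cache = from_cache
        # In cache mode no DB handle is created at all.
        self.mydb = None if from_cache else self._connect()

    def _connect(self):
        raise NotImplementedError  # mysql.connector / psycopg2, as in the diff

    def get_data(self) -> pd.DataFrame:
        if self.from_cache:
            # Unlike the pre-change code, the new branch no longer checks
            # os.path.exists first, so a missing pickle raises here.
            return pd.read_pickle(f"{self.data_dir}/parsed/all_states.pkl")
        raise NotImplementedError  # query the live database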
@@ -98,6 +98,7 @@ def parse_data_from_db():
     Code to add one hot encoding for date time.
     This will help give features for time of day and day of the week.
     """
     df_output["last_updated"] = pd.to_datetime(df_output["last_updated"])
+    df_output["hour"] = df_output["last_updated"].dt.hour
     df_output["weekday"] = df_output["last_updated"].dt.date.apply(
         lambda x: x.weekday()
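Note: the added line gives the parser an hour feature next to the existing weekday one, so downstream models see both time-of-day and day-of-week. A runnable sketch of the same encoding on a toy frame (.dt.weekday is equivalent to the .dt.date.apply(lambda x: x.weekday()) form used in the file):

import pandas as pd

df = pd.DataFrame({"last_updated": ["2023-01-02 07:30:00", "2023-01-07 22:15:00"]})
df["last_updated"] = pd.to_datetime(df["last_updated"])
df["hour"] = df["last_updated"].dt.hour        # 0-23
df["weekday"] = df["last_updated"].dt.weekday  # Monday=0 ... Sunday=6
print(df)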