Mirror of https://github.com/Burningstone91/smart-home-setup.git, synced 2022-05-05 21:16:50 +03:00
Move custom components to HACS for easier updating
@@ -40,6 +40,8 @@ from homeassistant.components.light import (
is_on,
COLOR_MODE_RGB,
COLOR_MODE_RGBW,
COLOR_MODE_HS,
COLOR_MODE_XY,
COLOR_MODE_COLOR_TEMP,
COLOR_MODE_BRIGHTNESS,
ATTR_SUPPORTED_COLOR_MODES,
@@ -395,6 +397,12 @@ def _supported_features(hass: HomeAssistant, light: str):
if COLOR_MODE_RGBW in supported_color_modes:
supported.add("color")
supported.add("brightness") # see above url
if COLOR_MODE_XY in supported_color_modes:
supported.add("color")
supported.add("brightness") # see above url
if COLOR_MODE_HS in supported_color_modes:
supported.add("color")
supported.add("brightness") # see above url
if COLOR_MODE_COLOR_TEMP in supported_color_modes:
supported.add("color_temp")
supported.add("brightness") # see above url
@@ -1042,7 +1050,10 @@ class SunLightSettings:
def _replace_time(date: datetime.datetime, key: str) -> datetime.datetime:
time = getattr(self, f"{key}_time")
date_time = datetime.datetime.combine(date, time)
utc_time = self.time_zone.localize(date_time).astimezone(dt_util.UTC)
try: # HA ≤2021.05, https://github.com/basnijholt/adaptive-lighting/issues/128
utc_time = self.time_zone.localize(date_time).astimezone(dt_util.UTC)
except AttributeError: # HA ≥2021.06
utc_time = date_time.replace(tzinfo=dt_util.DEFAULT_TIME_ZONE).astimezone(dt_util.UTC)
return utc_time

location = self.astral_location
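The `_replace_time` hunk above swaps a hard dependency on pytz-style `localize()` for a try/except, so the component keeps working across the point where Home Assistant changed its timezone backend. A minimal sketch of that pattern, with illustrative names rather than the component's own:

    # Sketch only: convert a naive local datetime to UTC whether the configured
    # time zone is a pytz zone (needs .localize()) or a zoneinfo-style zone
    # (attached with .replace()).
    import datetime

    def naive_local_to_utc(naive_dt: datetime.datetime, time_zone) -> datetime.datetime:
        try:
            local_dt = time_zone.localize(naive_dt)  # pytz-style zone
        except AttributeError:
            local_dt = naive_dt.replace(tzinfo=time_zone)  # zoneinfo-style zone
        return local_dt.astimezone(datetime.timezone.utc)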
49 home-assistant/custom_components/adaptive_lighting/translations/da.json (Executable file)
@@ -0,0 +1,49 @@
{
"title": "Adaptiv Belysning",
"config": {
"step": {
"user": {
"title": "Vælg et navn for denne Adaptive Belysning",
"description": "Vælg et navn til denne konfiguration. Du kan køre flere konfigurationer af Adaptiv Belysning, og hver af dem kan indeholde flere lys!",
"data": {
"name": "Navn"
}
}
},
"abort": {
"already_configured": "Denne enhed er allerede konfigureret"
}
},
"options": {
"step": {
"init": {
"title": "Adaptiv Belysnings indstillinger",
"description": "Alle indstillinger tilhørende en Adaptiv Belysnings komponent. Indstillingernes navne svarer til YAML indstillingernes. Ingen indstillinger vises hvis du allerede har konfigureret den i YAML.",
"data": {
"lights": "lights: lyskilder",
"initial_transition": "initial_transition: Hvor lang overgang når lyset går fra 'off' til 'on' eller når 'sleep_state' skiftes. (i sekunder)",
"interval": "interval: Tid imellem opdateringer (i sekunder)",
"max_brightness": "max_brightness: Højeste lysstyrke i cyklussen. (%)",
"max_color_temp": "max_color_temp: Koldeste lystemperatur i cyklussen. (Kelvin)",
"min_brightness": "min_brightness: Laveste lysstyrke i cyklussen. (%)",
"min_color_temp": "min_color_temp: Varmeste lystemperatur i cyklussen. (Kelvin)",
"only_once": "only_once: Juster udelukkende lysene adaptivt i øjeblikket de tændes.",
"prefer_rgb_color": "prefer_rgb_color: Brug 'rgb_color' istedet for 'color_temp' når muligt.",
"separate_turn_on_commands": "separate_turn_on_commands: Adskil kommandoerne for hver attribut (color, brightness, etc.) ved 'light.turn_on' (nødvendigt for bestemte lys).",
"sleep_brightness": "sleep_brightness, Lysstyrke for Sleep Mode. (%)",
"sleep_color_temp": "sleep_color_temp: Farvetemperatur under Sleep Mode. (Kelvin)",
"sunrise_offset": "sunrise_offset: Hvor længe før (-) eller efter (+) at definere solopgangen i cyklussen (+/- sekunder)",
"sunrise_time": "sunrise_time: Manuel overstyring af solopgangstidspunktet, hvis 'None', bruges det egentlige tidspunkt din lokation. (HH:MM:SS)",
"sunset_offset": "sunset_offset: Hvor længe før (-) eller efter (+) at definere solnedgangen i cyklussen (+/- sekunder)",
"sunset_time": "sunset_time: Manuel overstyring af solnedgangstidspunktet, hvis 'None', bruges det egentlige tidspunkt for din lokation. (HH:MM:SS)",
"take_over_control": "take_over_control: Hvis andet end Adaptiv Belysning kalder 'light.turn_on' på et lys der allerede er tændt, afbryd adaptering af lyset indtil at det tændes igen.",
"detect_non_ha_changes": "detect_non_ha_changes: Registrer alle ændringer på >10% på et lys (også udenfor HA), kræver at 'take_over_control' er slået til (kalder 'homeassistant.update_entity' hvert 'interval'!)",
"transition": "Overgangsperiode når en ændring i lyset udføres (i sekunder)"
}
}
},
"error": {
"option_error": "Ugyldig indstilling"
}
}
}
@@ -3,42 +3,42 @@
"config": {
"step": {
"user": {
"title": "Choose a name for the Adaptive Lighting",
"description": "Every instance can contain multiple lights!",
"title": "Choose a name for the Adaptive Lighting instance",
"description": "Pick a name for this instance. You can run several instances of Adaptive lighting, each of these can contain multiple lights!",
"data": {
"name": "Name"
}
}
},
"abort": {
"already_configured": "Device is already configured"
"already_configured": "This device is already configured"
}
},
"options": {
"step": {
"init": {
"title": "Adaptive Lighting options",
"description": "All settings for a Adaptive Lighting component. The option names correspond with the YAML settings. No options are shown if you have this entry defined in YAML.",
"description": "All settings for a Adaptive Lighting component. The option names correspond with the YAML settings. No options are shown if you have the adaptive_lighting entry defined in your YAML configuration.",
"data": {
"lights": "lights",
"initial_transition": "initial_transition, when lights go 'off' to 'on' or when 'sleep_state' changes",
"interval": "interval, time between switch updates in seconds",
"max_brightness": "max_brightness, in %",
"max_color_temp": "max_color_temp, in Kelvin",
"min_brightness": "min_brightness, in %",
"min_color_temp": "min_color_temp, in Kelvin",
"only_once": "only_once, only adapt the lights when turning them on",
"prefer_rgb_color": "prefer_rgb_color, use 'rgb_color' over 'color_temp' when possible",
"separate_turn_on_commands": "separate_turn_on_commands, for each attribute (color, brightness, etc.) in 'light.turn_on', required for some lights.",
"sleep_brightness": "sleep_brightness, in %",
"sleep_color_temp": "sleep_color_temp, in Kelvin",
"sunrise_offset": "sunrise_offset, in +/- seconds",
"sunrise_time": "sunrise_time, in 'HH:MM:SS' format (if 'None', it uses the actual sunrise time at your location)",
"sunset_offset": "sunset_offset, in +/- seconds",
"sunset_time": "sunset_time, in 'HH:MM:SS' format (if 'None', it uses the actual sunset time at your location)",
"take_over_control": "take_over_control, if anything but Adaptive Lighting calls 'light.turn_on' when a light is already on, stop adapting that light until it (or the switch) toggles off -> on.",
"detect_non_ha_changes": "detect_non_ha_changes, detects all >10% changes made to the lights (also outside of HA), requires 'take_over_control' to be enabled (calls 'homeassistant.update_entity' every 'interval'!)",
"transition": "transition, in seconds"
"initial_transition": "initial_transition: When lights turn 'off' to 'on' or when 'sleep_state' changes. (seconds)",
"interval": "interval: Time between switch updates. (seconds)",
"max_brightness": "max_brightness: Highest brightness of lights during a cycle. (%)",
"max_color_temp": "max_color_temp: Coldest hue of the color temperature cycle. (Kelvin)",
"min_brightness": "min_brightness: Lowest brightness of lights during a cycle. (%)",
"min_color_temp": "min_color_temp, Warmest hue of the color temperature cycle. (%)",
"only_once": "only_once: Only adapt the lights when turning them on.",
"prefer_rgb_color": "prefer_rgb_color: Use 'rgb_color' rather than 'color_temp' when possible.",
"separate_turn_on_commands": "separate_turn_on_commands: Separate the commands for each attribute (color, brightness, etc.) in 'light.turn_on' (required for some lights).",
"sleep_brightness": "sleep_brightness, Brightness setting for Sleep Mode. (%)",
"sleep_color_temp": "sleep_color_temp: Color temperature setting for Sleep Mode. (Kelvin)",
"sunrise_offset": "sunrise_offset: How long before(-) or after(+) to define the sunrise point of the cycle (+/- seconds)",
"sunrise_time": "sunrise_time: Manual override of the sunrise time, if 'None', it uses the actual sunrise time at your location (HH:MM:SS)",
"sunset_offset": "sunset_offset: How long before(-) or after(+) to define the sunset point of the cycle (+/- seconds)",
"sunset_time": "sunset_time: Manual override of the sunset time, if 'None', it uses the actual sunrise time at your location (HH:MM:SS)",
"take_over_control": "take_over_control: If anything but Adaptive Lighting calls 'light.turn_on' when a light is already on, stop adapting that light until it (or the switch) toggles off -> on.",
"detect_non_ha_changes": "detect_non_ha_changes: detects all >10% changes made to the lights (also outside of HA), requires 'take_over_control' to be enabled (calls 'homeassistant.update_entity' every 'interval'!)",
"transition": "Transition time when applying a change to the lights (seconds)"
}
}
},
49 home-assistant/custom_components/adaptive_lighting/translations/uk.json (Executable file)
@@ -0,0 +1,49 @@
{
"title": "Адаптивне освітлення",
"config": {
"step": {
"user": {
"title": "Оберіть ім’я для екземпляра адаптивного освітлення",
"description": "Оберіть ім’я для цього екземпляра. Ви можете мати декілька екземплярів адаптивного освітлення, кожен може містити декілька приладів!",
"data": {
"name": "Ім’я"
}
}
},
"abort": {
"already_configured": "Цей пристрій вже налаштовано"
}
},
"options": {
"step": {
"init": {
"title": "Опції адаптивного освітлення",
"description": "Всі налаштування компонента адаптивного освітлення. Назви опцій відповідають налаштуванням у YAML. Опції не відображаються, якщо ви вже визначили їх у компоненті adaptive_lighting вашої YAML-конфігурації.",
"data": {
"lights": "прилади",
"initial_transition": "initial_transition: Коли прилад вимикається (off), вмикається (on), або змінює 'sleep_state'. (секунди)",
"interval": "interval: Час між оновленнями перемикача. (секунди)",
"max_brightness": "max_brightness: Найвища яскравість світла під час циклу. (%)",
"max_color_temp": "max_color_temp: Найхолодніший відтінок циклу кольорової температури. (Кельвін)",
"min_brightness": "min_brightness: Найнижча яскравість світла під час циклу. (%)",
"min_color_temp": "min_color_temp: Найтепліший відтінок циклу кольорової температури. (%)",
"only_once": "only_once: Адаптувати світло лише після початкового увімкнення.",
"prefer_rgb_color": "prefer_rgb_color: Використовувати 'rgb_color' замість 'color_temp', коли можливо.",
"separate_turn_on_commands": "separate_turn_on_commands: Окремі команди для кожного атрибута (колір, яскравість, тощо.) в 'light.turn_on' (необхідні для деяких приладів).",
"sleep_brightness": "sleep_brightness: Налаштування яскравості для Режиму сну. (%)",
"sleep_color_temp": "sleep_color_temp: Температура кольору для Режиму сну. (Кельвін)",
"sunrise_offset": "sunrise_offset: Як за довго до(-) або після(+) визначати точку сходу сонця для циклу (+/- секунд)",
"sunrise_time": "sunrise_time: Ручний перезапис часу сходу сонця, якщо 'None', тоді використовується час сходу сонця у вашій локації (HH:MM:SS)",
"sunset_offset": "sunset_offset: Як за довго до(-) або після(+) визначати точку заходу сонця для циклу (+/- секунд)",
"sunset_time": "sunset_time: Ручний перезапис часу заходу сонця, якщо 'None', тоді використовується час заходу сонця у вашій локації (HH:MM:SS)",
"take_over_control": "take_over_control: Якщо що-небудь, окрім Адаптивного освітлення, викликає 'light.turn_on', коли світло вже увімкнено, чи адаптувати освітлення допоки світло (або перемикач) перемкнеться (off -> on).",
"detect_non_ha_changes": "detect_non_ha_changes: виявляти всі зміни >10% до освітлення (включаючи ті, що зроблені поза HA), вимагає, щоб 'take_over_control' був включений (виклик 'homeassistant.update_entity' кожного оновлення 'interval'!)",
"transition": "Час переходу, який застосовується до освітлення (секунди)"
}
}
},
"error": {
"option_error": "Хибна опція"
}
}
}
File diff suppressed because one or more lines are too long
BIN home-assistant/custom_components/browser_mod/browser_mod.js.gz (Executable file)
Binary file not shown.
@@ -5,8 +5,12 @@ from homeassistant.components.camera import Camera

from .helpers import setup_platform, BrowserModEntity

import logging

PLATFORM = "camera"

LOGGER = logging.Logger(__name__)


async def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
return setup_platform(hass, config, async_add_devices, PLATFORM, BrowserModCamera)
@@ -30,7 +34,7 @@ class BrowserModCamera(Camera, BrowserModEntity):
self.schedule_update_ha_state()

def camera_image(self):
return base64.b64decode(self.data.split(",")[1])
return base64.b64decode(self.data.split(",")[-1])

@property
def device_state_attributes(self):
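The `camera_image` change above replaces `split(",")[1]` with `split(",")[-1]`. A small illustrative sketch (not browser_mod's code) of why taking the last element is more forgiving when a frame may arrive with or without a `data:...;base64,` prefix:

    import base64

    def decode_frame(data: str) -> bytes:
        # "data:image/jpeg;base64,<payload>" -> ["data:image/jpeg;base64", "<payload>"]
        # "<payload>" (no prefix)            -> ["<payload>"]; index 1 would raise IndexError
        return base64.b64decode(data.split(",")[-1])

    print(decode_frame("data:text/plain;base64,aGVsbG8="))  # b'hello'
    print(decode_frame("aGVsbG8="))  # b'hello'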
@@ -46,7 +46,9 @@ def create_entity(hass, platform, deviceID, connection):
or CONFIG_DISABLE_ALL in hass.data[DOMAIN][DATA_CONFIG].get(CONFIG_DISABLE, [])
):
return None
adder = hass.data[DOMAIN][DATA_ADDERS][platform]
adder = hass.data[DOMAIN][DATA_ADDERS].get(platform)
if not adder:
return None
entity = adder(hass, deviceID, connection, get_alias(hass, deviceID))
return entity
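The hunk above guards against a platform whose adder has not been registered yet: `dict.get()` plus an explicit falsy check replaces a raw `[platform]` lookup that would raise `KeyError`. A tiny sketch of the same defensive lookup, using made-up names:

    # Sketch: return None instead of raising when a platform adder is missing.
    adders = {"camera": lambda device_id: f"camera entity for {device_id}"}

    def create(platform, device_id):
        adder = adders.get(platform)  # None if the platform never registered
        if not adder:
            return None
        return adder(device_id)

    print(create("camera", "abc123"))  # 'camera entity for abc123'
    print(create("sensor", "abc123"))  # None, no KeyError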
@@ -10,6 +10,7 @@ from homeassistant.core import HomeAssistant

from .enums import HacsDisabledReason, HacsStage
from .helpers.functions.logger import getLogger
from .hacsbase.configuration import Configuration
from .models.core import HacsCore
from .models.frontend import HacsFrontend
from .models.system import HacsSystem
@@ -44,6 +45,7 @@ class HacsBaseAttributes:
_default: Optional[AIOGitHubAPIRepository]
_github: Optional[AIOGitHubAPI]
_hass: Optional[HomeAssistant]
_configuration: Optional[Configuration]
_repository: Optional[AIOGitHubAPIRepository]
_stage: HacsStage = HacsStage.SETUP
_common: Optional[HacsCommon]
@@ -111,6 +113,16 @@ class HacsBase(HacsBaseAttributes):
"""Set the value for the default property."""
self._hass = value

@property
def configuration(self) -> Optional[Configuration]:
"""Returns a Configuration object."""
return self._configuration

@configuration.setter
def configuration(self, value: Configuration) -> None:
"""Set the value for the default property."""
self._configuration = value

@property
def integration_dir(self) -> pathlib.Path:
"""Return the HACS integration dir."""
@@ -11,6 +11,7 @@ from homeassistant.helpers.event import async_call_later

from custom_components.hacs.const import CLIENT_ID, DOMAIN, MINIMUM_HA_VERSION
from custom_components.hacs.helpers.functions.configuration_schema import (
RELEASE_LIMIT,
hacs_config_option_schema,
)
from custom_components.hacs.helpers.functions.logger import getLogger
@@ -141,6 +142,9 @@ class HacsOptionsFlowHandler(config_entries.OptionsFlow):
"""Handle a flow initialized by the user."""
hacs: HacsBase = get_hacs()
if user_input is not None:
limit = int(user_input.get(RELEASE_LIMIT, 5))
if limit <= 0 or limit > 100:
return self.async_abort(reason="release_limit_value")
return self.async_create_entry(title="", data=user_input)

if hacs.configuration is None:
@@ -3,10 +3,10 @@ from aiogithubapi.common.const import ACCEPT_HEADERS

NAME_LONG = "HACS (Home Assistant Community Store)"
NAME_SHORT = "HACS"
INTEGRATION_VERSION = "1.12.3"
INTEGRATION_VERSION = "1.13.2"
DOMAIN = "hacs"
CLIENT_ID = "395a8e669c5de9f7c6e8"
MINIMUM_HA_VERSION = "2020.12.0"
MINIMUM_HA_VERSION = "2021.2.0"
PROJECT_URL = "https://github.com/hacs/integration/"
CUSTOM_UPDATER_LOCATIONS = [
"{}/custom_components/custom_updater.py",
@@ -19,6 +19,7 @@ class LovelaceMode(str, Enum):

STORAGE = "storage"
AUTO = "auto"
AUTO_GEN = "auto-gen"
YAML = "yaml"
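`LovelaceMode` subclasses both `str` and `Enum`, which is what allows later hunks (for example the frontend cache check) to compare `hacs.core.lovelace_mode` directly against plain strings. A quick illustration of that behaviour:

    from enum import Enum

    class LovelaceMode(str, Enum):
        STORAGE = "storage"
        AUTO = "auto"
        AUTO_GEN = "auto-gen"
        YAML = "yaml"

    # A str-Enum member compares equal to its string value, and can be built from it.
    print(LovelaceMode("yaml") is LovelaceMode.YAML)  # True
    print(LovelaceMode.STORAGE == "storage")  # True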
@@ -1,7 +1,6 @@
"""Data handler for HACS."""
import os

from queueman import QueueManager
import asyncio

from custom_components.hacs.const import INTEGRATION_VERSION
from custom_components.hacs.helpers.classes.manifest import HacsManifest
@@ -11,11 +10,27 @@ from custom_components.hacs.helpers.functions.register_repository import (
)
from custom_components.hacs.helpers.functions.store import (
async_load_from_store,
async_save_to_store_default_encoder,
async_save_to_store,
get_store_for_key,
)
from custom_components.hacs.share import get_hacs

from homeassistant.core import callback


def update_repository_from_storage(repository, storage_data):
"""Merge in data from storage into the repo data."""
repository.data.memorize_storage(storage_data)
repository.data.update_data(storage_data)
if repository.data.installed:
return

repository.logger.debug(
"%s Should be installed but is not... Fixing that!", repository
)
repository.data.installed = True


class HacsData:
"""HacsData class."""
@@ -24,7 +39,6 @@ class HacsData:
"""Initialize."""
self.logger = getLogger()
self.hacs = get_hacs()
self.queue = QueueManager()
self.content = {}

async def async_write(self):
@@ -44,21 +58,19 @@ class HacsData:
"onboarding_done": self.hacs.configuration.onboarding_done,
},
)
await self._async_store_content_and_repos()
for event in ("hacs/repository", "hacs/config"):
self.hacs.hass.bus.async_fire(event, {})

async def _async_store_content_and_repos(self): # bb: ignore
"""Store the main repos file and each repo that is out of date."""
# Repositories
self.content = {}
for repository in self.hacs.repositories or []:
self.queue.add(self.async_store_repository_data(repository))
# Not run concurrently since this is bound by disk I/O
for repository in self.hacs.repositories:
await self.async_store_repository_data(repository)

if not self.queue.has_pending_tasks:
self.logger.debug("Nothing in the queue")
elif self.queue.running:
self.logger.debug("Queue is already running")
else:
await self.queue.execute()
await async_save_to_store(self.hacs.hass, "repositories", self.content)
self.hacs.hass.bus.async_fire("hacs/repository", {})
self.hacs.hass.bus.async_fire("hacs/config", {})

async def async_store_repository_data(self, repository):
repository_manifest = repository.repository_manifest.manifest
@@ -85,84 +97,83 @@ class HacsData:
"topics": repository.data.topics,
"version_installed": repository.data.installed_version,
}
if data:
if repository.data.installed and (
repository.data.installed_commit or repository.data.installed_version
):
await async_save_to_store(
self.hacs.hass,
f"hacs/{repository.data.id}.hacs",
repository.data.to_json(),
)
self.content[str(repository.data.id)] = data
self.content[str(repository.data.id)] = data

if (
repository.data.installed
and (repository.data.installed_commit or repository.data.installed_version)
and (export := repository.data.export_data())
):
# export_data will return `None` if the memorized
# data is already up to date which allows us to avoid
# writing data that is already up to date or generating
# executor jobs to check the data on disk to see
# if a write is needed.
await async_save_to_store_default_encoder(
self.hacs.hass,
f"hacs/{repository.data.id}.hacs",
export,
)
repository.data.memorize_storage(export)

async def restore(self):
"""Restore saved data."""
hacs = await async_load_from_store(self.hacs.hass, "hacs")
repositories = await async_load_from_store(self.hacs.hass, "repositories")
repositories = await async_load_from_store(self.hacs.hass, "repositories") or {}

if not hacs and not repositories:
# Assume new install
self.hacs.status.new = True
return True
self.logger.info("Restore started")
self.hacs.status.new = False

# Hacs
self.hacs.configuration.frontend_mode = hacs.get("view", "Grid")
self.hacs.configuration.frontend_compact = hacs.get("compact", False)
self.hacs.configuration.onboarding_done = hacs.get("onboarding_done", False)

# Repositories
hass = self.hacs.hass
stores = {}

try:
if not hacs and not repositories:
# Assume new install
self.hacs.status.new = True
return True
self.logger.info("Restore started")
self.hacs.status.new = False
await self.register_unknown_repositories(repositories)

# Hacs
self.hacs.configuration.frontend_mode = hacs.get("view", "Grid")
self.hacs.configuration.frontend_compact = hacs.get("compact", False)
self.hacs.configuration.onboarding_done = hacs.get("onboarding_done", False)
for entry, repo_data in repositories.items():
if self.async_restore_repository(entry, repo_data):
stores[entry] = get_store_for_key(hass, f"hacs/{entry}.hacs")

# Repositories
stores = {}
for entry in repositories or []:
stores[entry] = get_store_for_key(self.hacs.hass, f"hacs/{entry}.hacs")

stores_exist = {}

def _populate_stores():
for entry in repositories or []:
stores_exist[entry] = os.path.exists(stores[entry].path)

await self.hacs.hass.async_add_executor_job(_populate_stores)

# Repositories
for entry in repositories or []:
self.queue.add(
self.async_restore_repository(
entry, repositories[entry], stores[entry], stores_exist[entry]
)
)

await self.queue.execute()
def _load_from_storage():
for entry, store in stores.items():
if os.path.exists(store.path) and (data := store.load()):
update_repository_from_storage(self.hacs.get_by_id(entry), data)

await hass.async_add_executor_job(_load_from_storage)
self.logger.info("Restore done")
except (Exception, BaseException) as exception: # pylint: disable=broad-except
self.logger.critical(f"[{exception}] Restore Failed!")
self.logger.critical(f"[{exception}] Restore Failed!", exc_info=exception)
return False
return True

async def async_restore_repository(
self, entry, repository_data, store, store_exists
):
if not self.hacs.is_known(entry):
await register_repository(
repository_data["full_name"], repository_data["category"], False
)
repository = [
x
for x in self.hacs.repositories
if str(x.data.id) == str(entry)
or x.data.full_name == repository_data["full_name"]
async def register_unknown_repositories(self, repositories):
"""Registry any unknown repositories."""
register_tasks = [
register_repository(repo_data["full_name"], repo_data["category"], False)
for entry, repo_data in repositories.items()
if not self.hacs.is_known(entry)
]
if not repository:
self.logger.error(f"Did not find {repository_data['full_name']} ({entry})")
return

repository = repository[0]
if register_tasks:
await asyncio.gather(*register_tasks)

@callback
def async_restore_repository(self, entry, repository_data):
full_name = repository_data["full_name"]
if not (repository := self.hacs.get_by_name(full_name)):
self.logger.error(f"Did not find {full_name} ({entry})")
return False
# Restore repository attributes
repository.data.id = entry
self.hacs.async_set_repository_id(repository, entry)
repository.data.authors = repository_data.get("authors", [])
repository.data.description = repository_data.get("description")
repository.releases.last_release_object_downloads = repository_data.get(
@@ -195,12 +206,4 @@ class HacsData:
repository.data.installed_version = INTEGRATION_VERSION
repository.data.installed = True

restored = store_exists and await store.async_load() or {}

if restored:
repository.data.update_data(restored)
if not repository.data.installed:
repository.logger.debug(
"Should be installed but is not... Fixing that!"
)
repository.data.installed = True
return True
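In the rewritten restore path above, repositories that HACS has not seen before are now registered concurrently with `asyncio.gather` instead of being awaited one at a time through the old queue. A stripped-down sketch of that fan-out pattern (simplified names and signatures, not HACS's own):

    import asyncio

    async def register_repository(full_name: str, category: str) -> str:
        await asyncio.sleep(0.1)  # stand-in for the real network-bound registration
        return full_name

    async def register_unknown(repositories: dict, known: set) -> list:
        tasks = [
            register_repository(data["full_name"], data["category"])
            for entry, data in repositories.items()
            if entry not in known
        ]
        return await asyncio.gather(*tasks) if tasks else []

    repos = {
        "1": {"full_name": "hacs/integration", "category": "integration"},
        "2": {"full_name": "example/theme", "category": "theme"},
    }
    print(asyncio.run(register_unknown(repos, known={"1"})))  # ['example/theme']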
@@ -71,13 +71,15 @@ class System:
ha_version = None
disabled = False
running = False
lovelace_mode = "storage"
lovelace_mode = "yaml"


class Hacs(HacsBase, HacsHelpers):
"""The base class of HACS, nested throughout the project."""

repositories = []
_repositories = []
_repositories_by_id = {}
_repositories_by_full_name = {}
repo = None
data_repo = None
data = None
@@ -90,30 +92,67 @@ class Hacs(HacsBase, HacsHelpers):
recuring_tasks = []
common = HacsCommon()

@property
def repositories(self):
"""Return the full repositories list."""
return self._repositories

def async_set_repositories(self, repositories):
"""Set the list of repositories."""
self._repositories = []
self._repositories_by_id = {}
self._repositories_by_full_name = {}

for repository in repositories:
self.async_add_repository(repository)

def async_set_repository_id(self, repository, repo_id):
"""Update a repository id."""
existing_repo_id = str(repository.data.id)
if existing_repo_id == repo_id:
return
if existing_repo_id != "0":
raise ValueError(
f"The repo id for {repository.data.full_name_lower} is already set to {existing_repo_id}"
)
repository.data.id = repo_id
self._repositories_by_id[repo_id] = repository

def async_add_repository(self, repository):
"""Add a repository to the list."""
if repository.data.full_name_lower in self._repositories_by_full_name:
raise ValueError(
f"The repo {repository.data.full_name_lower} is already added"
)
self._repositories.append(repository)
repo_id = str(repository.data.id)
if repo_id != "0":
self._repositories_by_id[repo_id] = repository
self._repositories_by_full_name[repository.data.full_name_lower] = repository

def async_remove_repository(self, repository):
"""Remove a repository from the list."""
if repository.data.full_name_lower not in self._repositories_by_full_name:
return
self._repositories.remove(repository)
repo_id = str(repository.data.id)
if repo_id in self._repositories_by_id:
del self._repositories_by_id[repo_id]
del self._repositories_by_full_name[repository.data.full_name_lower]

def get_by_id(self, repository_id):
"""Get repository by ID."""
try:
for repository in self.repositories:
if str(repository.data.id) == str(repository_id):
return repository
except (Exception, BaseException): # pylint: disable=broad-except
pass
return None
return self._repositories_by_id.get(str(repository_id))

def get_by_name(self, repository_full_name):
"""Get repository by full_name."""
try:
repository_full_name_lower = repository_full_name.lower()
for repository in self.repositories:
if repository.data.full_name_lower == repository_full_name_lower:
return repository
except (Exception, BaseException): # pylint: disable=broad-except
pass
return None
if repository_full_name is None:
return None
return self._repositories_by_full_name.get(repository_full_name.lower())

def is_known(self, repository_id):
"""Return a bool if the repository is known."""
return str(repository_id) in [str(x.data.id) for x in self.repositories]
return str(repository_id) in self._repositories_by_id

@property
def sorted_by_name(self):
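The rewritten `Hacs` class above replaces linear scans in `get_by_id`/`get_by_name` with two dictionaries that are kept in sync whenever a repository is added, removed, or assigned an id. A minimal sketch of that indexing idea (illustrative only, not the real class):

    class RepoIndex:
        """Keep a list plus two lookup dicts in sync for O(1) access."""

        def __init__(self):
            self._repos = []
            self._by_id = {}
            self._by_name = {}

        def add(self, repo_id, full_name):
            repo = {"id": str(repo_id), "full_name": full_name.lower()}
            self._repos.append(repo)
            if repo["id"] != "0":  # "0" means the id is not assigned yet
                self._by_id[repo["id"]] = repo
            self._by_name[repo["full_name"]] = repo

        def get_by_id(self, repo_id):
            return self._by_id.get(str(repo_id))

        def get_by_name(self, full_name):
            return None if full_name is None else self._by_name.get(full_name.lower())

    index = RepoIndex()
    index.add(172733314, "hacs/integration")
    print(index.get_by_name("HACS/Integration")["id"])  # 172733314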
@@ -384,7 +384,7 @@ class HacsRepository(RepositoryHelpers):
self.hacs.common.installed.remove(self.data.id)
for repository in self.hacs.repositories:
if repository.data.id == self.data.id:
self.hacs.repositories.remove(repository)
self.hacs.async_remove_repository(repository)

async def uninstall(self):
"""Run uninstall tasks."""
@@ -418,7 +418,6 @@ class HacsRepository(RepositoryHelpers):

async def remove_local_directory(self):
"""Check the local directory."""
import shutil
from asyncio import sleep

try:
@@ -1,8 +1,11 @@
"""Repository data."""
from datetime import datetime
from typing import List
from typing import List, Optional

import attr
import json

from homeassistant.helpers.json import JSONEncoder


@attr.s(auto_attribs=True)
@@ -51,6 +54,7 @@ class RepositoryData:
stargazers_count: int = 0
topics: List[str] = []
zip_release: bool = False
_storage_data: Optional[dict] = None

@property
def stars(self):
@@ -66,64 +70,80 @@ class RepositoryData:

def to_json(self):
"""Export to json."""
return attr.asdict(self)
return attr.asdict(self, filter=lambda attr, _: attr.name != "_storage_data")

def memorize_storage(self, data) -> None:
"""Memorize the storage data."""
self._storage_data = data

def export_data(self) -> Optional[dict]:
"""Export to json if the data has changed.

Returns the data to export if the data needs
to be written.

Returns None if the data has not changed.
"""
export = json.loads(json.dumps(self.to_json(), cls=JSONEncoder))
return None if self._storage_data == export else export

@staticmethod
def create_from_dict(source: dict):
"""Set attributes from dicts."""
data = RepositoryData()
for key in source:
print(key)
if key in data.__dict__:
if key == "pushed_at":
if source[key] == "":
continue
if "Z" in source[key]:
setattr(
data,
key,
datetime.strptime(source[key], "%Y-%m-%dT%H:%M:%SZ"),
)
else:
setattr(
data,
key,
datetime.strptime(source[key], "%Y-%m-%dT%H:%M:%S"),
)
elif key == "id":
setattr(data, key, str(source[key]))
elif key == "country":
if isinstance(source[key], str):
setattr(data, key, [source[key]])
else:
setattr(data, key, source[key])
if key not in data.__dict__:
continue
if key == "pushed_at":
if source[key] == "":
continue
if "Z" in source[key]:
setattr(
data,
key,
datetime.strptime(source[key], "%Y-%m-%dT%H:%M:%SZ"),
)
else:
setattr(
data,
key,
datetime.strptime(source[key], "%Y-%m-%dT%H:%M:%S"),
)
elif key == "id":
setattr(data, key, str(source[key]))
elif key == "country":
if isinstance(source[key], str):
setattr(data, key, [source[key]])
else:
setattr(data, key, source[key])
else:
setattr(data, key, source[key])
return data

def update_data(self, data: dict):
"""Update data of the repository."""
for key in data:
if key in self.__dict__:
if key == "pushed_at":
if data[key] == "":
continue
if "Z" in data[key]:
setattr(
self,
key,
datetime.strptime(data[key], "%Y-%m-%dT%H:%M:%SZ"),
)
else:
setattr(
self, key, datetime.strptime(data[key], "%Y-%m-%dT%H:%M:%S")
)
elif key == "id":
setattr(self, key, str(data[key]))
elif key == "country":
if isinstance(data[key], str):
setattr(self, key, [data[key]])
else:
setattr(self, key, data[key])
if key not in self.__dict__:
continue
if key == "pushed_at":
if data[key] == "":
continue
if "Z" in data[key]:
setattr(
self,
key,
datetime.strptime(data[key], "%Y-%m-%dT%H:%M:%SZ"),
)
else:
setattr(
self, key, datetime.strptime(data[key], "%Y-%m-%dT%H:%M:%S")
)
elif key == "id":
setattr(self, key, str(data[key]))
elif key == "country":
if isinstance(data[key], str):
setattr(self, key, [data[key]])
else:
setattr(self, key, data[key])
else:
setattr(self, key, data[key])
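The new `memorize_storage`/`export_data` pair above is what lets the data handler skip redundant disk writes: the last payload written to storage is remembered, and a fresh export is returned only when the JSON-serialised form differs. A small sketch of the same idea, assuming plain dicts instead of the attrs model:

    import json

    class ChangeTracker:
        def __init__(self):
            self._stored = None  # what we believe is already on disk

        def memorize(self, data):
            self._stored = data

        def export_if_changed(self, data):
            # Round-trip through JSON so values compare the way they serialise.
            export = json.loads(json.dumps(data))
            return None if export == self._stored else export

    tracker = ChangeTracker()
    payload = {"installed": True, "version": "1.13.2"}
    print(tracker.export_if_changed(payload) is not None)  # True -> write it
    tracker.memorize(payload)
    print(tracker.export_if_changed(payload))  # None -> skip the write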
@@ -210,37 +210,41 @@ async def download_content(repository):
@concurrent(10)
async def dowload_repository_content(repository, content):
"""Download content."""
repository.logger.debug(f"Downloading {content.name}")
try:
repository.logger.debug(f"Downloading {content.name}")

filecontent = await async_download_file(content.download_url)
filecontent = await async_download_file(content.download_url)

if filecontent is None:
if filecontent is None:
repository.validate.errors.append(f"[{content.name}] was not downloaded.")
return

# Save the content of the file.
if repository.content.single or content.path is None:
local_directory = repository.content.path.local

else:
_content_path = content.path
if not repository.data.content_in_root:
_content_path = _content_path.replace(
f"{repository.content.path.remote}", ""
)

local_directory = f"{repository.content.path.local}/{_content_path}"
local_directory = local_directory.split("/")
del local_directory[-1]
local_directory = "/".join(local_directory)

# Check local directory
pathlib.Path(local_directory).mkdir(parents=True, exist_ok=True)

local_file_path = (f"{local_directory}/{content.name}").replace("//", "/")

result = await async_save_file(local_file_path, filecontent)
if result:
repository.logger.info(f"Download of {content.name} completed")
return
repository.validate.errors.append(f"[{content.name}] was not downloaded.")
return

# Save the content of the file.
if repository.content.single or content.path is None:
local_directory = repository.content.path.local

else:
_content_path = content.path
if not repository.data.content_in_root:
_content_path = _content_path.replace(
f"{repository.content.path.remote}", ""
)

local_directory = f"{repository.content.path.local}/{_content_path}"
local_directory = local_directory.split("/")
del local_directory[-1]
local_directory = "/".join(local_directory)

# Check local directory
pathlib.Path(local_directory).mkdir(parents=True, exist_ok=True)

local_file_path = (f"{local_directory}/{content.name}").replace("//", "/")

result = await async_save_file(local_file_path, filecontent)
if result:
repository.logger.info(f"Download of {content.name} completed")
return
repository.validate.errors.append(f"[{content.name}] was not downloaded.")
except (Exception, BaseException) as exception: # pylint: disable=broad-except
repository.validate.errors.append(f"Download was not completed [{exception}]")
@@ -45,15 +45,10 @@ async def register_repository(full_name, category, check=True, ref=None):
f"Validation for {full_name} failed with {exception}."
) from None

exists = (
False
if str(repository.data.id) == "0"
else [x for x in hacs.repositories if str(x.data.id) == str(repository.data.id)]
)

if exists:
if exists[0] in hacs.repositories:
hacs.repositories.remove(exists[0])
if str(repository.data.id) != "0" and (
exists := hacs.get_by_id(repository.data.id)
):
hacs.async_remove_repository(exists)

else:
if hacs.hass is not None and (
@@ -67,4 +62,4 @@ async def register_repository(full_name, category, check=True, ref=None):
"repository_id": repository.data.id,
},
)
hacs.repositories.append(repository)
hacs.async_add_repository(repository)
@@ -2,43 +2,78 @@
# pylint: disable=import-outside-toplevel
from homeassistant.helpers.json import JSONEncoder

from homeassistant.helpers.storage import Store
from homeassistant.util import json as json_util

from custom_components.hacs.const import VERSION_STORAGE
from .logger import getLogger

_LOGGER = getLogger()


class HACSStore(Store):
"""A subclass of Store that allows multiple loads in the executor."""

def load(self):
"""Load the data from disk if version matches."""
data = json_util.load_json(self.path)
if data == {} or data["version"] != self.version:
return None
return data["data"]


def get_store_key(key):
"""Return the key to use with homeassistant.helpers.storage.Storage."""
return key if "/" in key else f"hacs.{key}"


def _get_store_for_key(hass, key, encoder):
"""Create a Store object for the key."""
return HACSStore(hass, VERSION_STORAGE, get_store_key(key), encoder=encoder)


def get_store_for_key(hass, key):
"""Create a Store object for the key."""
key = key if "/" in key else f"hacs.{key}"
from homeassistant.helpers.storage import Store

return Store(hass, VERSION_STORAGE, key, encoder=JSONEncoder)
return _get_store_for_key(hass, key, JSONEncoder)


async def async_load_from_store(hass, key):
"""Load the retained data from store and return de-serialized data."""
store = get_store_for_key(hass, key)
restored = await store.async_load()
if restored is None:
return {}
return restored
return await get_store_for_key(hass, key).async_load() or {}


async def async_save_to_store_default_encoder(hass, key, data):
"""Generate store json safe data to the filesystem.

The data is expected to be encodable with the default
python json encoder. It should have already been passed through
JSONEncoder if needed.
"""
await _get_store_for_key(hass, key, None).async_save(data)


async def async_save_to_store(hass, key, data):
"""Generate dynamic data to store and save it to the filesystem."""
"""Generate dynamic data to store and save it to the filesystem.

The data is only written if the content on the disk has changed
by reading the existing content and comparing it.

If the data has changed this will generate two executor jobs

If the data has not changed this will generate one executor job
"""
current = await async_load_from_store(hass, key)
if current is None or current != data:
await get_store_for_key(hass, key).async_save(data)
return
_LOGGER.debug(
"Did not store data for '%s'. Content did not change",
key if "/" in key else f"hacs.{key}",
get_store_key(key),
)


async def async_remove_store(hass, key):
"""Remove a store element that should no longer be used"""
"""Remove a store element that should no longer be used."""
if "/" not in key:
return
await get_store_for_key(hass, key).async_remove()
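`HACSStore` above adds a synchronous, version-checked `load()` so that many stores can be read inside a single executor job during restore. A condensed sketch of that pattern, with an invented file layout rather than Home Assistant's `Store` internals:

    import json
    import os

    class VersionedStore:
        """Sketch: a store whose synchronous load() is safe to run in an executor."""

        def __init__(self, path, version):
            self.path = path
            self.version = version

        def save(self, data):
            with open(self.path, "w", encoding="utf-8") as handle:
                json.dump({"version": self.version, "data": data}, handle)

        def load(self):
            if not os.path.exists(self.path):
                return None
            with open(self.path, encoding="utf-8") as handle:
                payload = json.load(handle)
            # Discard payloads written by an incompatible storage version.
            if not payload or payload.get("version") != self.version:
                return None
            return payload["data"]

    store = VersionedStore("/tmp/hacs_demo.json", version=6)
    store.save({"installed": True})
    print(store.load())  # {'installed': True}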
@@ -87,7 +87,7 @@ async def async_install_repository(repository):
await hacs.hass.async_add_executor_job(backup.create)

if repository.data.zip_release and version != repository.data.default_branch:
await repository.download_zip_files(repository)
await repository.download_zip_files(repository.validate)
else:
await download_content(repository)
@@ -20,8 +20,8 @@
"aiogithubapi>=21.4.0",
"awesomeversion>=21.2.2",
"backoff>=1.10.0",
"hacs_frontend==20210429001005",
"hacs_frontend==20210620205902",
"queueman==0.5"
],
"version": "1.12.3"
"version": "1.13.2"
}
@@ -12,4 +12,4 @@ class HacsCore:

config_path = attr.ib(Path)
ha_version = attr.ib(str)
lovelace_mode = LovelaceMode("storage")
lovelace_mode = LovelaceMode("yaml")
@@ -1,17 +1,23 @@
"""Remove HACS."""
from typing import TYPE_CHECKING
from ..const import DOMAIN
from ..enums import HacsDisabledReason
from ..share import get_hacs

if TYPE_CHECKING:
from homeassistant.core import HomeAssistant
from homeassistant.config_entries import ConfigEntry

async def async_remove_entry(hass, config_entry):

async def async_remove_entry(hass: "HomeAssistant", config_entry: "ConfigEntry"):
"""Handle removal of an entry."""
hacs = get_hacs()
hacs.log.info("Disabling HACS")
hacs.log.info("Removing recurring tasks")
for task in hacs.recuring_tasks:
task()
if config_entry.state == "loaded":

if str(config_entry.state) in ["ConfigEntryState.LOADED", "loaded"]:
hacs.log.info("Removing sensor")
try:
await hass.config_entries.async_forward_entry_unload(config_entry, "sensor")
@@ -14,7 +14,7 @@ from custom_components.hacs.const import (
INTEGRATION_VERSION,
STARTUP,
)
from custom_components.hacs.enums import HacsDisabledReason, HacsStage
from custom_components.hacs.enums import HacsDisabledReason, HacsStage, LovelaceMode
from custom_components.hacs.hacsbase.configuration import Configuration
from custom_components.hacs.hacsbase.data import HacsData
from custom_components.hacs.helpers.functions.constrains import check_constrains
@@ -127,7 +127,7 @@ async def async_hacs_startup():

try:
lovelace_info = await system_health_info(hacs.hass)
except (TypeError, HomeAssistantError):
except (TypeError, KeyError, HomeAssistantError):
# If this happens, the users YAML is not valid, we assume YAML mode
lovelace_info = {"mode": "yaml"}
hacs.log.debug(f"Configuration type: {hacs.configuration.config_type}")
@@ -136,6 +136,9 @@ async def async_hacs_startup():
hacs.core.config_path = hacs.hass.config.path()
hacs.system.ha_version = HAVERSION

hacs.system.lovelace_mode = lovelace_info.get("mode", "yaml")
hacs.core.lovelace_mode = LovelaceMode(lovelace_info.get("mode", "yaml"))

# Setup websocket API
await async_setup_hacs_websockt_api()

@@ -145,7 +148,6 @@ async def async_hacs_startup():
# Clear old storage files
await async_clear_storage()

hacs.system.lovelace_mode = lovelace_info.get("mode", "yaml")
hacs.enable()
hacs.github = GitHub(
hacs.configuration.token,
@@ -42,8 +42,16 @@ async def async_setup_frontend():
hass.data["frontend_extra_module_url"].add("/hacsfiles/iconset.js")

# Register www/community for all other files
use_cache = hacs.core.lovelace_mode == "storage"
hacs.log.info(
"%s mode, cache for /hacsfiles/: %s",
hacs.core.lovelace_mode,
use_cache,
)
hass.http.register_static_path(
URL_BASE, hass.config.path("www/community"), cache_headers=False
URL_BASE,
hass.config.path("www/community"),
cache_headers=use_cache,
)

hacs.frontend.version_running = FE_VERSION
@@ -63,7 +71,7 @@ async def async_setup_frontend():
"name": "hacs-frontend",
"embed_iframe": True,
"trust_external": False,
"js_url": "/hacsfiles/frontend/entrypoint.js",
"js_url": f"/hacsfiles/frontend/entrypoint.js?hacstag={FE_VERSION}",
}
},
require_admin=True,
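Two related frontend tweaks appear above: files under `/hacsfiles/` are served with cache headers only when Lovelace runs in storage mode, and the panel's `js_url` gains a `?hacstag=<frontend version>` query string so browsers fetch a fresh bundle after an update rather than a stale cached one. The cache-busting idea in isolation (illustrative values):

    # Sketch: derive a cache-busting URL from the bundled frontend version.
    FE_VERSION = "20210620205902"  # matches the hacs_frontend pin in the manifest above

    def entrypoint_url(version: str) -> str:
        return f"/hacsfiles/frontend/entrypoint.js?hacstag={version}"

    print(entrypoint_url(FE_VERSION))
    # /hacsfiles/frontend/entrypoint.js?hacstag=20210620205902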
@@ -10,7 +10,6 @@ from custom_components.hacs.helpers.functions.filters import (
from custom_components.hacs.helpers.functions.information import (
get_integration_manifest,
)
from custom_components.hacs.helpers.functions.logger import getLogger


class HacsIntegration(HacsRepository):

@@ -5,7 +5,6 @@ from custom_components.hacs.helpers.classes.repository import HacsRepository
from custom_components.hacs.helpers.functions.filters import (
get_first_directory_in_directory,
)
from custom_components.hacs.helpers.functions.logger import getLogger


class HacsNetdaemon(HacsRepository):

@@ -4,7 +4,6 @@ import json
from custom_components.hacs.helpers.classes.exceptions import HacsException
from custom_components.hacs.helpers.classes.repository import HacsRepository
from custom_components.hacs.helpers.functions.information import find_file_name
from custom_components.hacs.helpers.functions.logger import getLogger


class HacsPlugin(HacsRepository):

@@ -3,7 +3,6 @@ from custom_components.hacs.enums import HacsCategory
from custom_components.hacs.helpers.classes.exceptions import HacsException
from custom_components.hacs.helpers.classes.repository import HacsRepository
from custom_components.hacs.helpers.functions.information import find_file_name
from custom_components.hacs.helpers.functions.logger import getLogger


class HacsPythonScript(HacsRepository):

@@ -3,7 +3,6 @@ from custom_components.hacs.enums import HacsCategory
from custom_components.hacs.helpers.classes.exceptions import HacsException
from custom_components.hacs.helpers.classes.repository import HacsRepository
from custom_components.hacs.helpers.functions.information import find_file_name
from custom_components.hacs.helpers.functions.logger import getLogger


class HacsTheme(HacsRepository):
@@ -30,6 +30,10 @@
}
},
"options": {
"abort": {
"not_setup": "HACS is not setup.",
"release_limit_value": "The release limit needs to be between 1 and 100"
},
"step": {
"user": {
"data": {
@@ -94,4 +94,4 @@ def compose_node(self, parent, index):
self.ascend_resolver()
return node

yaml.composer.Composer.compose_node = compose_node
yaml.composer.Composer.compose_node = compose_node