Compare commits
4 Commits
master
...
refactor_d
| Author | SHA1 | Date |
|---|---|---|
|
|
d5c4b1e762 | |
|
|
0e695105eb | |
|
|
a5c26d09ed | |
|
|
fc41a56667 |
61
.env
|
|
@ -1,61 +0,0 @@
|
|||
UID=1000
|
||||
GID=1004
|
||||
DIR=/srv/docker
|
||||
DIR_LOCAL=/opt/docker_data
|
||||
DOMAIN=milvert.com
|
||||
DOMAIN_PEEK=peekskog.se
|
||||
|
||||
SECRETSDIR=/home/simon/repo/Docker/secrets
|
||||
|
||||
CLOUDFLARE_IPS=173.245.48.0/20,103.21.244.0/22,103.22.200.0/22,103.31.4.0/22,141.101.64.0/18,108.162.192.0/18,190.93.240.0/20,188.114.96.0/20,197.234.240.0/22,198.41.128.0/17,162.158.0.0/15,104.16.0.0/13,104.24.0.0/14,172.64.0.0/13,131.0.72.0/22,2400:cb00::/32,2606:4700::/32,2803:f800::/32,2405:b500::/32,2405:8100::/32,2a06:98c0::/29,2c0f:f248::/32
|
||||
LOCAL_IPS=127.0.0.1/32,10.0.0.0/8,192.168.0.0/16,172.16.0.0/12
|
||||
|
||||
MYSQL_USERNAME=root
|
||||
MYSQL_PASSWORD=qwerty
|
||||
MYSQL_ROOT_PASSWORD=bajsa
|
||||
|
||||
RSS_USERNAME=ttrss
|
||||
RSS_PASSWORD=bajsa
|
||||
RSS_DB=ttrss
|
||||
|
||||
POSTGRES_USER=root
|
||||
POSTGRES_PASSWORD=qwerty
|
||||
PGADMIN_DEFAULT_EMAIL=kalle@asd.com
|
||||
PGADMIN_DEFAULT_PASSWORD=bajsa
|
||||
|
||||
INFLUX_ADMIN=simon
|
||||
INFLUX_ADMIN_PASSWORD=bajsa123
|
||||
|
||||
LIBRESPEED_PW=NDVlMzA0MzQ0YmM3NTA0OGIxMzEzMjk4
|
||||
|
||||
PIHOLE_PW=NmE4Njc4Y2RjMmUxZmIxMWZkOTg1NTZj
|
||||
|
||||
UUID=1000
|
||||
PGID=1004
|
||||
TZ=Europe/Stockholm
|
||||
|
||||
TGRAMAPI=5306622927:AAGsDD83H1joum_hlTiCtSFWwIyM3nEEntU
|
||||
TGRAMCHAT=-1005306622927
|
||||
TGRAM_SHOUTRRR_ADDRESS=telegram://5306622927:AAGsDD83H1joum_hlTiCtSFWwIyM3nEEntU@telegram?chats=-1001662562579
|
||||
|
||||
ZWAVE_SECRET=bpWkvM6NwNU2S7oYJxNJ
|
||||
|
||||
VWSFRIEND_USERNAME='admin'
|
||||
VWSFRIEND_HOSTNAME='vwsfriend'
|
||||
VWSFRIEND_PASSWORD='bajsa123'
|
||||
WECONNECT_USER='simon@milvert.com'
|
||||
WECONNECT_PASSWORD='khLe86qM925qkB'
|
||||
WECONNECT_SPIN=9331
|
||||
WECONNECT_INTERVAL=180
|
||||
VW_DB_HOSTNAME='postgres'
|
||||
VW_DB_NAME='vwfriend'
|
||||
VW_DB_USER='vwfriend'
|
||||
VW_DB_PASSWORD='icCJ8iwKJBeRBg'
|
||||
ADDITIONAL_PARAMETERS=-vv --mqttbroker mqtt --mqttport 1883 -mu simon -mp bajsa123
|
||||
|
||||
|
||||
DEBUG_ABOVE=--mqttbroker mqtt --mqttport 1883 -mu simon -mp bajsa123
|
||||
|
||||
UPTIME_KUMA_PASSWORD=bajsa123
|
||||
UPTIME_KUMA_USER=simon@milvert.com
|
||||
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
UID=1000
|
||||
GID=1000
|
||||
DIR=./data
|
||||
DOMAIN=localhost
|
||||
|
||||
|
||||
MYSQL_USERNAME=root
|
||||
MYSQL_PASSWORD=qwerty
|
||||
|
||||
POSTGRES_USER=root
|
||||
POSTGRES_PASSWORD=qwerty
|
||||
PGADMIN_DEFAULT_EMAIL=kalle@asd.com
|
||||
PGADMIN_DEFAULT_PASSWORD=bajsa
|
||||
|
||||
PUID=1000
|
||||
PGID=1000
|
||||
TZ=Europe/Stockholm
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
UID=1000
|
||||
GID=1000
|
||||
DIR=/srv/docker
|
||||
DOMAIN=milvert.com
|
||||
|
||||
|
||||
MYSQL_USERNAME=root
|
||||
MYSQL_PASSWORD=qwerty
|
||||
|
||||
POSTGRES_USER=root
|
||||
POSTGRES_PASSWORD=qwerty
|
||||
PGADMIN_DEFAULT_EMAIL=kalle@asd.com
|
||||
PGADMIN_DEFAULT_PASSWORD=bajsa
|
||||
|
||||
PUID=1000
|
||||
PGID=1000
|
||||
TZ=Europe/Stockholm
|
||||
|
|
@ -1,3 +0,0 @@
|
|||
librespeed/config/log/*
|
||||
**/[Ll]ogs
|
||||
**/[Ll]og
|
||||
|
|
@ -1,9 +0,0 @@
|
|||
[submodule "diabets_app"]
|
||||
path = diabets_app
|
||||
url = git@milvert.com:simon/diabets_app.git
|
||||
[submodule "ha/config/custom_components/huawei_solar"]
|
||||
path = ha/config/custom_components/huawei_solar
|
||||
url = https://github.com/wlcrs/huawei_solar.git
|
||||
[submodule "ha/config/custom_components/nordpool"]
|
||||
path = ha/config/custom_components/nordpool
|
||||
url = https://github.com/custom-components/nordpool.git
|
||||
|
|
@ -1,184 +0,0 @@
|
|||
http:
|
||||
pprof:
|
||||
port: 6060
|
||||
enabled: false
|
||||
address: 0.0.0.0:80
|
||||
session_ttl: 720h
|
||||
users:
|
||||
- name: simon
|
||||
password: $2a$10$LmhzPrlAZ8gzqXuibTTlPud.vOXgkpa3zhzrVj8xgLqFEnHQTFt7e
|
||||
auth_attempts: 5
|
||||
block_auth_min: 15
|
||||
http_proxy: ""
|
||||
language: ""
|
||||
theme: auto
|
||||
dns:
|
||||
bind_hosts:
|
||||
- 0.0.0.0
|
||||
port: 53
|
||||
anonymize_client_ip: false
|
||||
ratelimit: 20
|
||||
ratelimit_subnet_len_ipv4: 24
|
||||
ratelimit_subnet_len_ipv6: 56
|
||||
ratelimit_whitelist: []
|
||||
refuse_any: true
|
||||
upstream_dns:
|
||||
- https://dns.cloudflare.com/dns-query
|
||||
- https://dns.google/dns-query
|
||||
- https://security.cloudflare-dns.com/dns-query
|
||||
upstream_dns_file: ""
|
||||
bootstrap_dns:
|
||||
- 9.9.9.10
|
||||
- 149.112.112.10
|
||||
- 2620:fe::10
|
||||
- 2620:fe::fe:10
|
||||
fallback_dns:
|
||||
- 1.1.1.2
|
||||
upstream_mode: load_balance
|
||||
fastest_timeout: 1s
|
||||
allowed_clients: []
|
||||
disallowed_clients: []
|
||||
blocked_hosts:
|
||||
- version.bind
|
||||
- id.server
|
||||
- hostname.bind
|
||||
trusted_proxies:
|
||||
- 127.0.0.0/8
|
||||
- ::1/128
|
||||
cache_size: 4194304
|
||||
cache_ttl_min: 0
|
||||
cache_ttl_max: 0
|
||||
cache_optimistic: false
|
||||
bogus_nxdomain: []
|
||||
aaaa_disabled: false
|
||||
enable_dnssec: false
|
||||
edns_client_subnet:
|
||||
custom_ip: ""
|
||||
enabled: false
|
||||
use_custom: false
|
||||
max_goroutines: 300
|
||||
handle_ddr: true
|
||||
ipset: []
|
||||
ipset_file: ""
|
||||
bootstrap_prefer_ipv6: false
|
||||
upstream_timeout: 10s
|
||||
private_networks: []
|
||||
use_private_ptr_resolvers: false
|
||||
local_ptr_upstreams: []
|
||||
use_dns64: false
|
||||
dns64_prefixes: []
|
||||
serve_http3: false
|
||||
use_http3_upstreams: false
|
||||
serve_plain_dns: true
|
||||
hostsfile_enabled: true
|
||||
tls:
|
||||
enabled: false
|
||||
server_name: ""
|
||||
force_https: false
|
||||
port_https: 443
|
||||
port_dns_over_tls: 853
|
||||
port_dns_over_quic: 853
|
||||
port_dnscrypt: 0
|
||||
dnscrypt_config_file: ""
|
||||
allow_unencrypted_doh: false
|
||||
certificate_chain: ""
|
||||
private_key: ""
|
||||
certificate_path: ""
|
||||
private_key_path: ""
|
||||
strict_sni_check: false
|
||||
querylog:
|
||||
dir_path: ""
|
||||
ignored: []
|
||||
interval: 2160h
|
||||
size_memory: 1000
|
||||
enabled: true
|
||||
file_enabled: true
|
||||
statistics:
|
||||
dir_path: ""
|
||||
ignored: []
|
||||
interval: 24h
|
||||
enabled: true
|
||||
filters:
|
||||
- enabled: false
|
||||
url: https://adguardteam.github.io/HostlistsRegistry/assets/filter_1.txt
|
||||
name: AdGuard DNS filter
|
||||
id: 1
|
||||
- enabled: false
|
||||
url: https://adguardteam.github.io/HostlistsRegistry/assets/filter_2.txt
|
||||
name: AdAway Default Blocklist
|
||||
id: 2
|
||||
whitelist_filters: []
|
||||
user_rules: []
|
||||
dhcp:
|
||||
enabled: false
|
||||
interface_name: ""
|
||||
local_domain_name: lan
|
||||
dhcpv4:
|
||||
gateway_ip: ""
|
||||
subnet_mask: ""
|
||||
range_start: ""
|
||||
range_end: ""
|
||||
lease_duration: 86400
|
||||
icmp_timeout_msec: 1000
|
||||
options: []
|
||||
dhcpv6:
|
||||
range_start: ""
|
||||
lease_duration: 86400
|
||||
ra_slaac_only: false
|
||||
ra_allow_slaac: false
|
||||
filtering:
|
||||
blocking_ipv4: ""
|
||||
blocking_ipv6: ""
|
||||
blocked_services:
|
||||
schedule:
|
||||
time_zone: UTC
|
||||
ids: []
|
||||
protection_disabled_until: null
|
||||
safe_search:
|
||||
enabled: false
|
||||
bing: true
|
||||
duckduckgo: true
|
||||
google: true
|
||||
pixabay: true
|
||||
yandex: true
|
||||
youtube: true
|
||||
blocking_mode: default
|
||||
parental_block_host: family-block.dns.adguard.com
|
||||
safebrowsing_block_host: standard-block.dns.adguard.com
|
||||
rewrites:
|
||||
- domain: '*.milvert.com'
|
||||
answer: milvert.com
|
||||
- domain: milvert.com
|
||||
answer: 10.0.0.3
|
||||
safebrowsing_cache_size: 1048576
|
||||
safesearch_cache_size: 1048576
|
||||
parental_cache_size: 1048576
|
||||
cache_time: 30
|
||||
filters_update_interval: 24
|
||||
blocked_response_ttl: 10
|
||||
filtering_enabled: true
|
||||
parental_enabled: false
|
||||
safebrowsing_enabled: false
|
||||
protection_enabled: false
|
||||
clients:
|
||||
runtime_sources:
|
||||
whois: true
|
||||
arp: true
|
||||
rdns: true
|
||||
dhcp: true
|
||||
hosts: true
|
||||
persistent: []
|
||||
log:
|
||||
enabled: true
|
||||
file: ""
|
||||
max_backups: 0
|
||||
max_size: 100
|
||||
max_age: 3
|
||||
compress: false
|
||||
local_time: false
|
||||
verbose: false
|
||||
os:
|
||||
group: ""
|
||||
user: ""
|
||||
rlimit_nofile: 0
|
||||
schema_version: 28
|
||||
|
|
@ -1,247 +0,0 @@
|
|||
###############################################################
|
||||
# Authelia configuration #
|
||||
###############################################################
|
||||
server:
|
||||
address: 'tcp://:9091'
|
||||
|
||||
log:
|
||||
level: info
|
||||
|
||||
# This secret can also be set using the env variables AUTHELIA_JWT_SECRET_FILE
|
||||
# I used this site to generate the secret: https://www.grc.com/passwords.htm
|
||||
#jwt_secret: SECRET_GOES_HERE # use docker secret file instead AUTHELIA_JWT_SECRET_FILE
|
||||
|
||||
# https://docs.authelia.com/configuration/miscellaneous.html#default-redirection-url
|
||||
# default_redirection_url: https://authelia.milvert.com
|
||||
|
||||
webauthn: #FIDO2 Authentication
|
||||
disable: false
|
||||
enable_passkey_login: true
|
||||
display_name: Authelia
|
||||
attestation_conveyance_preference: direct
|
||||
timeout: 60s
|
||||
filtering:
|
||||
prohibit_backup_eligibility: false
|
||||
selection_criteria:
|
||||
attachment: ''
|
||||
discoverability: 'preferred'
|
||||
user_verification: 'preferred'
|
||||
metadata:
|
||||
enabled: false
|
||||
validate_trust_anchor: true
|
||||
validate_entry: true
|
||||
validate_status: true
|
||||
validate_entry_permit_zero_aaguid: false
|
||||
|
||||
totp:
|
||||
issuer: authelia.com
|
||||
period: 30
|
||||
skew: 1
|
||||
|
||||
# Enable the following for Duo Push Notification support
|
||||
# https://www.authelia.com/docs/features/2fa/push-notifications.html
|
||||
#duo_api:
|
||||
# hostname: api-123456789.example.com
|
||||
# integration_key: ABCDEF
|
||||
# # This secret can also be set using the env variables AUTHELIA_DUO_API_SECRET_KEY_FILE
|
||||
# secret_key: # use docker secret file instead AUTHELIA_DUO_API_SECRET_KEY_FILE
|
||||
|
||||
authentication_backend:
|
||||
file:
|
||||
path: /config/users_database.yml
|
||||
# customize passwords based on https://docs.authelia.com/configuration/authentication/file.html
|
||||
password:
|
||||
algorithm: argon2id
|
||||
iterations: 1
|
||||
salt_length: 16
|
||||
parallelism: 8
|
||||
memory: 1024 # blocks this much of the RAM. Tune this.
|
||||
|
||||
# https://docs.authelia.com/configuration/access-control.html
|
||||
access_control:
|
||||
default_policy: deny
|
||||
rules:
|
||||
# Rules applied to everyone
|
||||
- domain: authelia.milvert.com
|
||||
policy: bypass
|
||||
# - domain: radarr.example.com
|
||||
# policy: bypass
|
||||
# subject: "group:admins"
|
||||
# resources:
|
||||
# - "^/api/.*$"
|
||||
# - domain: "*.example.com"
|
||||
# policy: one_factor
|
||||
- domain: "*.milvert.com"
|
||||
policy: two_factor
|
||||
- domain: "milvert.com"
|
||||
policy: two_factor
|
||||
#- domain:
|
||||
#- 'uptime.example.com'
|
||||
#subject: 'oauth2:client:uptime-kuma'
|
||||
#policy: 'one_factor'
|
||||
|
||||
session:
|
||||
name: authelia_session
|
||||
# This secret can also be set using the env variables AUTHELIA_SESSION_SECRET_FILE
|
||||
# Used a different secret, but the same site as jwt_secret above.
|
||||
# secret: SECRET_GOES_HERE # use docker secret file instead AUTHELIA_SESSION_SECRET_FILE
|
||||
expiration: 3600 # 1 hour
|
||||
inactivity: 1800 # 30 min
|
||||
cookies:
|
||||
- domain: 'milvert.com'
|
||||
authelia_url: https://authelia.milvert.com
|
||||
|
||||
# Optional. Can improve performance on a busy system. If not enabled, session info is stored in memory.
|
||||
# redis:
|
||||
# host: redis
|
||||
# port: 6379
|
||||
# This secret can also be set using the env variables AUTHELIA_SESSION_REDIS_PASSWORD_FILE
|
||||
# password: authelia
|
||||
|
||||
regulation:
|
||||
max_retries: 3
|
||||
find_time: 120
|
||||
ban_time: 300
|
||||
|
||||
storage:
|
||||
|
||||
# For local storage, uncomment lines below and comment out mysql. https://docs.authelia.com/configuration/storage/sqlite.html
|
||||
#local:
|
||||
#path: /config/db.sqlite3
|
||||
postgres:
|
||||
# MySQL allows running multiple authelia instances. Create database and enter details below.
|
||||
address: 'tcp://postgres:5432'
|
||||
#host: postgres
|
||||
#port: 5432
|
||||
database: 'authelia'
|
||||
username: 'authelia'
|
||||
# Password can also be set using a secret: https://docs.authelia.com/configuration/secrets.html
|
||||
# password: use docker secret file instead AUTHELIA_STORAGE_MYSQL_PASSWORD_FILE
|
||||
|
||||
notifier:
|
||||
disable_startup_check: false
|
||||
#smtp:
|
||||
#username: SMTP_USERNAME
|
||||
# This secret can also be set using the env variables AUTHELIA_NOTIFIER_SMTP_PASSWORD_FILE
|
||||
# password: # use docker secret file instead AUTHELIA_NOTIFIER_SMTP_PASSWORD_FILE
|
||||
#host: SMTP_HOST
|
||||
#port: 587 #465
|
||||
#sender: SENDER_EMAIL
|
||||
# For testing purpose, notifications can be sent in a file. Be sure map the volume in docker-compose.
|
||||
filesystem:
|
||||
filename: /tmp/authelia/notification.txt
|
||||
|
||||
|
||||
identity_providers:
|
||||
oidc:
|
||||
hmac_secret: 'akVs2Tr510MpfECDciJhpSI6SiHKhdiGefG2wMzPSuUhRlWNB0VNwDTxsFNZrRrw'
|
||||
jwks:
|
||||
- key_id: 'milvert_authelia_oidc_key'
|
||||
algorithm: 'RS256'
|
||||
use: 'sig'
|
||||
key: |
|
||||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIJKQIBAAKCAgEA1Yxr3WL300u245uZrBOgZtX63IwtFT9NDighnIz/PcFiYbUw
|
||||
lsrXi5HBJXuIEbxJDcdSdvPhusx08wizPuEnTLVphOgwQ8Xhab3qKLfmwW8yHGsX
|
||||
9+osJNhAzmXJXAMbjgz2Rqd7tuOT2PkyYF707FQBRlYNhcMfi06WVhlo4WFPab95
|
||||
seKmj3bSIHlmbPnrL9GqOhAtV018COCbMXM2pu6yQOTkdSltZyg5L1+QkSf2MAUN
|
||||
VjjTzWbjI8en9vQfZZjA1h7O0bpR/WOmPv9S+SdmnHE9FewOXux3aljC0qrTHeAh
|
||||
GIjq+8fzREN0Xvvqox5ZWwBKmPax4ed448Vm/8U3rZQ02Ftpr97w2inL/MT0UTmS
|
||||
wKudIlzmkuejqy0jiZ/aAX6JpE5OLsm1zhJSLJFx/vNxByh1kd2EFon22bUa5ZLF
|
||||
buVU1WMkhr1Nc8vTCgnr0Y3XKbB1koGJFwK6lg9L0Tstrt+SY34K2iRtFce/Lt2k
|
||||
KFkO+hfx3J8hRg2DcazR9bZTsjsK+OHw9sNaFMrkAf4Rd8Z27yRtSRdZXefgz88G
|
||||
1xjqgdZmjupKgRPJzCro4hbvmH6x1L8Q3ZzR5fstP8rui8m9UIsSCwLdzGlc7x7L
|
||||
toQckn+EFlZ0kLl1e3nlMDUpOaezx7TXt1OxlJtiX7MmGfhUcY+8k3+JS+0CAwEA
|
||||
AQKCAgALpHU5A61JOsaE0DXnmMZU9PORVJ8cmm/kBrIWa+V6g3GOrCczAWoQ9OhX
|
||||
181KUj6VXrmTPKag3Ya+vUmwcuwGlZlkIpiHCWHJtw3uHGCbSSmtPPV4ka7Ib3XR
|
||||
CuGYf57f9rryjS0EgpHL8YIamPK3c7kCEaz5DvNIUAeIOChsqTaAKG1FEntMNQkt
|
||||
thCsfk+hMsgaFEm0icfqX/x2DLb9EORs/02pSZHqXtoHSCmEkG4ungflHIIHn8Vg
|
||||
bQEuSI7xpgtVYSabbpILw4QLyTXH2asRemb/K/h4mmHETYSJocCSz2ZehRBym6sa
|
||||
nKbaitd2/y+V84Udo8186HbBzEBaNekr8IVDfY1NDi99ZgSUJBS0jDCUb84A6Ucf
|
||||
CRDZofvjNKV90x7wlZPz5T8N+lpDSH+ThwU0T1k8aydRA6DL/otFNfOS6vaqBdg3
|
||||
Gvpd7SQUT88u1l7rVZwsJ+uGBq9Sx+ScCnjl04jc62hDUy51hR+mOfrWNCJGqfJ9
|
||||
YJlaH2bZJuzKAyXAEYjJJuYfYPpTDVZ2glzSM72ytmPZseB6KwDJ8gZFtbdHUi0V
|
||||
eol49mOCKwBsaLjUh7rqix2WkO6yjcch17HrvpBEUZw+B0FIOPUdC2iH26lpOk0e
|
||||
QiuAPXZXcch00ta9UMBUfr8O8LVznm6L751UdaYnpNbw+2VHrQKCAQEA9soiuZYt
|
||||
jJmteohGKe2YOCOfzxirE2XqW8g5RWtWb84pEt+oZLEzrKFL4fvKYCEumfnLIrTe
|
||||
E6XrwVNbmRLxhRzPJi2ZgAqImy138BqeY9ygorUDKJP343JMBOKQvxCXF/ZvYYqn
|
||||
AXN6xt+1X2nlgBxWUJr7oqp2DJ/X7rBH2xRB8UITPInZCv9gxgRTWe5j8GToZJ2b
|
||||
S8VxgETl2IyBRE9H6knRZibD8uZKksLCPFIQdf/dkneiPTVW/PhvLzbASY3jOLJT
|
||||
O62xTkeykGEsdgAVYtBYuBrP86ujHHaqO1nGVMAYXVINNukrqXuF3n8XXCCmgFue
|
||||
Ibdus2UDct/7qwKCAQEA3YSyEVg8aSiF+cGqv1CpJFzdM169zDNxFooD7b7oj/FX
|
||||
oMZMKQ1EGcqWJ8ivseM1OYoc2FlVZF+bem1VNEDcswTEuqI7YnRJcua5vbBMBQP+
|
||||
FO0a8rbI3poctY/1hPt/oZ0twX7mBMLzljJ4kQLaC4LLQDlhhP4SriqWoXx6BfFV
|
||||
AZEbcNlzyOHGIUdA9ahyVB5isYC3ru6lZltAg+2+zHucLvNZ0H/jVAgjH8qOxoZh
|
||||
m6XILdQVdFMhZqmdLWPfFgZGqL/zc3qHrIWWvunawcIEnUZvVkTnCTSfIFvfsErJ
|
||||
jlT7hVUxNLQqed/DIsX9bz/Vj0Uj/7IOCcwBFiv+xwKCAQEAgUo4kv3IcyZ3qgAG
|
||||
siSVYKPZL0f/KDR2aQsmZeXr7LsW7ZpawBXNupzuAkBFL8NyhJxG/bG/vo9oSWoA
|
||||
TNuNyGzlYVz2VAwwsQtLEHEBldOaHGO0eee7MF9K0CxaJJ7uaVFj3Lgk8i+rnNDK
|
||||
VmhGIa220pe/SOMA4XBEUfnsSyv7uAcjyM129boA2vydJjosBV74GO4w06tm4Qo3
|
||||
WBGUD1Nxm558o4WflntriiOaWrurgAZB8F/YkTSGlBUbOqL2bhJ1fdh+nn9KqnYJ
|
||||
aHZgMpmsmo4ITLtPQpsi4uCQInPP4cqZeRppbeEOTMY3xe7TMCKy2AAnggZ1amp7
|
||||
Og157QKCAQBvfoyJwlmVrDnSvbGNXbHkhYEoi/UHxJSU/Z6AmWhAmv66A1TuDKlM
|
||||
OfVdzNrxtCRj86oVEtGyFZUSB9ms1UDAMhQ6w9ET+ufFF3BBk2yP0bSfH8BCjdGI
|
||||
iRUOJYk0B8nztEMFczOfDejAnmKkykSpKonWp4r3/1Gzq+fpG9fnCdL5WOnw4OIw
|
||||
J8MrmMuPWdtBj5GpOdo6CA/j9uYAATfZgBXaY82+7b+j2fyj0bYPIjAawVSCDI9H
|
||||
31eebpyX7f6o/TuvT/3fD7seEJcRPG9IurjL2FnNmByZO40kIlnyR5IvO4LlVz3P
|
||||
Ayel9AQpinHG/uAknm5CEoKSV8XsPPSdAoIBAQDweDT7RGHYHQp0INcQ3JxjrfN3
|
||||
PcaeVths+7KA+pAq+id1grv48Yg+bo/vfoA25SSV6UrtWBW8JUUtFcRIH+UFnQ7c
|
||||
rZkmI/l6lzdyJ3akzIJRAKvo7JGmT4NqTCjmug0Oo6feTjwuBisGRA7UFB/7gjJa
|
||||
v9IhIt51N7Dl+SHK+drYGoErbzurxCOmuE0+GCnZ2qvdafVbk6zh4U2pZ2feOfqu
|
||||
mPM3FMJdnSrXYtVWAY7hfSIsF/Ndh+kdkQi/s6TsZHqZ3PLTKWUk5ETwFpTqEXaM
|
||||
DsGaWut89Ik9YrcAQVKXez5jVJRsYXeMCznEXed6fLssXgmJT2OlrEgSQhEj
|
||||
-----END RSA PRIVATE KEY-----
|
||||
enable_client_debug_messages: true
|
||||
minimum_parameter_entropy: 8
|
||||
enforce_pkce: 'public_clients_only'
|
||||
enable_pkce_plain_challenge: false
|
||||
enable_jwt_access_token_stateless_introspection: false
|
||||
discovery_signed_response_alg: 'none'
|
||||
discovery_signed_response_key_id: ''
|
||||
require_pushed_authorization_requests: false
|
||||
authorization_policies:
|
||||
policy_name:
|
||||
default_policy: 'one_factor'
|
||||
rules:
|
||||
- policy: 'deny'
|
||||
subject: 'group:services'
|
||||
lifespans:
|
||||
access_token: '1h'
|
||||
authorize_code: '1m'
|
||||
id_token: '1h'
|
||||
refresh_token: '90m'
|
||||
cors:
|
||||
endpoints:
|
||||
- 'authorization'
|
||||
- 'token'
|
||||
- 'revocation'
|
||||
- 'introspection'
|
||||
allowed_origins:
|
||||
- 'https://authelia.milvert.com'
|
||||
allowed_origins_from_client_redirect_uris: false
|
||||
clients:
|
||||
- client_id: 'aN0VgMKamGZvleUplkT3W7kvXJmvsWmy4C9Obd6u5XGqL7A9B7CP2xxdSIv4ljIA'
|
||||
client_name: 'Grafana'
|
||||
client_secret: '$pbkdf2-sha512$310000$X53J.7eRdnYPuVSG6Uc0vw$y/PP9Wt5sHUrovp5hnXcJe6gias2t9h.PYj6iP0cMS1F2pDd98tzamSuoaU2b89vGONWdX0MaLKVs.6MFzCLEg'
|
||||
public: false
|
||||
authorization_policy: 'one_factor'
|
||||
redirect_uris:
|
||||
- 'https://data.milvert.com/login/generic_oauth'
|
||||
scopes:
|
||||
- 'openid'
|
||||
- 'profile'
|
||||
- 'groups'
|
||||
- 'email'
|
||||
userinfo_signed_response_alg: 'none'
|
||||
- client_id: 'MlMNM1K1vGR3wHBPNsZZ7J66u1cGkMGlzBoZoYJwuc80quRsjrlV9jEZlMLTTGmT'
|
||||
client_name: 'Gitea'
|
||||
client_secret: '$pbkdf2-sha512$310000$E2hUgSHeRFIhAr5bQsDAFg$1qPDiXvtmvwVhwWb./gie6F2CCI80oQQkXln.xd.q.HNVI00kn1D5esj0faJrJhHgNjV0udqrBD5SdIVD8vXow'
|
||||
public: false
|
||||
authorization_policy: 'one_factor'
|
||||
redirect_uris:
|
||||
- 'https://gitea.milvert.com/user/oauth2/Authelia/callback'
|
||||
scopes:
|
||||
- 'openid'
|
||||
- 'email'
|
||||
- 'profile'
|
||||
userinfo_signed_response_alg: 'none'
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
users:
|
||||
simon:
|
||||
displayname: "Simon Milvert"
|
||||
password: "$argon2id$v=19$m=65536,t=3,p=4$a/EFVtykPGlbBXR0bsS2xw$AfqJHwoJK+iEtvbsCR9l5kCDC3DviHpjRCxijyET1o8"
|
||||
email: simon@milvert.com
|
||||
groups:
|
||||
- admins
|
||||
- dev
|
||||
|
|
@ -1,39 +0,0 @@
|
|||
{
|
||||
"carConnectivity": {
|
||||
"log_level": "info",
|
||||
"connectors": [
|
||||
{
|
||||
"type": "skoda",
|
||||
"config": {
|
||||
"netrc": "/root/.netrc",
|
||||
"api_log_level": "warning",
|
||||
"interval": 600,
|
||||
"max_age": 300
|
||||
}
|
||||
}
|
||||
],
|
||||
"plugins": [
|
||||
{
|
||||
"type": "mqtt",
|
||||
"config": {
|
||||
"broker": "mqtt",
|
||||
"username": "simon",
|
||||
"password": "bajsa123"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "mqtt_homeassistant",
|
||||
"config": {}
|
||||
}
|
||||
,
|
||||
{
|
||||
"type": "database",
|
||||
"config": {
|
||||
"db_url": "sqlite:///carconnectivity.db"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -1,18 +0,0 @@
|
|||
#!/bin/sh
|
||||
set -e
|
||||
|
||||
# Läs secrets
|
||||
PW=$(cat /run/secrets/skoda_password)
|
||||
SPIN=$(cat /run/secrets/skoda_spin)
|
||||
|
||||
cat > /root/.netrc <<EOF
|
||||
machine skoda
|
||||
login simon@milvert.com
|
||||
password $PW
|
||||
account $SPIN
|
||||
EOF
|
||||
|
||||
chmod 600 /root/.netrc
|
||||
|
||||
# Kör original-cmd
|
||||
exec "$@"
|
||||
|
|
@ -1 +0,0 @@
|
|||
Subproject commit 75ba28f3afa2b0c4baab06823411f02e1dea5f20
|
||||
|
|
@ -1,26 +0,0 @@
|
|||
db:
|
||||
path: diun.db
|
||||
|
||||
watch:
|
||||
workers: 20
|
||||
schedule: "0 */6 * * *"
|
||||
firstCheckNotif: false
|
||||
|
||||
providers:
|
||||
docker:
|
||||
watchByDefault: false
|
||||
|
||||
notif:
|
||||
telegram:
|
||||
token: 5306622927:AAGsDD83H1joum_hlTiCtSFWwIyM3nEEntU
|
||||
chatIDs:
|
||||
- -1001662562579
|
||||
templateBody: |
|
||||
Image {{ .Entry.Image }} is updated, see {{ .Entry.Image.HubLink }}. Tag {{ .Entry.Manifest.Tag }}
|
||||
|
||||
webhook:
|
||||
endpoint: https://webhook.site/f0524ac9-d54c-43cd-9cc0-88b28ae66b3c
|
||||
method: GET
|
||||
headers:
|
||||
content-type: application/json
|
||||
timeout: 10s
|
||||
|
|
@ -2,6 +2,33 @@
|
|||
version: '3'
|
||||
|
||||
services:
|
||||
|
||||
gitea:
|
||||
image: gitea/gitea:1.12
|
||||
container_name: gitea
|
||||
networks:
|
||||
- backend
|
||||
restart: always
|
||||
environment:
|
||||
- USER_UID=1001
|
||||
- USER_GID=1005
|
||||
volumes:
|
||||
#- /var/lib/gitea:/data
|
||||
- ${DIR}/gitea:/data
|
||||
- /home/git/.ssh:/data/git/.ssh
|
||||
- /etc/timezone:/etc/timezone:ro
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
ports:
|
||||
- "127.0.0.1:2222:22"
|
||||
- "3000:3000"
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.services.gitea-service.loadbalancer.server.port=3000"
|
||||
- "traefik.http.routers.gitea-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.gitea-secure.rule=Host(`gitea.${DOMAIN}`)"
|
||||
- "traefik.http.routers.gitea-secure.tls.certresolver=milvert"
|
||||
- "traefik.http.routers.gitea-secure.tls=true"
|
||||
|
||||
grafana:
|
||||
image: grafana/grafana
|
||||
container_name: grafana
|
||||
|
|
@ -18,7 +45,7 @@ services:
|
|||
- "traefik.http.services.grafana-service.loadbalancer.server.port=3000"
|
||||
|
||||
influx:
|
||||
image: quay.io/influxdb/influxdb:2.0.0-beta
|
||||
image: influxdb
|
||||
container_name: influxdb
|
||||
networks:
|
||||
- backend
|
||||
|
|
@ -29,8 +56,15 @@ services:
|
|||
- "traefik.http.routers.influx.rule=host(`influx.${DOMAIN}`)"
|
||||
- "traefik.http.services.influx-service.loadBalancer.server.port=8086"
|
||||
restart: always
|
||||
volumes:
|
||||
- ${DIR}/influx-data:/var/lib/influxdb
|
||||
environment:
|
||||
- INFLUX_TOKEN=-I2TAAlmXdJ06yMw9wf-Mztf83ZzYZUdE8Ov2hMrnYabvXUnxL2cZfDzNHR6LTbW7ND5rE95CdvV2-Cy8IMHZQ==
|
||||
- INFLUXDB_DB=SOME_DB_NAME # set any other to create database on initialization
|
||||
- INFLUXDB_HTTP_ENABLED=true
|
||||
- INFLUXDB_HTTP_AUTH_ENABLED=true
|
||||
|
||||
- INFLUXDB_ADMIN_USER=SECURE_USERNAME
|
||||
- INFLUXDB_ADMIN_PASSWORD=SECURE_PASS
|
||||
|
||||
telegraf:
|
||||
container_name: telegraf
|
||||
|
|
@ -40,10 +74,8 @@ services:
|
|||
- backend
|
||||
ports:
|
||||
- 8081:8081
|
||||
environment:
|
||||
- INFLUX_TOKEN=-I2TAAlmXdJ06yMw9wf-Mztf83ZzYZUdE8Ov2hMrnYabvXUnxL2cZfDzNHR6LTbW7ND5rE95CdvV2-Cy8IMHZQ==
|
||||
volumes:
|
||||
- ./telegraf.conf:/etc/telegraf/telegraf.conf
|
||||
- ./data-handling/telegraf.conf:/etc/telegraf/telegraf.conf
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
|
||||
|
||||
|
|
@ -1,136 +0,0 @@
|
|||
|
||||
version: '3'
|
||||
|
||||
services:
|
||||
db:
|
||||
image: mariadb:10.5.6
|
||||
container_name: mariaDB
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
restart: always
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
networks:
|
||||
- backend
|
||||
volumes:
|
||||
- ${DIR_LOCAL}/database:/var/lib/mysql:rw
|
||||
ports:
|
||||
- "3307:3306"
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=false"
|
||||
environment:
|
||||
- MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD}
|
||||
- MYSQL_USER=${MYSQL_USERNAME}
|
||||
- MYSQL_PASSWORD=${MYSQL_PASSWORD}
|
||||
|
||||
web_db:
|
||||
image: adminer
|
||||
container_name: adminer
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
restart: always
|
||||
networks:
|
||||
- backend
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.middlewares.webdb-mid.ipallowlist.sourcerange=127.0.0.1/32, 10.0.0.1/24"
|
||||
- "traefik.http.routers.webdb-secure.middlewares=webdb-mid"
|
||||
- "traefik.http.routers.webdb-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.webdb-secure.rule=Host(`webdb.${DOMAIN}`)"
|
||||
- "traefik.http.routers.webdb-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.webdb-secure.tls=true"
|
||||
- "traefik.http.services.webdb-service.loadbalancer.server.port=8080"
|
||||
depends_on:
|
||||
- db
|
||||
environment:
|
||||
- MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD}
|
||||
- MYSQL_USER=${MYSQL_USERNAME}
|
||||
- MYSQL_PASSWORD=${MYSQL_PASSWORD}
|
||||
- PMA_ARBITRARY=1
|
||||
|
||||
postgres:
|
||||
image: postgres:13.6
|
||||
container_name: postgres
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
restart: always
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
networks:
|
||||
- backend
|
||||
volumes:
|
||||
- ${DIR_LOCAL}/database_pg/data:/var/lib/postgresql/data
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=false"
|
||||
environment:
|
||||
- TZ=${TZ}
|
||||
- POSTGRES_USER=${POSTGRES_USER}
|
||||
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
|
||||
|
||||
|
||||
pgadmin:
|
||||
container_name: pgadmin_container
|
||||
image: dpage/pgadmin4
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
environment:
|
||||
PGADMIN_DEFAULT_EMAIL: ${PGADMIN_DEFAULT_EMAIL}
|
||||
PGADMIN_DEFAULT_PASSWORD: ${PGADMIN_DEFAULT_PASSWORD}
|
||||
volumes:
|
||||
- ${DIR_LOCAL}/database_pg/pgadmin:/root/.pgadmin
|
||||
networks:
|
||||
- backend
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- postgres
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
# - "traefik.http.middlewares.webpg-mid.ipwhitelist.sourcerange=127.0.0.1/32, 10.0.0.1/24"
|
||||
#- "traefik.http.routers.webpg-secure.middlewares=webpg-mid"
|
||||
- "traefik.http.routers.webpg-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.webpg-secure.rule=Host(`webpg.${DOMAIN}`)"
|
||||
- "traefik.http.routers.webpg-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.webpg-secure.tls=true"
|
||||
#- "traefik.http.services.webpg-service.loadbalancer.server.port=8080"
|
||||
|
||||
redis:
|
||||
image: redis:7.0-alpine
|
||||
container_name: redis
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
restart: always
|
||||
networks:
|
||||
- backend
|
||||
volumes:
|
||||
- ${DIR_LOCAL}/redis:/var/lib/redis
|
||||
#entrypoint: redis-server --appendonly yes --requirepass $REDIS_PASSWORD --maxmemory 512mb --maxmemory-policy allkeys-lru
|
||||
labels:
|
||||
- diun.enable=true
|
||||
|
||||
networks:
|
||||
frontend:
|
||||
external: true
|
||||
backend:
|
||||
external: false
|
||||
|
||||
|
|
@ -1,21 +1,14 @@
|
|||
|
||||
version: '3'
|
||||
|
||||
services:
|
||||
node-red:
|
||||
image: nodered/node-red:3.1.6
|
||||
|
||||
# image: nodered/node-red-dev:3.0.0-beta.4-14
|
||||
node-red:
|
||||
image: nodered/node-red:latest
|
||||
container_name: "node-red"
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
networks:
|
||||
- backend
|
||||
environment:
|
||||
- TZ=${TZ}
|
||||
- TZ=Europe/Stockholm
|
||||
ports:
|
||||
- "1881:1880"
|
||||
#devices:
|
||||
|
|
@ -25,39 +18,29 @@ services:
|
|||
volumes:
|
||||
- ${DIR}/nodered:/data
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.node-red.rule=Host(`nodered.${DOMAIN}`)"
|
||||
- "traefik.http.routers.node-red.entrypoints=web"
|
||||
- "traefik.http.services.node-red-service.loadbalancer.server.port=1880"
|
||||
- "traefik.http.routers.node-red-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.node-red-secure.rule=Host(`nodered.${DOMAIN}`)"
|
||||
- "traefik.http.routers.node-red-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.node-red-secure.tls.certresolver=milvert"
|
||||
- "traefik.http.routers.node-red-secure.tls=true"
|
||||
- "traefik.enable=true"
|
||||
|
||||
|
||||
huawei_inverter:
|
||||
image: huawei_inverter:1.0
|
||||
build: ../Inverter_huawei
|
||||
container_name: huawei_inverter
|
||||
dns:
|
||||
- 10.0.0.201
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
zigbee2mqtt:
|
||||
container_name: zigbee2mqtt
|
||||
networks:
|
||||
- backend
|
||||
restart: unless-stopped
|
||||
image: koenkk/zigbee2mqtt:1.13.0
|
||||
volumes:
|
||||
- ${DIR}/zigbee2matt:/app/data
|
||||
- ${DIR}/zigbee2mqtt_node_log:/root/.npm
|
||||
devices:
|
||||
- /dev/ttyACM1:/dev/ttyACM0
|
||||
environment:
|
||||
- INVERTER_HOST=10.0.2.20
|
||||
- MQTT_HOST=mqtt
|
||||
- USE_CREDENTIALS=YES
|
||||
- USER_NAME=simon
|
||||
- PASSWORD=bajsa123
|
||||
- LOGLEVEL=INFO
|
||||
- MQTT_TOPIC=inverter
|
||||
- DATA_MODE=INVERTER # INVERTER or OFFLINE
|
||||
|
||||
- UID=1000
|
||||
- GID=1000
|
||||
- TZ=Europe/Stockholm
|
||||
|
||||
networks:
|
||||
frontend:
|
||||
|
|
|
|||
|
|
@ -1,62 +0,0 @@
|
|||
version: '3'
|
||||
|
||||
services:
|
||||
nextcloud:
|
||||
container_name: nextcloud
|
||||
image: nextcloud:27
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
restart: always
|
||||
dns:
|
||||
- 10.0.0.3
|
||||
- 8.8.8.8
|
||||
ports:
|
||||
- "8009:80"
|
||||
networks:
|
||||
- backend
|
||||
depends_on:
|
||||
- redis
|
||||
- postgres
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.nextcloud.entrypoints=web-secure"
|
||||
- "traefik.http.routers.nextcloud.middlewares=nextcloud,nextcloud_redirect"
|
||||
- "traefik.http.routers.nextcloud.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.nextcloud.rule=Host(`moln.${DOMAIN}`)"
|
||||
- "traefik.http.middlewares.nextcloud.headers.customFrameOptionsValue=ALLOW-FROM https://milvert.com"
|
||||
- "traefik.http.middlewares.nextcloud.headers.contentSecurityPolicy=frame-ancestors 'self' milvert.com"
|
||||
- "traefik.http.middlewares.nextcloud.headers.stsSeconds=155520011"
|
||||
- "traefik.http.middlewares.nextcloud.headers.stsIncludeSubdomains=true"
|
||||
- "traefik.http.middlewares.nextcloud.headers.stsPreload=true"
|
||||
- "traefik.http.middlewares.nextcloud.headers.customFrameOptionsValue=SAMEORIGIN"
|
||||
- "traefik.http.middlewares.nextcloud.headers.referrerPolicy=no-referrer"
|
||||
- "traefik.http.middlewares.nextcloud.headers.browserXSSFilter=true"
|
||||
- "traefik.http.middlewares.nextcloud.headers.contentTypeNosniff=true"
|
||||
- "traefik.http.middlewares.nextcloud_redirect.redirectregex.regex=/.well-known/(card|cal)dav"
|
||||
- "traefik.http.middlewares.nextcloud_redirect.redirectregex.replacement=/remote.php/dav/"
|
||||
environment:
|
||||
- POSTGRES_DB=nextcloud
|
||||
- POSTGRES_USER=nextcloud
|
||||
- POSTGRES_PASSWORD=bajsa
|
||||
- POSTGRES_HOST=postgres
|
||||
- NEXTCLOUD_ADMIN_USER=admin
|
||||
- NEXTCLOUD_ADMIN_PASSWORD=bajsa123
|
||||
- REDIS_HOST=redis
|
||||
- NEXTCLOUD_TRUSTED_DOMAINS=moln.milvert.com
|
||||
- TRUSTED_PROXIES=172.19.0.0/16
|
||||
volumes:
|
||||
# - ${DIR}/nextcloud-www:/var/www/html
|
||||
- ./nextcloud/cronjob:/var/spool/cron/crontabs/www-data
|
||||
#- /srv/owncloud:/var/www/html/data
|
||||
- /media/NAS:/media/NAS
|
||||
|
||||
networks:
|
||||
frontend:
|
||||
external: true
|
||||
backend:
|
||||
external: false
|
||||
|
||||
|
|
@ -1,98 +0,0 @@
|
|||
version: '3'
|
||||
|
||||
services:
|
||||
wordpress:
|
||||
container_name: peek
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
depends_on:
|
||||
- db_peek
|
||||
image: wordpress:5.6.0-apache
|
||||
volumes:
|
||||
- ${DIR_LOCAL}/peekskog/wp:/var/www/html
|
||||
- ./wp/php.conf.ini:/usr/local/etc/php/conf.d/uploads.ini
|
||||
ports:
|
||||
- "8008:80"
|
||||
networks:
|
||||
- backend
|
||||
restart: always
|
||||
environment:
|
||||
WORDPRESS_DB_HOST: db_peek:3306
|
||||
WORDPRESS_DB_USER: wordpress
|
||||
WORDPRESS_DB_PASSWORD: peekskog
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
# middleware to redirect www to non-www
|
||||
- "traefik.http.middlewares.www-redirect.redirectregex.regex=^https://www.peekskog.se/(.*)"
|
||||
- "traefik.http.middlewares.www-redirect.redirectregex.replacement=https://peekskog.se/$${1}"
|
||||
- "traefik.http.middlewares.www-redirect.redirectregex.permanent=true"
|
||||
# https router
|
||||
- "traefik.http.routers.peek-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.peek-secure.rule=Host(`peekskog.se`) || Host(`www.peekskog.se`)"
|
||||
- "traefik.http.routers.peek-secure.middlewares=www-redirect,peek-middle"
|
||||
- "traefik.http.routers.peek-secure.middlewares=peek-middle"
|
||||
- "traefik.http.middlewares.peek-middle.headers.contentSecurityPolicy=frame-ancestors 'self' peekskog.se"
|
||||
- "traefik.http.middlewares.peek-middle.headers.framedeny=true"
|
||||
- "traefik.http.middlewares.peek-middle.headers.stsSeconds=155520011"
|
||||
- "traefik.http.middlewares.peek-middle.headers.stsIncludeSubdomains=true"
|
||||
- "traefik.http.middlewares.peek-middle.headers.stsPreload=true"
|
||||
- "traefik.http.middlewares.peek-middle.headers.customFrameOptionsValue=SAMEORIGIN"
|
||||
- "traefik.http.middlewares.peek-middle.headers.referrerPolicy=no-referrer"
|
||||
- "traefik.http.middlewares.peek-middle.headers.SSLRedirect=true"
|
||||
- "traefik.http.middlewares.peek-middle.headers.forceSTSHeader=true"
|
||||
- "traefik.http.middlewares.peek-middle.headers.browserXSSFilter=true"
|
||||
- "traefik.http.middlewares.peek-middle.headers.contentTypeNosniff=true"
|
||||
#- "traefik.http.routers.peek-secure.rule=Host(`${DOMAIN_PEEK}`)"
|
||||
- "traefik.http.routers.peek-secure.tls.certresolver=peek"
|
||||
- "traefik.http.routers.peek-secure.tls=true"
|
||||
|
||||
db_peek:
|
||||
container_name: db_peek
|
||||
image: mysql:5.7
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
volumes:
|
||||
- ${DIR_LOCAL}/peekskog/db:/var/lib/mysql
|
||||
restart: always
|
||||
networks:
|
||||
- backend
|
||||
environment:
|
||||
MYSQL_ROOT_PASSWORD: peekskog
|
||||
MYSQL_DATABASE: wordpress
|
||||
MYSQL_USER: wordpress
|
||||
MYSQL_PASSWORD: peekskog
|
||||
|
||||
phpmyadmin_peek:
|
||||
depends_on:
|
||||
- db_peek
|
||||
image: phpmyadmin/phpmyadmin
|
||||
container_name: phpmyadmin_peek
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
restart: always
|
||||
networks:
|
||||
- backend
|
||||
ports:
|
||||
- '8007:80'
|
||||
environment:
|
||||
PMA_HOST: db_peek
|
||||
labels:
|
||||
- "traefik.enable=false"
|
||||
|
||||
|
||||
networks:
|
||||
frontend:
|
||||
external: true
|
||||
backend:
|
||||
external: false
|
||||
|
||||
|
|
@ -1,296 +0,0 @@
|
|||
version: '3'
|
||||
|
||||
services:
|
||||
|
||||
zigbee_home_2:
|
||||
container_name: zigbee_home_2
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
networks:
|
||||
- backend
|
||||
ports:
|
||||
- "8088:8080"
|
||||
image: koenkk/zigbee2mqtt:2.7
|
||||
restart: always
|
||||
volumes:
|
||||
- ./zigbee_home_2:/app/data
|
||||
- ./zigbee_home_2/configuration_home_2.yaml:/app/data/configuration.yaml
|
||||
devices:
|
||||
- /dev/ttyUSB-zigbee_home_2:/dev/ttyACM0
|
||||
environment:
|
||||
- UID=1000
|
||||
- GID=1000
|
||||
- TZ=Europe/Stockholm
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.services.zighome-service.loadBalancer.server.port=8080"
|
||||
- "traefik.http.routers.zighome-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.zighome-secure.rule=Host(`zig2.${DOMAIN}`)"
|
||||
- "traefik.http.routers.zighome-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.zighome-secure.middlewares=chain-authelia@file"
|
||||
- "traefik.http.routers.zighome-secure.tls=true"
|
||||
|
||||
zigbee2mqtt:
|
||||
container_name: zigbee2mqtt
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "5"
|
||||
networks:
|
||||
- backend
|
||||
image: koenkk/zigbee2mqtt:2.6
|
||||
restart: always
|
||||
volumes:
|
||||
- ${DIR}/zigbee2matt:/app/data
|
||||
- ./zigbee2mqtt/configuration.yaml:/app/data/configuration.yaml
|
||||
- ${DIR}/zigbee2mqtt_node_log:/root/.npm
|
||||
devices:
|
||||
- /dev/ttyACM1:/dev/ttyACM0
|
||||
environment:
|
||||
- UID=1000
|
||||
- GID=1000
|
||||
- TZ=Europe/Stockholm
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.services.zig1-service.loadBalancer.server.port=8081"
|
||||
- "traefik.http.routers.zig1-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.zig1-secure.rule=Host(`zig1.${DOMAIN}`)"
|
||||
- "traefik.http.routers.zig1-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.zig1-secure.middlewares=chain-authelia@file"
|
||||
- "traefik.http.routers.zig1-secure.tls=true"
|
||||
|
||||
|
||||
influx:
|
||||
image: influxdb:2.7
|
||||
container_name: influxdb
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "5"
|
||||
networks:
|
||||
- backend
|
||||
restart: always
|
||||
ports:
|
||||
- "8087:8086"
|
||||
volumes:
|
||||
# - ${DIR}/influx-data:/var/lib/influxdb
|
||||
- ${DIR}/influx_2-data:/var/lib/influxdb2
|
||||
#- ${DIR}/influx-data/influxdb.conf:/root/influxdb/influxdb.conf:ro
|
||||
environment:
|
||||
- INFLUXDB_DB=db0
|
||||
- INFLUXDB_ADMIN_USER=${INFLUX_ADMIN}
|
||||
- INFLUXDB_ADMIN_PASSWORD=${INFLUX_ADMIN_PASSWORD}
|
||||
- INFLUXD_LOG_LEVEL=INFO
|
||||
#- DOCKER_INFLUXDB_INIT_MODE=upgrade
|
||||
#- DOCKER_INFLUXDB_INIT_PASSWORD=${INFLUX_ADMIN_PASSWORD}
|
||||
#- DOCKER_INFLUXDB_INIT_USERNAME=${INFLUX_ADMIN}
|
||||
#- DOCKER_INFLUXDB_INIT_ORG=milvert
|
||||
#- DOCKER_INFLUXDB_INIT_BUCKET=MilvertBucket
|
||||
#- DOCKER_INFLUXDB_INIT_ADMIN_TOKEN=MilvertAuth
|
||||
#- DOCKER_INFLUXDB_INIT_UPGRADE_V1_CONFIG=/root/influxdb/influxdb.conf
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.services.influx-service.loadBalancer.server.port=8086"
|
||||
#- "traefik.http.routers.influx.entryPoints=influx"
|
||||
- "traefik.http.routers.influx-secure.rule=host(`influx.${DOMAIN}`)"
|
||||
- "traefik.http.routers.influx-secure.entryPoints=web-secure"
|
||||
- "traefik.http.routers.influx-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.influx-secure.tls=true"
|
||||
- "traefik.http.routers.influx-secure.middlewares=chain-no-auth@file"
|
||||
#- "traefik.http.middlewares.webpg-mid.ipwhitelist.sourcerange=127.0.0.1/32, 10.0.0.1/24"
|
||||
# - "traefik.http.routers.influx-secure.middlewares=localNetwork@file"
|
||||
|
||||
telegraf:
|
||||
image: telegraf:1.25
|
||||
restart: always
|
||||
container_name: telegraf
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "5"
|
||||
networks:
|
||||
- backend
|
||||
ports:
|
||||
- "8081:8081"
|
||||
volumes:
|
||||
- ./telegraf:/etc/telegraf
|
||||
- /var/run/docker.sock:/var/run/docker.sock:ro
|
||||
depends_on:
|
||||
- influx
|
||||
environment:
|
||||
- INFLUXDB_URL=http://influxdb:8086
|
||||
- INFLUXDB_USERNAME=${INFLUX_ADMIN}
|
||||
- INFLUXDB_PASSWORD=${INFLUX_ADMIN_PASSWORD}
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=false"
|
||||
- "traefik.http.services.telegraf-service.loadBalancer.server.port=8081"
|
||||
- "traefik.http.routers.telegraf-secure.entryPoints=influx"
|
||||
- "traefik.http.routers.telegraf-secure.rule=host(`telegraf.${DOMAIN}`)"
|
||||
#- "traefik.http.routers.influx-secure.middlewares=localNetwork@file"
|
||||
|
||||
gitea:
|
||||
image: gitea/gitea:1.21
|
||||
container_name: gitea
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "5"
|
||||
networks:
|
||||
- backend
|
||||
restart: always
|
||||
environment:
|
||||
- USER_UID=1001
|
||||
- USER_GID=1005
|
||||
volumes:
|
||||
#- /var/lib/gitea:/data
|
||||
- ${DIR}/gitea:/data
|
||||
- /home/git/.ssh:/data/git/.ssh
|
||||
- /etc/timezone:/etc/timezone:ro
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
ports:
|
||||
- "127.0.0.1:2222:22"
|
||||
- "3000:3000"
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.services.gitea-service.loadbalancer.server.port=3000"
|
||||
- "traefik.http.routers.gitea-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.gitea-secure.rule=Host(`gitea.${DOMAIN}`)"
|
||||
- "traefik.http.routers.gitea-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.gitea-secure.tls=true"
|
||||
- "traefik.http.routers.gitea-secure.middlewares=chain-no-auth@file"
|
||||
|
||||
|
||||
mqtt:
|
||||
# image: eclipse-mosquitto:1.6.13
|
||||
image: eclipse-mosquitto:2.0.18
|
||||
container_name: mqtt
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "5"
|
||||
networks:
|
||||
- backend
|
||||
restart: always
|
||||
ports:
|
||||
- "1883:1883"
|
||||
- "9003:9001"
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
- TZ=Europe/Stockholm
|
||||
volumes:
|
||||
- ./mosquitto/mosquitto.conf:/mosquitto/config/mosquitto.conf
|
||||
- ${DIR_LOCAL}/mqtt/data:/mqtt/data
|
||||
- ${DIR_LOCAL}/mqtt/log:/mqtt/log
|
||||
labels:
|
||||
- "traefik.enable=false"
|
||||
- diun.enable=true
|
||||
#- "traefik.http.routers.mqtt.entrypoints=mqtt_http"
|
||||
#- "traefik.http.routers.mqtt.rule=Host(`mqtt.${DOMAIN}`)"
|
||||
#- "traefik.http.routers.mqtt.tls.certresolver=milvert_dns"
|
||||
#- "traefik.http.routers.mqtt.tls=false"
|
||||
#- "traefik.http.services.mqtt_http.loadBalancer.server.port=9001"
|
||||
#- "traefik.http.routers.mqtt.middlewares=localNetwork@file"
|
||||
#- "traefik.tcp.routers.mqtt_tcp.entrypoints=mqtt_stcp"
|
||||
#- "traefik.tcp.routers.mqtt_tcp.rule=HostSNI(`mqtt.milvert.com`)"
|
||||
#- "traefik.tcp.routers.mqtt_tcp.tls=true"
|
||||
#- "traefik.tcp.routers.mqtt_tcp.tls.certresolver=milvert_dns"
|
||||
#- "traefik.tcp.routers.mqtt_tcp.service=mqtt_tcp"
|
||||
#- "traefik.tcp.services.mqtt_tcp.loadBalancer.server.port=1883"
|
||||
|
||||
mqtt_landet:
|
||||
image: eclipse-mosquitto:2.0.9
|
||||
container_name: mqtt_landet
|
||||
logging:
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "5"
|
||||
networks:
|
||||
- backend
|
||||
restart: always
|
||||
ports:
|
||||
- "1884:1884"
|
||||
- "9002:9002"
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
- TZ=${TZ}
|
||||
labels:
|
||||
- diun.enable=true
|
||||
volumes:
|
||||
- ./mosquitto/mosquitto_landet.conf:/mosquitto/config/mosquitto.conf
|
||||
- ${DIR}/mqtt_landet/data:/mosquitto/data
|
||||
- ${DIR}/mqtt_landet/log:/mosquitto/log
|
||||
|
||||
diun:
|
||||
image: ghcr.io/crazy-max/diun:4
|
||||
container_name: diun
|
||||
networks:
|
||||
- backend
|
||||
volumes:
|
||||
- ${DIR}/diun:/data
|
||||
- "./diun/diun.yml:/diun.yml:ro"
|
||||
- /var/run/docker.sock:/var/run/docker.sock:ro
|
||||
environment:
|
||||
- TZ=${TZ}
|
||||
- LOG_LEVEL=info
|
||||
restart: unless-stopped
|
||||
labels:
|
||||
- diun.enable=true
|
||||
|
||||
|
||||
#pihole:
|
||||
#image: pihole/pihole:2023.03.1
|
||||
#container_name: pihole
|
||||
#ports:
|
||||
#- "53:53/tcp"
|
||||
#- "53:53/udp"
|
||||
#- "8001:80"
|
||||
#dns:
|
||||
#- 127.0.0.1
|
||||
#- 9.9.9.9
|
||||
#environment:
|
||||
#- TZ=${TZ}
|
||||
#- WEBPASSWORD=${PIHOLE_PW}
|
||||
#- ServerIP=10.0.201
|
||||
#- PIHOLE_DNS_=127.0.0.1;9.9.9.9;149.112.112.112;1.1.1.1
|
||||
#- DNSSEC='true'
|
||||
##- VIRTUAL_HOST=pihole.milvert.com # Same as port traefik config
|
||||
#- WEBTHEME=default-dark
|
||||
#- PIHOLE_DOMAIN=milvert.com
|
||||
#volumes:
|
||||
#- ${DIR}/pihole/etc:/etc/pihole
|
||||
#- ${DIR}/pihole/dns:/etc/dnsmasq.d
|
||||
#restart: unless-stopped
|
||||
#networks:
|
||||
#backend:
|
||||
#labels:
|
||||
#- diun.enable=true
|
||||
#- "traefik.enable=true"
|
||||
#- "traefik.http.middlewares.pihole-admin.addprefix.prefix=/admin"
|
||||
#- "traefik.http.services.pihole.loadbalancer.server.port=80"
|
||||
#- "traefik.http.routers.pihole-secure.middlewares=localNetwork@file"
|
||||
#- "traefik.http.routers.pihole-secure.entrypoints=web-secure"
|
||||
#- "traefik.http.routers.pihole-secure.rule=Host(`pihole.${DOMAIN}`)"
|
||||
#- "traefik.http.routers.pihole-secure.tls.certresolver=milvert_dns"
|
||||
#- "traefik.http.routers.pihole-secure.tls=true"
|
||||
#- "traefik.http.routers.pihole-secure.middlewares=chain-no-auth@file"
|
||||
|
||||
networks:
|
||||
frontend:
|
||||
external: true
|
||||
backend:
|
||||
external: false
|
||||
|
|
@ -1,31 +1,26 @@
|
|||
version: '3'
|
||||
|
||||
|
||||
services:
|
||||
milvert:
|
||||
container_name: milvert
|
||||
milvert-nginx:
|
||||
container_name: milvert-nginx
|
||||
image: nginx:latest
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
restart: always
|
||||
networks:
|
||||
- backend
|
||||
ports:
|
||||
- 8004:80
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.milvertcom-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.milvertcom-secure.middlewares=default-headers@file"
|
||||
- "traefik.http.routers.milvertcom-secure.rule=Host(`${DOMAIN}`)"
|
||||
- "traefik.http.routers.milvertcom-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.milvertcom-secure.tls=true"
|
||||
#- "traefik.http.routers.milvert-secure.priority=1"
|
||||
|
||||
#HTTPS
|
||||
- "traefik.http.routers.milvert-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.milvert-secure.rule=Host(`www.${DOMAIN}`, `${DOMAIN}`)"
|
||||
- "traefik.http.routers.milvert-secure.priority=1"
|
||||
# SSL
|
||||
- "traefik.http.routers.milvert-secure.tls.certresolver=milvert"
|
||||
- "traefik.http.routers.milvert-secure.tls=true"
|
||||
volumes:
|
||||
- "${DIR_LOCAL}/milvert-nginx/conf:/etc/nginx/conf.d"
|
||||
- "${DIR_LOCAL}/milvert-nginx/html:/html"
|
||||
- "./milvert.com/conf:/etc/nginx/conf.d"
|
||||
- "./milvert.com/html:/html"
|
||||
|
||||
whoami:
|
||||
# A container that exposes an API to show its IP address
|
||||
|
|
@ -33,155 +28,16 @@ services:
|
|||
image: containous/whoami
|
||||
networks:
|
||||
- backend
|
||||
- frontend
|
||||
ports:
|
||||
- 8005:80
|
||||
# The HTTP port
|
||||
- "81:80"
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.whoami-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.whoami-secure.rule=Host(`whoami.${DOMAIN}`)"
|
||||
- "traefik.http.routers.whoami-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.whoami-secure.tls.certresolver=milvert"
|
||||
- "traefik.http.routers.whoami-secure.tls=true"
|
||||
# - "traefik.http.routers.whoami-secure.middlewares=no-chain-auth@file"
|
||||
- "traefik.http.routers.whoami-secure.middlewares=chain-authelia@file"
|
||||
|
||||
ttrss:
|
||||
# A container that exposes an API to show its IP address
|
||||
container_name: ttrss
|
||||
image: wangqiru/ttrss:latest
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- backend
|
||||
environment:
|
||||
- PUID=${UID}
|
||||
- PGID=${GID}
|
||||
- SELF_URL_PATH=https://rss.milvert.com
|
||||
- DB_USER=${RSS_USERNAME}
|
||||
- DB_NAME=${RSS_USERNAME}
|
||||
- DB_PASS=${RSS_PASSWORD}
|
||||
- DB_HOST=postgres
|
||||
- DB_PORT=5432
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.rss-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.rss-secure.rule=Host(`rss.${DOMAIN}`)"
|
||||
- "traefik.http.routers.rss-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.rss-secure.tls=true"
|
||||
|
||||
diak:
|
||||
image: diak:1.0
|
||||
build: ./diabets_app/client
|
||||
container_name: diak
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
networks:
|
||||
- backend
|
||||
ports:
|
||||
- 8003:80
|
||||
#devices:
|
||||
#- path to device:path to device
|
||||
restart: unless-stopped
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.diak-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.diak-secure.rule=Host(`diak.${DOMAIN}`)"
|
||||
- "traefik.http.routers.diak-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.diak-secure.tls=true"
|
||||
|
||||
|
||||
|
||||
syncthing:
|
||||
image: linuxserver/syncthing
|
||||
container_name: syncthing
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
- TZ=Europe/Stockholm
|
||||
- UMASK_SET=022
|
||||
volumes:
|
||||
- /mnt/johanna_backup/syncthing/config:/config
|
||||
- /mnt/johanna_backup/data1:/data1
|
||||
- /mnt/johanna_backup/data2:/data2
|
||||
ports:
|
||||
- 8384:8384
|
||||
- 22000:22000
|
||||
- 21027:21027/udp
|
||||
networks:
|
||||
- frontend
|
||||
- backend
|
||||
restart: unless-stopped
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.sync_johanna-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.sync_johanna-secure.rule=Host(`sync.${DOMAIN}`)"
|
||||
- "traefik.http.routers.sync_johanna-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.sync_johanna-secure.tls=true"
|
||||
|
||||
torrent:
|
||||
image: linuxserver/rutorrent
|
||||
container_name: torrent
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
networks:
|
||||
- backend
|
||||
ports:
|
||||
- 51413:51413
|
||||
- 6881:6881/udp
|
||||
volumes:
|
||||
- ./torrent/config:/config
|
||||
- /media/NAS:/downloads
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.torrent-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.torrent-secure.rule=Host(`torrent.${DOMAIN}`)"
|
||||
- "traefik.http.routers.torrent-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.torrent-secure.tls=true"
|
||||
|
||||
|
||||
librespeed:
|
||||
image: ghcr.io/linuxserver/librespeed
|
||||
container_name: librespeed
|
||||
environment:
|
||||
- PUID=${UUID}
|
||||
- PGID=${PGID}
|
||||
- TZ=${TZ}
|
||||
- PASSWORD=${LIBRESPEED_PW}
|
||||
- CUSTOM_RESULTS=false
|
||||
- DB_TYPE=sqlite
|
||||
volumes:
|
||||
- ./librespeed/config:/config
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- backend
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.librespeed.entrypoints=web-secure"
|
||||
- "traefik.http.routers.librespeed.rule=Host(`librespeed.${DOMAIN}`)"
|
||||
- "traefik.http.routers.librespeed.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.librespeed.tls=true"
|
||||
|
||||
|
||||
networks:
|
||||
frontend:
|
||||
external: true
|
||||
|
|
|
|||
|
|
@ -1,62 +1,56 @@
|
|||
version: '3'
|
||||
|
||||
secrets:
|
||||
authelia_jwt_secret:
|
||||
file: $SECRETSDIR/authelia_jwt_secret
|
||||
authelia_oidc_key_secret:
|
||||
file: $SECRETSDIR/authelia_oidc_key
|
||||
authelia_oidc_hamc_secret:
|
||||
file: $SECRETSDIR/authelia_oidc_hamc
|
||||
authelia_oidc_pem_secret:
|
||||
file: $SECRETSDIR/authelia_oidc_pem
|
||||
authelia_session_secret:
|
||||
file: $SECRETSDIR/authelia_session_secret
|
||||
authelia_storage_postgres_password:
|
||||
file: $SECRETSDIR/authelia_storage_postgres_password
|
||||
authelia_storage_encryption_key_file:
|
||||
file: $SECRETSDIR/authelia_storage_encryption_key_file
|
||||
authelia_notifier_smtp_password:
|
||||
file: $SECRETSDIR/authelia_notifier_smtp_password
|
||||
authelia_duo_api_secret_key:
|
||||
file: $SECRETSDIR/authelia_duo_api_secret_key
|
||||
vwfriend_password:
|
||||
file: $SECRETSDIR/vwfriend_password
|
||||
vwconnect_password:
|
||||
file: $SECRETSDIR/vwconnect_password
|
||||
skoda_password:
|
||||
file: $SECRETSDIR/skoda_password
|
||||
skoda_spin:
|
||||
file: $SECRETSDIR/skoda_spin
|
||||
carconnect_grafana_pw:
|
||||
file: $SECRETSDIR/carconnect_grafana_pw
|
||||
|
||||
|
||||
########################### EXTENSION FIELDS ##########################
|
||||
# Helps eliminate repetition of sections
|
||||
# More Info on how to use this: https://github.com/htpcBeginner/docker-traefik/pull/228
|
||||
#######################################################################
|
||||
x-environment: &default-tz-puid-pgid
|
||||
- TZ=$TZ
|
||||
- PUID=${UID}
|
||||
- PGID=${GID}
|
||||
|
||||
x-common-keys-monitoring: &common-keys-monitoring
|
||||
networks:
|
||||
- backend
|
||||
security_opt:
|
||||
- no-new-privileges:true restart: always
|
||||
services:
|
||||
|
||||
reverse-proxy:
|
||||
# The official v2.0 Traefik docker image
|
||||
#image: traefik:v2.11
|
||||
image: traefik:v3.3
|
||||
container_name: "traefik"
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
syncthing:
|
||||
image: linuxserver/syncthing
|
||||
container_name: syncthing
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
- TZ=Europe/Stockholm
|
||||
- UMASK_SET=022
|
||||
volumes:
|
||||
- ./syncthing/config:/config
|
||||
- /mnt/johanna_backup:/data1
|
||||
ports:
|
||||
- 8384:8384
|
||||
- 22000:22000
|
||||
- 21027:21027/udp
|
||||
networks:
|
||||
- frontend
|
||||
- backend
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
mqtt:
|
||||
image: eclipse-mosquitto:1.6.10
|
||||
container_name: mqtt
|
||||
networks:
|
||||
- frontend
|
||||
- backend
|
||||
restart: always
|
||||
ports:
|
||||
- "1883:1883"
|
||||
- "9001:9001"
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1000
|
||||
- TZ=Europe/Stockholm
|
||||
volumes:
|
||||
- ./main/mosquitto/mosquitto.conf:/mqtt/mosquitto.conf
|
||||
- ${DIR}/mqtt/data:/mqtt/data
|
||||
- ${DIR}/mqtt/log:/mqtt/log
|
||||
labels:
|
||||
- "traefik.http.routers.mqtt.rule=Host(`mqtt.${DOMAIN}`)"
|
||||
- "traefik.http.routers.mqtt.entrypoints=mqtt"
|
||||
- "traefik.http.services.mqtt-service.loadbalancer.server.port=9001"
|
||||
- "traefik.enable=true"
|
||||
|
||||
|
||||
traefik:
|
||||
# The official v2.0 Traefik docker image
|
||||
image: traefik
|
||||
container_name: "traefik"
|
||||
networks:
|
||||
- backend
|
||||
- frontend
|
||||
|
|
@ -64,815 +58,28 @@ services:
|
|||
# The HTTP port
|
||||
- "80:80"
|
||||
- "443:443"
|
||||
# Insecure port
|
||||
- "8080:8080"
|
||||
# Influx
|
||||
- "8086:8086"
|
||||
# Mqtt
|
||||
- "9001:9001"
|
||||
- "8883:8883"
|
||||
volumes:
|
||||
# So that Traefik can listen to the Docker events
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
- ${DIR_LOCAL}/traefik/log:/log:rw
|
||||
- ./traefik.yml:/etc/traefik/traefik.yml
|
||||
- ./traefik:/rules
|
||||
- ./static_config.yml:/etc/traefik/static_config.yml
|
||||
# - "./log.json:/etc/traefik/log.json"
|
||||
# - ./acme.json:/acme.json
|
||||
- ./letsencrypt/:/letsencrypt:rw
|
||||
# - ./letsencrypt/acme_peek_staged.json:/letsencrypt/acme_peek_staged.json
|
||||
environment:
|
||||
- CF_API_EMAIL=simon@milvert.com
|
||||
#- CF_DNS_API_TOKEN=48d9ae3752afb6e73d99d23c432ba8e38b24c
|
||||
- CF_DNS_API_TOKEN=m-X93yWXyvQ2vDhfNLURcQTWOqle13aBbw7g2Zxg
|
||||
- CLOUDFLARE_IPS
|
||||
- LOCAL_IPS
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.http.routers.zighome-secure.middlewares=chain-authelia@file"
|
||||
|
||||
authelia:
|
||||
image: authelia/authelia:4
|
||||
container_name: authelia
|
||||
volumes:
|
||||
- ./authelia:/config
|
||||
- ./notify.txt:/tmp/authelia/notification.txt
|
||||
networks:
|
||||
- backend
|
||||
ports:
|
||||
# The HTTP port
|
||||
- "9091:9091"
|
||||
environment:
|
||||
- TZ=$TZ
|
||||
- PUID=$PUID
|
||||
- PGID=$PGID
|
||||
- AUTHELIA_IDENTITY_VALIDATION_RESET_PASSWORD_JWT_SECRET_FILE=/run/secrets/authelia_jwt_secret
|
||||
- AUTHELIA_SESSION_SECRET_FILE=/run/secrets/authelia_session_secret
|
||||
- AUTHELIA_STORAGE_POSTGRES_PASSWORD_FILE=/run/secrets/authelia_storage_postgres_password
|
||||
#- AUTHELIA_NOTIFIER_SMTP_PASSWORD_FILE=/run/secrets/authelia_notifier_smtp_password
|
||||
- AUTHELIA_DUO_API_SECRET_KEY_FILE=/run/secrets/authelia_duo_api_secret_key
|
||||
- AUTHELIA_STORAGE_ENCRYPTION_KEY_FILE=/run/secrets/authelia_storage_encryption_key_file
|
||||
# - AUTHELIA_IDENTITY_PROVIDERS_OIDC_HMAC_SECRET_FILE=/run/secrets/authelia_oidc_hamc_secret
|
||||
#- AUTHELIA_IDENTITY_PROVIDERS_OIDC_ISSUER=/run/secrets/authelia_oidc_pem_secret
|
||||
- AUTHELIA_IDENTITY_PROVIDERS_OIDC_ISSUER_KEY=/run/secrets/authelia_oidc_key_secret
|
||||
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.authelia.entrypoints=web-secure"
|
||||
- "traefik.http.routers.authelia.rule=Host(`authelia.${DOMAIN}`)"
|
||||
- "traefik.http.services.authelia-service.loadbalancer.server.port=9091"
|
||||
- "traefik.http.routers.authelia.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.authelia.tls=true"
|
||||
- "traefik.http.middlewares.authelia.forwardauth.address=http://authelia:9091/api/authz/forward-auth?authelia_url=https://authelia.${DOMAIN}" # yamllint disable-line rule:line-length
|
||||
- "traefik.http.middlewares.authelia.forwardauth.trustForwardHeader=true"
|
||||
- "traefik.http.middlewares.authelia.forwardauth.authResponseHeaders=Remote-User,Remote-Groups,Remote-Name,Remote-Email" # yamllint disable-line rule:line-length
|
||||
secrets:
|
||||
- authelia_jwt_secret
|
||||
- authelia_oidc_pem_secret
|
||||
- authelia_oidc_hamc_secret
|
||||
- authelia_oidc_key_secret
|
||||
- authelia_session_secret
|
||||
- authelia_storage_postgres_password
|
||||
- authelia_notifier_smtp_password
|
||||
- authelia_duo_api_secret_key
|
||||
- authelia_storage_encryption_key_file
|
||||
|
||||
ddns-updater:
|
||||
image: qmcgaw/ddns-updater:2.7
|
||||
container_name: ddns-updater
|
||||
restart: always
|
||||
networks:
|
||||
- backend
|
||||
ports:
|
||||
- 8000:8000/tcp
|
||||
volumes:
|
||||
- ${DIR_LOCAL}/ddns-updater:/updater/data
|
||||
environment:
|
||||
PUID: 1000
|
||||
PGID: 1004
|
||||
TZ: ${TZ}
|
||||
PERIOD: 1h
|
||||
UPDATE_COOLDOWN_PERIOD: 5m
|
||||
PUBLICIP_DNS_TIMEOUT: 3s
|
||||
HTTP_TIMEOUT: 10s
|
||||
# Web UI
|
||||
LISTENING_PORT: 8000
|
||||
# Backup
|
||||
BACKUP_PERIOD: 96h # 0 to disable
|
||||
BACKUP_DIRECTORY: /updater/data/backups
|
||||
# Other
|
||||
LOG_LEVEL: info
|
||||
SHOUTRRR_ADDRESSES: $TGRAM_SHOUTRRR_ADDRESS
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.ddnsupdater.rule=Host(`ddns.${DOMAIN}`)"
|
||||
- "traefik.http.routers.ddnsupdater.entrypoints=web-secure"
|
||||
- "traefik.http.services.ddnsupdater.loadbalancer.server.port=8000"
|
||||
- "traefik.http.routers.ddnsupdater.middlewares=chain-authelia@file"
|
||||
- "traefik.http.routers.ddnsupdater.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.ddnsupdater.tls=true"
|
||||
|
||||
|
||||
adguard:
|
||||
container_name: adguard
|
||||
image: adguard/adguardhome:v0.107.52
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
docker_vlan:
|
||||
ipv4_address: 10.0.0.204
|
||||
ports:
|
||||
- 53/udp
|
||||
- 67/udp
|
||||
- 68/tcp
|
||||
- 68/udp
|
||||
- 80/tcp
|
||||
- 443/tcp
|
||||
- 853/tcp
|
||||
- 3000/tcp
|
||||
volumes:
|
||||
- ./adguard/conf:/opt/adguardhome/conf
|
||||
- ./adguard/work:/opt/adguardhome/work
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.services.adguard.loadbalancer.server.port=80"
|
||||
- "traefik.http.routers.adguard.entrypoints=web-secure"
|
||||
- "traefik.http.routers.adguard.rule=Host(`adguard.${DOMAIN}`)"
|
||||
- "traefik.http.routers.adguard.middlewares=chain-authelia@file"
|
||||
- "traefik.http.routers.adguard.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.adguard.tls=true"
|
||||
|
||||
|
||||
######################### DATABASE ############################
|
||||
#
|
||||
# DATABASE
|
||||
#
|
||||
###############################################################
|
||||
|
||||
db:
|
||||
image: mariadb:10.5.6
|
||||
container_name: mariaDB
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
restart: always
|
||||
networks:
|
||||
- backend
|
||||
volumes:
|
||||
- ${DIR_LOCAL}/database:/var/lib/mysql:rw
|
||||
ports:
|
||||
- "3307:3306"
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=false"
|
||||
environment:
|
||||
- PUID=${UID}
|
||||
- PGID=${GID}
|
||||
- TZ=${TZ}
|
||||
- MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD}
|
||||
- MYSQL_USER=${MYSQL_USERNAME}
|
||||
- MYSQL_PASSWORD=${MYSQL_PASSWORD}
|
||||
|
||||
web_db:
|
||||
image: adminer
|
||||
container_name: adminer
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
restart: always
|
||||
networks:
|
||||
- backend
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.middlewares.webdb-mid.ipallowlist.sourcerange=127.0.0.1/32, 10.0.0.1/24"
|
||||
- "traefik.http.routers.webdb-secure.middlewares=webdb-mid"
|
||||
- "traefik.http.routers.webdb-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.webdb-secure.rule=Host(`webdb.${DOMAIN}`)"
|
||||
- "traefik.http.routers.webdb-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.webdb-secure.tls=true"
|
||||
- "traefik.http.services.webdb-service.loadbalancer.server.port=8080"
|
||||
depends_on:
|
||||
- db
|
||||
environment:
|
||||
- MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD}
|
||||
- MYSQL_USER=${MYSQL_USERNAME}
|
||||
- MYSQL_PASSWORD=${MYSQL_PASSWORD}
|
||||
- PMA_ARBITRARY=1
|
||||
|
||||
postgres:
|
||||
image: postgres:15
|
||||
container_name: postgres
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
restart: always
|
||||
networks:
|
||||
- backend
|
||||
volumes:
|
||||
- ${DIR_LOCAL}/database_pg/data:/var/lib/postgresql/data
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=false"
|
||||
environment:
|
||||
- PUID=${UID}
|
||||
- PGID=${GID}
|
||||
- TZ=${TZ}
|
||||
- POSTGRES_USER=${POSTGRES_USER}
|
||||
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
|
||||
|
||||
|
||||
pgadmin:
|
||||
container_name: pgadmin_container
|
||||
image: dpage/pgadmin4
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
environment:
|
||||
PUID: ${UID}
|
||||
PGID: ${GID}
|
||||
TZ: ${TZ}
|
||||
PGADMIN_DEFAULT_EMAIL: ${PGADMIN_DEFAULT_EMAIL}
|
||||
PGADMIN_DEFAULT_PASSWORD: ${PGADMIN_DEFAULT_PASSWORD}
|
||||
volumes:
|
||||
- ${DIR}/database_pg/pgadmin:/root/.pgadmin
|
||||
networks:
|
||||
- backend
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- postgres
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
#- "traefik.http.middlewares.webpg-mid.ipwhitelist.sourcerange=127.0.0.1/32, 10.0.0.1/24"
|
||||
# # # - "traefik.http.routers.webpg-secure.middlewares=webpg-mid"
|
||||
- "traefik.http.routers.webpg-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.webpg-secure.rule=Host(`webpg.${DOMAIN}`)"
|
||||
- "traefik.http.routers.webpg-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.webpg-secure.tls=true"
|
||||
#- "traefik.http.services.webpg-service.loadbalancer.server.port=8080"
|
||||
|
||||
redis:
|
||||
image: redis:7.0-alpine
|
||||
container_name: redis
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
restart: always
|
||||
networks:
|
||||
- backend
|
||||
volumes:
|
||||
- ${DIR_LOCAL}/redis:/var/lib/redis
|
||||
#entrypoint: redis-server --appendonly yes --requirepass $REDIS_PASSWORD --maxmemory 512mb --maxmemory-policy allkeys-lru
|
||||
labels:
|
||||
- diun.enable=true
|
||||
|
||||
######################### HOME ############################
|
||||
#
|
||||
# HOME
|
||||
#
|
||||
###############################################################
|
||||
|
||||
ha:
|
||||
container_name: ha
|
||||
image: homeassistant/home-assistant:2026.1
|
||||
restart: always
|
||||
privileged: true
|
||||
networks:
|
||||
backend:
|
||||
docker_vlan:
|
||||
ipv4_address: 10.0.0.203
|
||||
depends_on:
|
||||
- postgres
|
||||
ports:
|
||||
- 8123:8123
|
||||
volumes:
|
||||
- ./ha/config:/config
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
environment:
|
||||
- PUID=${UUID}
|
||||
- PGID=${PGID}
|
||||
- TZ=${TZ}
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.ha.entrypoints=web-secure"
|
||||
- "traefik.http.routers.ha.rule=Host(`ha.${DOMAIN}`)"
|
||||
- "traefik.http.routers.ha.middlewares=chain-no-auth@file"
|
||||
- "traefik.http.routers.ha.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.ha.tls=true"
|
||||
- "traefik.http.services.ha.loadbalancer.server.port=8123"
|
||||
|
||||
hassconf:
|
||||
container_name: hassconf
|
||||
image: causticlab/hass-configurator-docker:0.5.2
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- backend
|
||||
ports:
|
||||
- 3218:3218
|
||||
volumes:
|
||||
- ./ha/hass_config:/config
|
||||
- ./ha/config:/hass-config
|
||||
depends_on:
|
||||
- ha
|
||||
labels:
|
||||
- diun.enable=true
|
||||
environment:
|
||||
- PUID=${UUID}
|
||||
- PGID=${PGID}
|
||||
- TZ=${TZ}
|
||||
|
||||
appdaemon:
|
||||
container_name: appdaemon
|
||||
image: acockburn/appdaemon:4.4.2
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- ha
|
||||
labels:
|
||||
- diun.enable=true
|
||||
volumes:
|
||||
- ./ha/appdaemon_config:/conf
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
ports:
|
||||
- 5050:5050
|
||||
networks:
|
||||
- backend
|
||||
environment:
|
||||
- PUID=${UUID}
|
||||
- PGID=${PGID}
|
||||
- TZ=${TZ}
|
||||
# The following values ar overridden in docker-compose.override.yml
|
||||
- HA_URL=http://ha:8123
|
||||
- DASH_URL=http://appdaemon:5050
|
||||
- TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJmZmM0YTI1ZjVlYWM0NGY5OTA3OGFmOWJiMTJmYmUzZCIsImlhdCI6MTY5MzczMDQwNSwiZXhwIjoyMDA5MDkwNDA1fQ.YVH8WhH6FMvTkecJ-taCACP6kVG9is2hHmTR3tk3cns
|
||||
|
||||
zwave-js:
|
||||
container_name: zwave-js
|
||||
image: zwavejs/zwave-js-ui:9
|
||||
restart: always
|
||||
tty: true
|
||||
stop_signal: SIGINT
|
||||
environment:
|
||||
- SESSION_SECRET=${ZWAVE_SECRET}
|
||||
- ZWAVEJS_EXTERNAL_CONFIG=/usr/src/app/store/.config-db
|
||||
- PUID=${UUID}
|
||||
- PGID=${PGID}
|
||||
- TZ=${TZ}
|
||||
networks:
|
||||
- backend
|
||||
devices:
|
||||
- '/dev/serial/by-id/usb-0658_0200-if00:/dev/zwave'
|
||||
volumes:
|
||||
- ./ha/zwave-config:/usr/src/app/store
|
||||
ports:
|
||||
- '8091:8091' # port for web interface
|
||||
- '3002:3000' # port for Z-Wave JS websocket server
|
||||
labels:
|
||||
- diun.enable=true
|
||||
|
||||
esphome:
|
||||
container_name: esphome
|
||||
image: ghcr.io/esphome/esphome:2025.7.3
|
||||
volumes:
|
||||
- ./esphome:/config
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
restart: always
|
||||
ports:
|
||||
- 6052:6052
|
||||
- 6053:6053
|
||||
# privileged: true # Testa utan
|
||||
environment:
|
||||
- PUID=${UUID}
|
||||
- PGID=${PGID}
|
||||
- TZ=${TZ}
|
||||
- USERNAME=${ESPHOME_USERNAME}
|
||||
- PASSWORD=${ESPHOME_PASSWORD}
|
||||
networks:
|
||||
- backend
|
||||
labels:
|
||||
- diun.enable=true
|
||||
|
||||
evcc:
|
||||
command:
|
||||
- evcc
|
||||
container_name: evcc
|
||||
image: evcc/evcc:0.300.3
|
||||
ports:
|
||||
- 7070:7070/tcp
|
||||
volumes:
|
||||
- "./evcc/evcc.yaml:/etc/evcc.yaml:ro"
|
||||
- ./evcc/evcc:/root/.evcc
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- backend
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.evcc.entrypoints=web-secure"
|
||||
- "traefik.http.routers.evcc.rule=Host(`box.${DOMAIN}`)"
|
||||
- "traefik.http.services.evcc.loadbalancer.server.port=7070"
|
||||
- "traefik.http.routers.evcc.middlewares=chain-no-auth@file"
|
||||
- "traefik.http.routers.evcc.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.evcc.tls=true"
|
||||
|
||||
grafana:
|
||||
image: grafana/grafana:10.3.1
|
||||
container_name: grafana
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "5"
|
||||
restart: always
|
||||
networks:
|
||||
- backend
|
||||
volumes:
|
||||
- ./grafana/grafana.ini:/etc/grafana/grafana.ini
|
||||
# Data persistency
|
||||
# sudo mkdir -p /srv/docker/grafana/data; chown 472:472 /srv/docker/grafana/data
|
||||
- "${DIR_LOCAL}/grafana:/var/lib/grafana"
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.grafana-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.grafana-secure.rule=Host(`data.${DOMAIN}`)"
|
||||
- "traefik.http.routers.grafana-secure.middlewares=chain-no-auth@file"
|
||||
- "traefik.http.routers.grafana-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.grafana-secure.tls=true"
|
||||
|
||||
node-red:
|
||||
image: nodered/node-red:4.1
|
||||
# command: ["node-red", "--safe"]
|
||||
container_name: "node-red"
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
networks:
|
||||
- backend
|
||||
environment:
|
||||
- TZ=${TZ}
|
||||
ports:
|
||||
- "1881:1880"
|
||||
#devices:
|
||||
#- /dev/ttyAMA0
|
||||
restart: unless-stopped
|
||||
user: ${UID}
|
||||
volumes:
|
||||
- ${DIR_LOCAL}/nodered:/data
|
||||
- ./node-red/settings.js:/data/settings.js
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.services.node-red-service.loadbalancer.server.port=1880"
|
||||
- "traefik.http.routers.node-red-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.node-red-secure.rule=Host(`nodered.${DOMAIN}`)"
|
||||
- "traefik.http.routers.node-red-secure.middlewares=chain-authelia@file"
|
||||
- "traefik.http.routers.node-red-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.node-red-secure.tls=true"
|
||||
|
||||
nextcloud:
|
||||
container_name: nextcloud
|
||||
image: nextcloud:28
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "5m"
|
||||
max-file: "3"
|
||||
restart: always
|
||||
ports:
|
||||
- "8009:80"
|
||||
networks:
|
||||
- backend
|
||||
depends_on:
|
||||
- redis
|
||||
- postgres
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.nextcloud.entrypoints=web-secure"
|
||||
- "traefik.http.routers.nextcloud.middlewares=nextcloud,nextcloud_redirect"
|
||||
- "traefik.http.routers.nextcloud.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.nextcloud.rule=Host(`moln.${DOMAIN}`)"
|
||||
- "traefik.http.middlewares.nextcloud.headers.customFrameOptionsValue=ALLOW-FROM https://milvert.com"
|
||||
- "traefik.http.middlewares.nextcloud.headers.contentSecurityPolicy=frame-ancestors 'self' milvert.com"
|
||||
- "traefik.http.middlewares.nextcloud.headers.stsSeconds=155520011"
|
||||
- "traefik.http.middlewares.nextcloud.headers.stsIncludeSubdomains=true"
|
||||
- "traefik.http.middlewares.nextcloud.headers.stsPreload=true"
|
||||
- "traefik.http.middlewares.nextcloud.headers.customFrameOptionsValue=SAMEORIGIN"
|
||||
- "traefik.http.middlewares.nextcloud.headers.referrerPolicy=no-referrer"
|
||||
- "traefik.http.middlewares.nextcloud.headers.browserXSSFilter=true"
|
||||
- "traefik.http.middlewares.nextcloud.headers.contentTypeNosniff=true"
|
||||
- "traefik.http.middlewares.nextcloud_redirect.redirectregex.regex=/.well-known/(card|cal)dav"
|
||||
- "traefik.http.middlewares.nextcloud_redirect.redirectregex.replacement=/remote.php/dav/"
|
||||
environment:
|
||||
- POSTGRES_DB=nextcloud
|
||||
- POSTGRES_USER=nextcloud
|
||||
- POSTGRES_PASSWORD=bajsa123
|
||||
- POSTGRES_HOST=postgres
|
||||
- NEXTCLOUD_ADMIN_USER=admin
|
||||
- NEXTCLOUD_ADMIN_PASSWORD=bajsa123
|
||||
- NEXTCLOUD_TRUSTED_DOMAINS=moln.milvert.com
|
||||
- REDIS_HOST=redis
|
||||
- TRUSTED_PROXIES=172.19.0.0/16
|
||||
- OVERWRITECLIURL=https://moln.milvert.com
|
||||
- OVERWRITEPROTOCOL=https
|
||||
- OVERWRITEHOST=moln.milvert.com
|
||||
volumes:
|
||||
- ${DIR_LOCAL}/nextcloud:/var/www/html
|
||||
- ./nextcloud/cronjob:/var/spool/cron/crontabs/www-data
|
||||
- /srv/owncloud:/var/www/html/data
|
||||
- /media/NAS:/media/NAS
|
||||
- /mnt/gunnar:/media/gunnar
|
||||
|
||||
gitea:
|
||||
image: gitea/gitea:1.21
|
||||
container_name: gitea
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "5"
|
||||
networks:
|
||||
- backend
|
||||
restart: always
|
||||
environment:
|
||||
- USER_UID=1001
|
||||
- USER_GID=1005
|
||||
volumes:
|
||||
#- /var/lib/gitea:/data
|
||||
- ${DIR}/gitea:/data
|
||||
- ./gitea/app.ini:/data/gitea/conf/app.ini
|
||||
- /home/git/.ssh:/data/git/.ssh
|
||||
- /etc/timezone:/etc/timezone:ro
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
ports:
|
||||
- "127.0.0.1:2222:22"
|
||||
- "3000:3000"
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.services.gitea-service.loadbalancer.server.port=3000"
|
||||
- "traefik.http.routers.gitea-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.gitea-secure.rule=Host(`gitea.${DOMAIN}`)"
|
||||
- "traefik.http.routers.gitea-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.gitea-secure.tls=true"
|
||||
- "traefik.http.routers.gitea-secure.middlewares=chain-no-auth@file"
|
||||
|
||||
|
||||
uptime_kuma:
|
||||
image: louislam/uptime-kuma
|
||||
container_name: uptime_kuma
|
||||
networks:
|
||||
- backend
|
||||
environment:
|
||||
- ADMIN_PASSWORD=${UPTIME_KUMA_PASSWORD}
|
||||
- ADMIN_EMAIL=${UPTIME_KUMA_USER}
|
||||
restart: always
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
- ${DIR_LOCAL}/uptime_kuma:/data
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.services.uptime-service.loadbalancer.server.port=3001"
|
||||
- "traefik.http.routers.uptime-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.uptime-secure.rule=Host(`uptime.${DOMAIN}`)"
|
||||
- "traefik.http.routers.uptime-secure.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.uptime-secure.tls=true"
|
||||
- "traefik.http.routers.uptime-secure.middlewares=chain-authelia@file"
|
||||
healthcheck:
|
||||
disable: true
|
||||
######################### WEB ############################
|
||||
#
|
||||
# WEB
|
||||
#
|
||||
###############################################################
|
||||
|
||||
librespeed:
|
||||
image: ghcr.io/linuxserver/librespeed:5.2.5
|
||||
container_name: librespeed
|
||||
environment:
|
||||
- PUID=${UUID}
|
||||
- PGID=${PGID}
|
||||
- TZ=${TZ}
|
||||
- PASSWORD=${LIBRESPEED_PW}
|
||||
- CUSTOM_RESULTS=false
|
||||
- DB_TYPE=sqlite
|
||||
volumes:
|
||||
- ./librespeed/config:/config
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- backend
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.routers.librespeed.entrypoints=web-secure"
|
||||
- "traefik.http.routers.librespeed.rule=Host(`librespeed.${DOMAIN}`)"
|
||||
- "traefik.http.routers.librespeed.middlewares=chain-authelia@file"
|
||||
- "traefik.http.routers.librespeed.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.librespeed.tls=true"
|
||||
|
||||
|
||||
jelu:
|
||||
image: wabayang/jelu
|
||||
container_name: jelu
|
||||
environment:
|
||||
- PUID=${UUID}
|
||||
- PGID=${PGID}
|
||||
- TZ=${TZ}
|
||||
ports:
|
||||
# The HTTP port
|
||||
- 11111:11111
|
||||
networks:
|
||||
- backend
|
||||
volumes:
|
||||
- ./jelu/config:/config
|
||||
- ${DIR_LOCAL}/jelu/database:/database
|
||||
- ${DIR_LOCAL}/jelu/files/images:/files/images
|
||||
- ${DIR_LOCAL}/jelu/files/imports:/files/imports
|
||||
- /etc/timezone:/etc/timezone:ro
|
||||
restart: unless-stopped
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.services.jelu-service.loadbalancer.server.port=11111"
|
||||
- "traefik.http.routers.jelu.entrypoints=web-secure"
|
||||
- "traefik.http.routers.jelu.rule=Host(`jelu.${DOMAIN}`)"
|
||||
- "traefik.http.routers.jelu.middlewares=chain-no-auth@file"
|
||||
- "traefik.http.routers.jelu.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.jelu.tls=true"
|
||||
|
||||
|
||||
vwsfriend:
|
||||
image: tillsteinbach/vwsfriend:0.24.7
|
||||
container_name: vwfriend
|
||||
ports:
|
||||
- ${VWSFRIEND_PORT-4000}:${VWSFRIEND_PORT-4000}
|
||||
networks:
|
||||
- backend
|
||||
volumes:
|
||||
- ./vwsfriend_data:/config
|
||||
environment:
|
||||
- VWSFRIEND_USERNAME=${VWSFRIEND_USERNAME-admin}
|
||||
- VWSFRIEND_PASSWORD=${VWSFRIEND_PASSWORD}
|
||||
- VWSFRIEND_PORT=${VWSFRIEND_PORT-4000}
|
||||
- WECONNECT_USER
|
||||
- WECONNECT_PASSWORD
|
||||
- WECONNECT_SPIN
|
||||
- WECONNECT_INTERVAL
|
||||
- DATABASE_URL=postgresql://${VW_DB_USER}:${VW_DB_PASSWORD}@${VW_DB_HOSTNAME}:5432/${VW_DB_NAME}
|
||||
- ADDITIONAL_PARAMETERS=--with-database --with-abrp ${ADDITIONAL_PARAMETERS}
|
||||
depends_on:
|
||||
- postgres
|
||||
restart: unless-stopped
|
||||
- ./main/traefik.yml:/etc/traefik/traefik.yml
|
||||
- ./main/static_config.yml:/etc/traefik/static_config.yml
|
||||
# - "./log.json:/etc/traefik/log.json"
|
||||
#- ./acme.json:/acme.json
|
||||
- ./main/letsencrypt:/letsencrypt
|
||||
dns:
|
||||
- 8.8.8.8
|
||||
healthcheck:
|
||||
test: (wget -qO- http://localhost:${VWSFRIEND_PORT-4000}/healthcheck | grep "ok" -q) || exit 1
|
||||
interval: 60s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
secrets:
|
||||
- vwfriend_password
|
||||
- vwconnect_password
|
||||
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.services.vwsfriend-service.loadbalancer.server.port=4000"
|
||||
- "traefik.http.routers.vwsfriend.entrypoints=web-secure"
|
||||
- "traefik.http.routers.vwsfriend.rule=Host(`vw.${DOMAIN}`)"
|
||||
- "traefik.http.routers.vwsfriend.middlewares=chain-no-auth@file"
|
||||
- "traefik.http.routers.vwsfriend.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.vwsfriend.tls=true"
|
||||
|
||||
vwgrafana:
|
||||
image: tillsteinbach/vwsfriend-grafana:0.24.5
|
||||
container_name: vwgrafana
|
||||
ports:
|
||||
- ${GF_SERVER_HTTP_PORT-3001}:${GF_SERVER_HTTP_PORT-3000}
|
||||
networks:
|
||||
backend:
|
||||
volumes:
|
||||
- ./vwfriend_grafana/data:/var/lib/grafana
|
||||
- ./vwfriend_grafana/dashboard:/home/grafana/dummy
|
||||
environment:
|
||||
- PUID=${UUID}
|
||||
- PGID=${PGID}
|
||||
- TZ=${TZ}
|
||||
- GF_SECURITY_ADMIN_USER=${VWSFRIEND_USERNAME-admin}
|
||||
- GF_SECURITY_ADMIN_PASSWORD=${VWSFRIEND_PASSWORD-secret}
|
||||
- GF_SERVER_HTTP_PORT=${GF_SERVER_HTTP_PORT-3000}
|
||||
- DB_USER=${VW_DB_USER-admin}
|
||||
- DB_PASSWORD=${VW_DB_PASSWORD-secret}
|
||||
- DB_HOSTNAME=${vw_DB_HOSTNAME-postgres}
|
||||
- DB_PORT=${DB_PORT-5432}
|
||||
- DB_NAME=${VW_DB_NAME-vwsfriend}
|
||||
- VWSFRIEND_USERNAME=${VWSFRIEND_USERNAME-admin}
|
||||
- VWSFRIEND_PASSWORD=${VWSFRIEND_PASSWORD-secret}
|
||||
- VWSFRIEND_HOSTNAME=${VWSFRIEND_HOSTNAME-vwsfriendbackend}
|
||||
- VWSFRIEND_PORT=${VWSFRIEND_PORT-4000}
|
||||
restart: unless-stopped
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "(wget -qO- http://localhost:${GF_SERVER_HTTP_PORT-3000}/api/health | grep '\"database\": \"ok\"' -q) || exit 1"]
|
||||
interval: 60s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.services.vwgrafana.loadbalancer.server.port=3000"
|
||||
- "traefik.http.routers.vwgrafana.entrypoints=web-secure"
|
||||
- "traefik.http.routers.vwgrafana.rule=Host(`vwgrafana.${DOMAIN}`)"
|
||||
- "traefik.http.routers.vwgrafana.middlewares=chain-no-auth@file"
|
||||
- "traefik.http.routers.vwgrafana.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.vwgrafana.tls=true"
|
||||
|
||||
# weconnect_mqtt:
|
||||
#image: "tillsteinbach/weconnect-mqtt:0.49.1"
|
||||
#container_name: weconnect_mqtt
|
||||
#restart: unless-stopped
|
||||
#networks:
|
||||
#backend:
|
||||
#labels:
|
||||
#- diun.enable=true
|
||||
#environment:
|
||||
#- TZ=$TZ
|
||||
#- LC_ALL=sv_SE
|
||||
#- USER=${WECONNECT_USER}
|
||||
#- PASSWORD=${WECONNECT_PASSWORD}
|
||||
#- BROKER_ADDRESS=mqtt
|
||||
#- ADDITIONAL_PARAMETERS=--mqtt-username simon --mqtt-password bajsa123 --spin 9331 -vv
|
||||
|
||||
carconnectivity:
|
||||
image: "tillsteinbach/carconnectivity-mqtt:latest"
|
||||
container_name: carconnectivity
|
||||
volumes:
|
||||
- ./carconnectivity/carconnectivity.json:/carconnectivity.json
|
||||
- ./carconnectivity/entrypoint.sh:/root/entrypoint.sh:ro
|
||||
- ${DIR_LOCAL}/carconnectivity/state:/state
|
||||
- ${DIR_LOCAL}/carconnectivity/sqlite.db:/carconnectivity.db:rw
|
||||
environment:
|
||||
- "ADDITIONAL_INSTALLS=carconnectivity-plugin-mqtt_homeassistant carconnectivity-plugin-database"
|
||||
- TZ=$TZ
|
||||
- LC_ALL=sv_SE
|
||||
- TMPDIR=/state
|
||||
secrets:
|
||||
- skoda_password
|
||||
- skoda_spin
|
||||
entrypoint: ["/root/entrypoint.sh"]
|
||||
command: ["carconnectivity-mqtt", "/carconnectivity.json"]
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- backend
|
||||
labels:
|
||||
- diun.enable=true
|
||||
|
||||
carconnectivity-grafana:
|
||||
image: "ghcr.io/tillsteinbach/carconnectivity-grafana:edge"
|
||||
container_name: carconnect-grafana
|
||||
volumes:
|
||||
- ${DIR_LOCAL}/carconnect-grafana:/var/lib/grafana
|
||||
- ${DIR_LOCAL}/carconnectivity/sqlite.db:/carconnectivity.db:ro
|
||||
- ./carconnectivity/carconnectivity-sqlite.yaml:/etc/grafana/provisioning/datasources/carconnectivity-sqlite.yml
|
||||
environment:
|
||||
- PUID=1000
|
||||
- PGID=1004
|
||||
- TZ=Europe/Stockholm
|
||||
- GF_SECURITY_ADMIN_USER=admin
|
||||
- GF_SECURITY_ADMIN_PASSWORD_FILE=/run/secrets/carconnect_grafana_pw
|
||||
- GF_PLUGINS_TRUSTED_FILES=/carconnectivity.db
|
||||
secrets:
|
||||
- carconnect_grafana_pw
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "3123:3000"
|
||||
networks:
|
||||
- backend
|
||||
labels:
|
||||
- diun.enable=true
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.services.carconnect-grafana.loadbalancer.server.port=3000"
|
||||
- "traefik.http.routers.carconnect-grafana.entrypoints=web-secure"
|
||||
- "traefik.http.routers.carconnect-grafana.rule=Host(`skoda.${DOMAIN}`)"
|
||||
- "traefik.http.routers.carconnect-grafana.middlewares=chain-no-auth@file"
|
||||
- "traefik.http.routers.carconnect-grafana.tls.certresolver=milvert_dns"
|
||||
- "traefik.http.routers.carconnect-grafana.tls=true"
|
||||
- "traefik.http.routers.traefik.rule=Host(`tra.${DOMAIN}`)"
|
||||
- "traefik.http.routers.traefik-secure.entrypoints=web-secure"
|
||||
- "traefik.http.routers.traefik-secure.rule=Host(`tra.${DOMAIN}`)"
|
||||
- "traefik.http.routers.traefik-secure.tls.certresolver=milvert"
|
||||
- "traefik.http.routers.traefik-secure.tls=true"
|
||||
- "traefik.http.services.traefik-service.loadbalancer.server.port=8080"
|
||||
- "traefik.http.routers.traefik-secure.middlewares=traefik-auth"
|
||||
- "traefik.http.middlewares.traefik-auth.basicauth.users=simon:$$apr1$$o3vIe.DX$$3XExiBMe8rUo3HujDSYEo0"
|
||||
|
||||
networks:
|
||||
frontend:
|
||||
|
|
@ -880,19 +87,3 @@ networks:
|
|||
backend:
|
||||
external: false
|
||||
|
||||
# MACVLAN guide: https://blog.oddbit.com/post/2018-03-12-using-docker-macvlan-networks/
|
||||
# ip link add vlan-shim link eno1 type macvlan mode bridge
|
||||
# ip addr add 10.0.0.223/32 dev vlan-shim
|
||||
# ip link set vlan-shim up
|
||||
# ip route add 10.0.0.192/27 dev vlan-shim
|
||||
docker_vlan:
|
||||
external: true
|
||||
driver: macvlan
|
||||
driver_opts:
|
||||
parent: eno1
|
||||
ipam:
|
||||
config:
|
||||
- subnet: 10.0.0.200/27
|
||||
|
||||
volumes:
|
||||
vwfriend_grafana:
|
||||
|
|
|
|||
|
|
@ -1,21 +0,0 @@
|
|||
esphome:
|
||||
name: $devicename
|
||||
friendly_name: $friendly_name
|
||||
|
||||
esp32:
|
||||
board: esp32-s3-devkitc-1
|
||||
flash_size: 16MB
|
||||
framework:
|
||||
type: esp-idf
|
||||
|
||||
psram:
|
||||
mode: octal
|
||||
speed: 80MHz
|
||||
|
||||
|
||||
|
||||
packages:
|
||||
sensors: !include nfc-playbox/sensors.yaml
|
||||
hw: !include nfc-playbox/hw.yaml
|
||||
images: !include nfc-playbox/images.yaml
|
||||
ui: !include nfc-playbox/ui.yaml
|
||||
|
|
@ -1,27 +0,0 @@
|
|||
|
||||
# Enable logging
|
||||
logger:
|
||||
|
||||
# Enable Home Assistant API
|
||||
api:
|
||||
encryption:
|
||||
key: !secret esphome_api_key_nfc1
|
||||
|
||||
|
||||
ota:
|
||||
- platform: esphome
|
||||
password: !secret esphome_ota_pass_nfc1
|
||||
|
||||
wifi:
|
||||
ssid: !secret wifi_ssid
|
||||
password: !secret wifi_password
|
||||
power_save_mode: none
|
||||
output_power: 17dB
|
||||
use_address: 10.0.3.29
|
||||
domain: .milvert.com
|
||||
# Enable fallback hotspot (captive portal) in case wifi connection fails
|
||||
ap:
|
||||
ssid: ${friendly_name} Hotspot
|
||||
password: !secret esphome_fallback_ap_password
|
||||
|
||||
captive_portal:
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
# Gitignore settings for ESPHome
|
||||
# This is an example and may include too much for your use-case.
|
||||
# You can modify this file to suit your needs.
|
||||
/.esphome/
|
||||
/secrets.yaml
|
||||
|
|
@ -1,27 +0,0 @@
|
|||
substitutions:
|
||||
devicename: nfc-playbox-1
|
||||
friendly_name: NFC Playbox_1
|
||||
SCREEN_W: "320"
|
||||
SCREEN_H: "240"
|
||||
|
||||
RIGHT_COL_W: "80"
|
||||
ICON_W: "32"
|
||||
ICON_H: "32"
|
||||
MARGIN: "8"
|
||||
HIT_PAD: "6"
|
||||
|
||||
globals:
|
||||
- id: current_image
|
||||
type: int
|
||||
initial_value: '0'
|
||||
|
||||
- id: show_volume
|
||||
type: bool
|
||||
initial_value: 'false'
|
||||
|
||||
- id: local_vol
|
||||
type: float
|
||||
initial_value: '0.0'
|
||||
|
||||
<<: !include .base.yaml
|
||||
<<: !include .base.nfc-playbox.yaml
|
||||
|
|
@ -1,33 +0,0 @@
|
|||
|
||||
spi:
|
||||
- id: spi_bus
|
||||
clk_pin: 39 # LCD_SCLK
|
||||
mosi_pin: 38 # LCD_MOSI
|
||||
miso_pin: 40 # (MISO finns men används ej av LCD)
|
||||
|
||||
- id: spi_bus_rc522
|
||||
clk_pin: 9
|
||||
mosi_pin: 14
|
||||
miso_pin: 13
|
||||
|
||||
i2c:
|
||||
- sda: 48 # TP_SDA
|
||||
scl: 47 # TP_SCL
|
||||
id: i2c_bus
|
||||
scan: true
|
||||
frequency: 400kHz
|
||||
|
||||
|
||||
time:
|
||||
- platform: sntp
|
||||
id: esptime
|
||||
timezone: "Europe/Stockholm"
|
||||
|
||||
|
||||
light:
|
||||
- platform: monochromatic
|
||||
name: "LCD Backlight"
|
||||
id: backlight
|
||||
default_transition_length: 0s
|
||||
output: lcd_bl
|
||||
restore_mode: ALWAYS_ON
|
||||
|
|
@ -1,20 +0,0 @@
|
|||
image:
|
||||
- file: "nfc-playbox/images/spellista_dolly.jpeg"
|
||||
id: spellista_dolly
|
||||
type: RGB
|
||||
|
||||
- file: "nfc-playbox/images/spellista_godnattstund.jpg"
|
||||
id: spellista_godnattstund
|
||||
type: RGB
|
||||
|
||||
- file: "nfc-playbox/images/spellista_disco.jpg"
|
||||
id: spellista_disco
|
||||
type: RGB
|
||||
|
||||
- file: "nfc-playbox/images/spellista_rosahelikopter.jpg"
|
||||
id: spellista_rosahelikopter
|
||||
type: RGB
|
||||
|
||||
- file: "nfc-playbox/images/background.jpg"
|
||||
id: background
|
||||
type: RGB
|
||||
|
Before Width: | Height: | Size: 3.3 KiB |
|
Before Width: | Height: | Size: 657 B |
|
Before Width: | Height: | Size: 645 B |
|
Before Width: | Height: | Size: 403 B |
|
Before Width: | Height: | Size: 27 KiB |
|
Before Width: | Height: | Size: 20 KiB |
|
Before Width: | Height: | Size: 12 KiB |
|
Before Width: | Height: | Size: 21 KiB |
|
|
@ -1,129 +0,0 @@
|
|||
sensor:
|
||||
- platform: wifi_signal
|
||||
name: "NFC Playbox WiFi RSSI"
|
||||
update_interval: 30s
|
||||
|
||||
- platform: uptime
|
||||
name: "NFC Playbox Uptime"
|
||||
|
||||
- platform: homeassistant
|
||||
id: spotvol
|
||||
entity_id: media_player.ada
|
||||
attribute: volume_level
|
||||
on_value:
|
||||
then:
|
||||
- lambda: |-
|
||||
ESP_LOGI("vol", "HA volume_level x=%.3f", (float)x);
|
||||
if (!isnan(x)) {
|
||||
id(local_vol) = x;
|
||||
ESP_LOGI("vol", "local_vol updated to %.3f", id(local_vol));
|
||||
} else {
|
||||
ESP_LOGW("vol", "HA volume_level is NaN (ignoring)");
|
||||
}
|
||||
- component.update: display_main
|
||||
# filters:
|
||||
# - multiply: 100
|
||||
|
||||
binary_sensor:
|
||||
- platform: homeassistant
|
||||
entity_id: input_boolean.esp32_spotify_volume
|
||||
id: display_volume
|
||||
|
||||
script:
|
||||
- id: wake_screen
|
||||
mode: restart
|
||||
then:
|
||||
if:
|
||||
condition:
|
||||
light.is_off: backlight
|
||||
then:
|
||||
- light.turn_on:
|
||||
id: backlight
|
||||
brightness: 100%
|
||||
transition_length: 0s
|
||||
- delay: 60s
|
||||
- light.turn_off: backlight
|
||||
- lambda: |-
|
||||
id(current_image) = 0;
|
||||
- component.update: display_main
|
||||
|
||||
- id: send_volume_debounced
|
||||
mode: restart
|
||||
then:
|
||||
- delay: 400ms
|
||||
- homeassistant.service:
|
||||
service: media_player.volume_set
|
||||
data:
|
||||
entity_id: media_player.ada
|
||||
volume_level: !lambda "ESP_LOGI(\"vol\", \"debounced send volume_set=%.3f\", id(local_vol)); return id(local_vol);"
|
||||
|
||||
- id: volume_flash_hide
|
||||
mode: restart
|
||||
then:
|
||||
- delay: 5s
|
||||
- lambda: |-
|
||||
id(show_volume) = false;
|
||||
- component.update: display_main
|
||||
|
||||
- id: set_vol_and_flash
|
||||
mode: restart
|
||||
parameters:
|
||||
level: float
|
||||
then:
|
||||
- lambda: |-
|
||||
float clamped = std::max(0.0f, std::min(0.4f, level));
|
||||
ESP_LOGI("vol", "set_vol_and_flash target=%.3f (clamped)", clamped);
|
||||
id(local_vol) = clamped;
|
||||
id(show_volume) = true;
|
||||
ESP_LOGI("vol", "show_volume=true, local_vol=%.3f", id(local_vol));
|
||||
- component.update: display_main
|
||||
- script.execute: send_volume_debounced
|
||||
- script.execute: volume_flash_hide
|
||||
|
||||
- id: volume_up_trigger
|
||||
then:
|
||||
- lambda: |-
|
||||
float step = 0.02f;
|
||||
float target = id(local_vol) + step;
|
||||
ESP_LOGI("vol", "volume_up_trigger step=%.2f from=%.3f -> %.3f", step, id(local_vol), target);
|
||||
id(set_vol_and_flash)->execute(target);
|
||||
|
||||
- id: volume_down_trigger
|
||||
then:
|
||||
- lambda: |-
|
||||
float step = 0.02f;
|
||||
float target = id(local_vol) - step;
|
||||
ESP_LOGI("vol", "volume_down_trigger step=%.2f from=%.3f -> %.3f", step, id(local_vol), target);
|
||||
id(set_vol_and_flash)->execute(target);
|
||||
|
||||
|
||||
rc522_spi:
|
||||
spi_id: spi_bus_rc522
|
||||
cs_pin: 12
|
||||
reset_pin: 11
|
||||
update_interval: 3s
|
||||
on_tag:
|
||||
then:
|
||||
- homeassistant.tag_scanned: !lambda 'return x;'
|
||||
- lambda: |-
|
||||
std::string uid = x;
|
||||
ESP_LOGI("nfc", "Tag scanned: %s", uid.c_str());
|
||||
//
|
||||
if (uid == "04-A1-4E-94-2E-02-89") { // spellista_dolly
|
||||
id(current_image) = 1;
|
||||
} else if (uid == "04-01-BA-52-2E-02-89") { // spellista_godnattstund
|
||||
id(current_image) = 2;
|
||||
} else if (uid == "04-51-0C-91-2E-02-89") { // spellista_disco
|
||||
id(current_image) = 3;
|
||||
} else if (uid == "04-01-DF-98-2E-02-89") { // spellista_rosahelikopter
|
||||
id(current_image) = 4;
|
||||
} else {
|
||||
id(current_image) = 0;
|
||||
}
|
||||
ESP_LOGI("nfc", "current_image set to %d", id(current_image));
|
||||
- component.update: display_main
|
||||
|
||||
output:
|
||||
- platform: ledc
|
||||
pin: 1
|
||||
id: lcd_bl
|
||||
|
|
@ -1,151 +0,0 @@
|
|||
touchscreen:
|
||||
- platform: cst816
|
||||
id: tp
|
||||
i2c_id: i2c_bus
|
||||
interrupt_pin: 46 # TP_INT
|
||||
reset_pin: 21 # TP_RESET
|
||||
transform:
|
||||
mirror_x: false
|
||||
mirror_y: true
|
||||
swap_xy: true
|
||||
calibration:
|
||||
x_min: 0
|
||||
x_max: 320
|
||||
y_min: 30
|
||||
y_max: 240
|
||||
on_touch:
|
||||
then:
|
||||
- lambda: |-
|
||||
int px = touch.x;
|
||||
int py = touch.y;
|
||||
ESP_LOGI("touch", "Pixel (%d,%d)", px, py);
|
||||
|
||||
const int SCREEN_W = ${SCREEN_W};
|
||||
const int SCREEN_H = ${SCREEN_H};
|
||||
const int RIGHT_COL_W= ${RIGHT_COL_W};
|
||||
const int X_SPLIT = SCREEN_W - RIGHT_COL_W;
|
||||
const int Y_MID = SCREEN_H / 2;
|
||||
const int CX_RIGHT = X_SPLIT + RIGHT_COL_W / 2;
|
||||
const int CY_UP = Y_MID / 2;
|
||||
const int CY_DOWN = Y_MID + (Y_MID / 2);
|
||||
const int RING_R = 45;
|
||||
|
||||
auto hit = [&](int cx, int cy) {
|
||||
int dx = px - cx;
|
||||
int dy = py - cy;
|
||||
return dx*dx + dy*dy <= RING_R*RING_R;
|
||||
};
|
||||
|
||||
if (hit(CX_RIGHT, CY_UP)) {
|
||||
ESP_LOGI("touch", "VOLUME UP pressed");
|
||||
id(volume_up_trigger).execute();
|
||||
} else if (hit(CX_RIGHT, CY_DOWN)) {
|
||||
id(volume_down_trigger).execute();
|
||||
ESP_LOGI("touch", "VOLUME DOWN pressed");
|
||||
}
|
||||
- script.execute: wake_screen
|
||||
|
||||
display:
|
||||
- platform: ili9xxx
|
||||
id: display_main
|
||||
model: ST7789V
|
||||
cs_pin: 45 # LCD_CS
|
||||
dc_pin: 42 # LCD_DC
|
||||
reset_pin: 0 # LCD_RST
|
||||
spi_id: spi_bus
|
||||
dimensions:
|
||||
width: 240
|
||||
height: 320
|
||||
rotation: 90 # matchar Waveshares exempel (~EXAMPLE_LCD_ROTATION=1)
|
||||
invert_colors: true
|
||||
update_interval: never
|
||||
lambda: |-
|
||||
const int SCREEN_W = ${SCREEN_W};
|
||||
const int SCREEN_H = ${SCREEN_H};
|
||||
const int RIGHT_COL_W = ${RIGHT_COL_W};
|
||||
const int X_SPLIT = SCREEN_W - RIGHT_COL_W;
|
||||
const int Y_MID = SCREEN_H / 2;
|
||||
|
||||
const int CX_RIGHT = X_SPLIT + RIGHT_COL_W / 2;
|
||||
const int CY_UP = Y_MID / 2;
|
||||
const int CY_DOWN = Y_MID + (Y_MID / 2);
|
||||
switch (id(current_image)) {
|
||||
case 1:
|
||||
it.image(0, 0, id(spellista_dolly));
|
||||
break;
|
||||
case 2:
|
||||
it.image(0, 0, id(spellista_godnattstund));
|
||||
break;
|
||||
case 3:
|
||||
it.image(0, 0, id(spellista_disco));
|
||||
break;
|
||||
case 4:
|
||||
it.image(0, 0, id(spellista_rosahelikopter));
|
||||
break;
|
||||
default:
|
||||
// valfri default-bild/blank
|
||||
it.image(0, 0, id(background)); // behåll din gamla default om du vill
|
||||
break;
|
||||
}
|
||||
// Svart background till höger
|
||||
it.filled_rectangle(X_SPLIT, 0, SCREEN_W - 1, SCREEN_H - 1, id(my_black));
|
||||
|
||||
it.line(X_SPLIT, 0, X_SPLIT, SCREEN_H - 1);
|
||||
it.line(X_SPLIT, Y_MID, SCREEN_W - 1, Y_MID);
|
||||
|
||||
|
||||
// Rita minus-cirkel (volym up)
|
||||
it.print(CX_RIGHT, CY_UP, id(font_icon_spotify), id(my_white), TextAlign::CENTER, "\U000F0417");
|
||||
// Rita minus-cirkel (volym ned)
|
||||
it.print(CX_RIGHT, CY_DOWN, id(font_icon_spotify), id(my_white), TextAlign::CENTER, "\U000F0376");
|
||||
|
||||
|
||||
if (id(show_volume)) {
|
||||
ESP_LOGI("display", "VOLUME is %f", id(local_vol));
|
||||
it.filled_circle(230, 122, 35, id(my_green));
|
||||
it.printf(230, 145, id(font40), id(my_black), TextAlign::BOTTOM_CENTER, "%.0f", id(local_vol) * 100.0f);
|
||||
|
||||
// HA_value-> to slow?
|
||||
// it.printf(230, 145, id(font40), id(my_black), TextAlign::BOTTOM_CENTER, "%.0f", id(spotvol).state);
|
||||
}
|
||||
|
||||
color:
|
||||
- id: my_red
|
||||
red: 100%
|
||||
green: 0%
|
||||
- id: my_green
|
||||
red: 11.76%
|
||||
green: 84.31%
|
||||
blue: 37.65%
|
||||
- id: my_white
|
||||
red: 1.0000
|
||||
green: 1.0000
|
||||
blue: 1.0000
|
||||
- id: my_grey
|
||||
red: 0.6000
|
||||
green: 0.6000
|
||||
blue: 0.6000
|
||||
- id: my_black
|
||||
red: 0.0000
|
||||
green: 0.0000
|
||||
blue: 0.0000
|
||||
|
||||
font:
|
||||
- file: "gfonts://Roboto"
|
||||
id: font_small
|
||||
size: 16
|
||||
|
||||
- file: 'gfonts://Roboto'
|
||||
id: font40
|
||||
size: 40
|
||||
glyphs: °.0123456789-%d
|
||||
|
||||
- file: 'fonts/materialdesignicons-webfont.ttf'
|
||||
id: font_icon_spotify
|
||||
size: 75
|
||||
glyphs:
|
||||
- "\U000F040C" # play-circle
|
||||
- "\U000F03E5" # pause-circle
|
||||
- "\U000F0661" # skip-next-circle
|
||||
- "\U000F0376" # minus-circle
|
||||
- "\U000F0417" # plus-circle
|
||||
|
|
@ -1,216 +0,0 @@
|
|||
esphome:
|
||||
name: nfc-playbox-1
|
||||
friendly_name: NFC Playbox_1
|
||||
|
||||
esp32:
|
||||
board: esp32-s3-devkitc-1
|
||||
flash_size: 16MB
|
||||
framework:
|
||||
type: esp-idf
|
||||
|
||||
psram:
|
||||
mode: octal
|
||||
speed: 80MHz
|
||||
|
||||
# Enable logging
|
||||
logger:
|
||||
|
||||
# Enable Home Assistant API
|
||||
api:
|
||||
encryption:
|
||||
key: "OQlmeshTtUZg/iavLExjuNt1H7ywTohWAYozqNym+9M="
|
||||
|
||||
ota:
|
||||
- platform: esphome
|
||||
password: "14106c281b3e5b1ac1da204b7ff99728"
|
||||
|
||||
wifi:
|
||||
ssid: !secret wifi_ssid
|
||||
password: !secret wifi_password
|
||||
power_save_mode: none # stabilare link
|
||||
output_power: 17dB # maxa sändarnivån
|
||||
use_address: 10.0.3.29
|
||||
domain: .milvert.com
|
||||
# Enable fallback hotspot (captive portal) in case wifi connection fails
|
||||
ap:
|
||||
ssid: "Nfc-Playbox-1 Fallback Hotspot"
|
||||
password: "kQAdTCPYabwd"
|
||||
|
||||
captive_portal:
|
||||
|
||||
sensor:
|
||||
- platform: wifi_signal
|
||||
name: "NFC Playbox WiFi RSSI"
|
||||
update_interval: 30s
|
||||
|
||||
- platform: uptime
|
||||
name: "NFC Playbox Uptime"
|
||||
|
||||
script:
|
||||
- id: wake_screen
|
||||
mode: restart
|
||||
then:
|
||||
- light.turn_on:
|
||||
id: backlight
|
||||
brightness: 100%
|
||||
# - delay: 60s
|
||||
# - light.turn_off: backlight
|
||||
|
||||
image:
|
||||
- file: "images/play.png"
|
||||
id: img_playpause
|
||||
type: BINARY
|
||||
|
||||
- file: "images/low-volume.png"
|
||||
id: img_voldown
|
||||
type: BINARY
|
||||
|
||||
- file: "images/high-volume.png"
|
||||
id: img_volup
|
||||
type: BINARY
|
||||
|
||||
|
||||
globals:
|
||||
- id: touch_x
|
||||
type: int
|
||||
initial_value: "0"
|
||||
|
||||
- id: touch_y
|
||||
type: int
|
||||
initial_value: "0"
|
||||
|
||||
# ---- SPI-buss till LCD
|
||||
spi:
|
||||
- id: spi_bus
|
||||
clk_pin: 39 # LCD_SCLK
|
||||
mosi_pin: 38 # LCD_MOSI
|
||||
miso_pin: 40 # (MISO finns men används ej av LCD)
|
||||
|
||||
- id: spi_bus_rc522
|
||||
clk_pin: 13
|
||||
mosi_pin: 14
|
||||
miso_pin: 9
|
||||
|
||||
rc522_spi:
|
||||
spi_id: spi_bus_rc522
|
||||
cs_pin: 12
|
||||
reset_pin: 11
|
||||
update_interval: 1s
|
||||
on_tag:
|
||||
then:
|
||||
- logger.log:
|
||||
format: "RFID Tag UID: %s"
|
||||
args: [ 'x.c_str()' ]
|
||||
- homeassistant.tag_scanned: !lambda 'return x;'
|
||||
|
||||
# ---- Bakgrundsbelysning (GPIO1 via PWM)
|
||||
output:
|
||||
- platform: ledc
|
||||
pin: 1
|
||||
id: lcd_bl
|
||||
|
||||
light:
|
||||
- platform: monochromatic
|
||||
name: "LCD Backlight"
|
||||
id: backlight
|
||||
output: lcd_bl
|
||||
restore_mode: ALWAYS_ON
|
||||
|
||||
# ---- I2C för touch (CST816D)
|
||||
i2c:
|
||||
sda: 48 # TP_SDA
|
||||
scl: 47 # TP_SCL
|
||||
id: i2c_bus
|
||||
scan: true
|
||||
frequency: 400kHz
|
||||
|
||||
touchscreen:
|
||||
- platform: cst816
|
||||
id: tp
|
||||
interrupt_pin: 46 # TP_INT
|
||||
reset_pin: 21 # TP_RESET
|
||||
transform:
|
||||
mirror_x: false
|
||||
mirror_y: true
|
||||
swap_xy: true
|
||||
calibration:
|
||||
x_min: 0
|
||||
x_max: 320
|
||||
y_min: 30 # offset för min display
|
||||
y_max: 240
|
||||
on_touch:
|
||||
then:
|
||||
- lambda: |-
|
||||
const int CELL_SIZE = 40;
|
||||
const int ICON_R = 22; // radie på cirkeln runt ikonen
|
||||
|
||||
// Hjälpfunktion för att kolla träff
|
||||
auto hit_circle = [&](int col, int row, int px, int py) {
|
||||
int cx = (col + 1) * CELL_SIZE;
|
||||
int cy = (row + 1) * CELL_SIZE;
|
||||
int dx = px - cx;
|
||||
int dy = py - cy;
|
||||
return (dx*dx + dy*dy) <= (ICON_R * ICON_R);
|
||||
};
|
||||
- script.execute: wake_screen
|
||||
- logger.log:
|
||||
format: "Touch coordinates: (%d, %d)"
|
||||
args: ["touch.x", "touch.y"]
|
||||
|
||||
substitutions:
|
||||
SCREEN_W: "320" # från loggen: ili9xxx Dimensions: 320 x 240
|
||||
SCREEN_H: "240"
|
||||
ICON_W: "32"
|
||||
ICON_H: "32"
|
||||
MARGIN: "8"
|
||||
HIT_PAD: "6"
|
||||
|
||||
# ---- Display (ST7789T3/ ST7789V 240x320) via moderna 'ili9xxx'
|
||||
display:
|
||||
- platform: ili9xxx
|
||||
id: display_main
|
||||
model: ST7789V
|
||||
cs_pin: 45 # LCD_CS
|
||||
dc_pin: 42 # LCD_DC
|
||||
reset_pin: 0 # LCD_RST
|
||||
spi_id: spi_bus
|
||||
dimensions:
|
||||
width: 240
|
||||
height: 320
|
||||
rotation: 90 # matchar Waveshares exempel (~EXAMPLE_LCD_ROTATION=1)
|
||||
invert_colors: false
|
||||
update_interval: 500ms
|
||||
lambda: |-
|
||||
const int CELL_SIZE = 40;
|
||||
|
||||
auto draw_centered_2x2 = [&](int col, int row, const esphome::display::BaseImage *img) {
|
||||
const int center_x = (col + 1) * CELL_SIZE;
|
||||
const int center_y = (row + 1) * CELL_SIZE;
|
||||
|
||||
// cirkel runt ikonen
|
||||
it.circle(center_x, center_y, 22);
|
||||
|
||||
// rita ikonen (32x32 → offset 16)
|
||||
it.image(center_x - 16, center_y - 16,
|
||||
const_cast<esphome::display::BaseImage*>(img));
|
||||
};
|
||||
|
||||
// PLAY i blocket (1,2)-(2,3)
|
||||
draw_centered_2x2(1, 2, id(img_playpause));
|
||||
|
||||
// VOL UP i blocket (3,2)-(4,3)
|
||||
draw_centered_2x2(3, 2, id(img_volup));
|
||||
|
||||
// VOL DOWN i blocket (5,2)-(6,3)
|
||||
draw_centered_2x2(5, 2, id(img_voldown));
|
||||
|
||||
# ---- Klocka och font (för att visa något)
|
||||
time:
|
||||
- platform: sntp
|
||||
id: esptime
|
||||
timezone: "Europe/Stockholm"
|
||||
|
||||
font:
|
||||
- file: "gfonts://Roboto"
|
||||
id: font_small
|
||||
size: 16
|
||||
|
|
@ -1,634 +0,0 @@
|
|||
# https://community.home-assistant.io/t/awesome-spotify-touch-control-via-ili9341-screen/406328
|
||||
|
||||
logger:
|
||||
|
||||
ota:
|
||||
|
||||
api:
|
||||
|
||||
captive_portal:
|
||||
|
||||
time:
|
||||
platform: homeassistant
|
||||
id: esptime
|
||||
|
||||
spi:
|
||||
clk_pin: GPIO18
|
||||
mosi_pin: GPIO23
|
||||
miso_pin: GPIO19
|
||||
|
||||
font:
|
||||
- file: 'fonts/verdana.ttf'
|
||||
id: font40
|
||||
size: 40
|
||||
glyphs: °.0123456789-%d
|
||||
- file: 'fonts/verdana.ttf'
|
||||
id: font_spot_time
|
||||
size: 12
|
||||
glyphs: :0123456789
|
||||
- file: 'fonts/verdana.ttf'
|
||||
id: font21
|
||||
size: 21
|
||||
glyphs: ['&', '@', '<', '>', '$', '!', ',', '.', '?', '"', '%', '(', ')', '+', '-', '_', ':', '°', '0',
|
||||
'1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E',
|
||||
'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S',
|
||||
'T', 'U', 'V', 'W', 'X', 'Y', 'Z', ' ', 'a', 'b', 'c', 'd', 'e', 'f',
|
||||
'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
|
||||
'u', 'v', 'w', 'x', 'y', 'z','å', 'Ä', 'ä', 'Ö', 'ö', 'Ü', 'ü', '/', '\', '\xab', '\xc3', '\xaf', '''', 'ß' ]
|
||||
- file: 'fonts/verdana.ttf'
|
||||
id: font18
|
||||
size: 18
|
||||
glyphs: ['&', '@', '<', '>', '$', '!', ',', '.', '?', '"', '%', '(', ')', '+', '-', '_', ':', '°', '0',
|
||||
'1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E',
|
||||
'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S',
|
||||
'T', 'U', 'V', 'W', 'X', 'Y', 'Z', ' ', 'a', 'b', 'c', 'd', 'e', 'f',
|
||||
'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
|
||||
'u', 'v', 'w', 'x', 'y', 'z','å', 'Ä', 'ä', 'Ö', 'ö', 'Ü', 'ü', '/', '\', '''', 'ß' ]
|
||||
- file: 'fonts/verdana.ttf'
|
||||
id: font16
|
||||
size: 16
|
||||
glyphs: ['&', '@', '!', ',', '.', '?', '"', '%', '(', ')', '+', '-', '_', ':', '°', '0',
|
||||
'1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E',
|
||||
'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S',
|
||||
'T', 'U', 'V', 'W', 'X', 'Y', 'Z', ' ', 'a', 'b', 'c', 'd', 'e', 'f',
|
||||
'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
|
||||
'u', 'v', 'w', 'x', 'y', 'z','å', 'Ä', 'ä', 'Ö', 'ö', 'Ü', 'ü', '/', '\', '''', 'ß' ]
|
||||
- file: 'fonts/materialdesignicons-webfont.ttf'
|
||||
id: font_icon_spotify
|
||||
size: 75
|
||||
glyphs:
|
||||
- "\U000F040C" # play-circle
|
||||
- "\U000F03E5" # pause-circle
|
||||
- "\U000F0661" # skip-next-circle
|
||||
- "\U000F0376" # minus-circle
|
||||
- "\U000F0417" # plus-circle
|
||||
- file: 'fonts/materialdesignicons-webfont.ttf'
|
||||
id: font_icon_spotify_big
|
||||
size: 80
|
||||
glyphs:
|
||||
- "\U000F04C7" # Spotify Icon
|
||||
- file: 'fonts/materialdesignicons-webfont.ttf'
|
||||
id: font_icon_spotify_infobar
|
||||
size: 30
|
||||
glyphs:
|
||||
- "\U000F04C7" # Spotify Icon
|
||||
- "\U000F0595" # Wetter Icon
|
||||
- file: 'fonts/materialdesignicons-webfont.ttf'
|
||||
id: font_icon_spotify_infobar_s
|
||||
size: 24
|
||||
glyphs:
|
||||
- "\U000F049D" # Shuffle on Icon
|
||||
- "\U000F049E" # Shuffle off Icon
|
||||
|
||||
color:
|
||||
- id: my_red
|
||||
red: 100%
|
||||
green: 0%
|
||||
- id: my_green
|
||||
red: 0%
|
||||
green: 100%
|
||||
blue: 30%
|
||||
- id: my_white
|
||||
red: 1.0000
|
||||
green: 1.0000
|
||||
blue: 1.0000
|
||||
- id: my_grey
|
||||
red: 0.6000
|
||||
green: 0.6000
|
||||
blue: 0.6000
|
||||
- id: my_black
|
||||
red: 0.0000
|
||||
green: 0.0000
|
||||
blue: 0.0000
|
||||
output:
|
||||
# backlight
|
||||
- platform: ledc
|
||||
pin: 15
|
||||
id: gpio_15_backlight_pwm_touch
|
||||
inverted: false
|
||||
|
||||
light:
|
||||
- platform: monochromatic
|
||||
output: gpio_15_backlight_pwm_touch
|
||||
name: "ILI9341 Display Backlight Touch"
|
||||
id: back_light_touch
|
||||
restore_mode: RESTORE_DEFAULT_ON
|
||||
|
||||
text_sensor:
|
||||
- platform: homeassistant
|
||||
id: current_title
|
||||
entity_id: sensor.esp32_media_player_current_title
|
||||
- platform: homeassistant
|
||||
id: current_artist
|
||||
entity_id: sensor.esp32_media_player_current_artist
|
||||
- platform: homeassistant
|
||||
id: current_playlist
|
||||
entity_id: sensor.esp32_media_player_current_playlist
|
||||
- platform: homeassistant
|
||||
id: spotify
|
||||
entity_id: sensor.esp32_media_player_status
|
||||
- platform: homeassistant
|
||||
id: spotpostime
|
||||
entity_id: sensor.esp32_spotify_position_time
|
||||
- platform: homeassistant
|
||||
id: spotdur
|
||||
entity_id: sensor.esp32_spotify_duration
|
||||
sensor:
|
||||
- platform: homeassistant
|
||||
id: spotvol
|
||||
entity_id: sensor.esp32_spotify_volume
|
||||
filters:
|
||||
- multiply: 10
|
||||
- platform: homeassistant
|
||||
id: spotpos
|
||||
entity_id: sensor.esp32_spotify_position
|
||||
- platform: homeassistant
|
||||
id: spotpos2
|
||||
entity_id: sensor.esp32_spotify_position
|
||||
filters:
|
||||
- offset: 78
|
||||
binary_sensor:
|
||||
- platform: homeassistant
|
||||
entity_id: input_boolean.esp32_wetter_screen
|
||||
id: display_wetter
|
||||
- platform: homeassistant
|
||||
entity_id: input_boolean.esp32_spotify_volume
|
||||
id: display_volume
|
||||
- platform: homeassistant
|
||||
entity_id: binary_sensor.esp32_spotify_shuffle
|
||||
id: spotify_shuffle
|
||||
- platform: homeassistant
|
||||
entity_id: input_boolean.esp32_spotify_playlist_menu
|
||||
id: playlist
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen # Start Play/Pause
|
||||
id: touch_play
|
||||
x_min: 35
|
||||
x_max: 110
|
||||
y_min: 2
|
||||
y_max: 77
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
or:
|
||||
- and:
|
||||
- lambda: 'return id(spotify).state == "playing";'
|
||||
- lambda: 'return id(playlist).state;'
|
||||
- lambda: 'return id(display_wetter).state == false;'
|
||||
- and:
|
||||
- lambda: 'return id(spotify).state == "paused";'
|
||||
- lambda: 'return id(playlist).state;'
|
||||
- lambda: 'return id(display_wetter).state == false;'
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: script.esp32_spotify_play_pause
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen # Next Track
|
||||
id: touch_next
|
||||
x_min: 35
|
||||
x_max: 110
|
||||
y_min: 78
|
||||
y_max: 154
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
or:
|
||||
- and:
|
||||
- lambda: 'return id(spotify).state == "playing";'
|
||||
- lambda: 'return id(playlist).state;'
|
||||
- lambda: 'return id(display_wetter).state == false;'
|
||||
- and:
|
||||
- lambda: 'return id(spotify).state == "paused";'
|
||||
- lambda: 'return id(playlist).state;'
|
||||
- lambda: 'return id(display_wetter).state == false;'
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: media_player.media_next_track
|
||||
data:
|
||||
entity_id: media_player.spotify_xxx
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen # Volume -
|
||||
id: touch_minus
|
||||
x_min: 35
|
||||
x_max: 110
|
||||
y_min: 155
|
||||
y_max: 230
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
or:
|
||||
- and:
|
||||
- lambda: 'return id(spotify).state == "playing";'
|
||||
- lambda: 'return id(playlist).state;'
|
||||
- lambda: 'return id(display_wetter).state == false;'
|
||||
- and:
|
||||
- lambda: 'return id(spotify).state == "paused";'
|
||||
- lambda: 'return id(playlist).state;'
|
||||
- lambda: 'return id(display_wetter).state == false;'
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: script.esp32_spotify_volume_down
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen # Volume +
|
||||
id: touch_plus
|
||||
x_min: 35
|
||||
x_max: 110
|
||||
y_min: 232
|
||||
y_max: 295
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
or:
|
||||
- and:
|
||||
- lambda: 'return id(spotify).state == "playing";'
|
||||
- lambda: 'return id(playlist).state;'
|
||||
- lambda: 'return id(display_wetter).state == false;'
|
||||
- and:
|
||||
- lambda: 'return id(spotify).state == "paused";'
|
||||
- lambda: 'return id(playlist).state;'
|
||||
- lambda: 'return id(display_wetter).state == false;'
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: script.esp32_spotify_volume_up
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen # Start Spotify
|
||||
id: touch_info_spot
|
||||
x_min: 195
|
||||
x_max: 239
|
||||
y_min: 275
|
||||
y_max: 319
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
or:
|
||||
- and:
|
||||
- lambda: 'return id(spotify).state != "playing";'
|
||||
- lambda: 'return id(spotify).state != "paused";'
|
||||
- and:
|
||||
- lambda: 'return id(display_wetter).state;'
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: script.esp32_open_spotify
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen # Turn on Display Wetter
|
||||
id: touch_info_wetter
|
||||
x_min: 195
|
||||
x_max: 239
|
||||
y_min: 275
|
||||
y_max: 319
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
or:
|
||||
- and:
|
||||
- lambda: 'return id(spotify).state == "playing";'
|
||||
- lambda: 'return id(playlist).state;'
|
||||
- lambda: 'return id(display_wetter).state == false;'
|
||||
- and:
|
||||
- lambda: 'return id(spotify).state == "paused";'
|
||||
- lambda: 'return id(playlist).state;'
|
||||
- lambda: 'return id(display_wetter).state == false;'
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: script.esp32_open_display_wetter
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen
|
||||
id: touch_info_spot_shuffle_off # Touch Shuffle off
|
||||
x_min: 205
|
||||
x_max: 239
|
||||
y_min: 75
|
||||
y_max: 120
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
or:
|
||||
- and:
|
||||
- lambda: 'return id(spotify).state == "playing";'
|
||||
- lambda: 'return id(playlist).state;'
|
||||
- lambda: 'return id(display_wetter).state == false;'
|
||||
- lambda: 'return id(spotify_shuffle).state == false;'
|
||||
- and:
|
||||
- lambda: 'return id(spotify).state == "paused";'
|
||||
- lambda: 'return id(playlist).state;'
|
||||
- lambda: 'return id(display_wetter).state == false;'
|
||||
- lambda: 'return id(spotify_shuffle).state == false;'
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: media_player.shuffle_set
|
||||
data:
|
||||
shuffle: 'true'
|
||||
entity_id: media_player.spotify_xxx
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen
|
||||
id: touch_info_spot_shuffle_on # Touch Shuffle on
|
||||
x_min: 205
|
||||
x_max: 239
|
||||
y_min: 75
|
||||
y_max: 120
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
or:
|
||||
- and:
|
||||
- lambda: 'return id(spotify).state == "playing";'
|
||||
- lambda: 'return id(playlist).state;'
|
||||
- lambda: 'return id(display_wetter).state == false;'
|
||||
- lambda: 'return id(spotify_shuffle).state;'
|
||||
- and:
|
||||
- lambda: 'return id(spotify).state == "paused";'
|
||||
- lambda: 'return id(playlist).state;'
|
||||
- lambda: 'return id(display_wetter).state == false;'
|
||||
- lambda: 'return id(spotify_shuffle).state;'
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: media_player.shuffle_set
|
||||
data:
|
||||
shuffle: 'false'
|
||||
entity_id: media_player.spotify_xxx
|
||||
#START SPOTIFY PLAYLIST TOUCH
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen # Spotify Icon Playlist
|
||||
id: touch_playlist
|
||||
x_max: 180
|
||||
y_min: 3
|
||||
x_min: 105
|
||||
y_max: 75
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
or:
|
||||
- and:
|
||||
- lambda: 'return id(spotify).state == "playing";'
|
||||
- lambda: 'return id(playlist).state;'
|
||||
- lambda: 'return id(display_wetter).state == false;'
|
||||
- and:
|
||||
- lambda: 'return id(spotify).state == "paused";'
|
||||
- lambda: 'return id(playlist).state;'
|
||||
- lambda: 'return id(display_wetter).state == false;'
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: input_boolean.toggle
|
||||
data:
|
||||
entity_id: input_boolean.esp32_spotify_playlist_menu
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen
|
||||
id: touch_B1
|
||||
x_max: 239
|
||||
y_min: 200
|
||||
x_min: 201
|
||||
y_max: 319
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
binary_sensor.is_off: playlist
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: script.esp32_spotify_playlist_b1
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen
|
||||
id: touch_A1
|
||||
x_max: 239
|
||||
y_min: 75
|
||||
x_min: 201
|
||||
y_max: 199
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
binary_sensor.is_off: playlist
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: script.esp32_spotify_playlist_a1
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen
|
||||
id: touch_B2
|
||||
x_max: 200
|
||||
y_min: 200
|
||||
x_min: 161
|
||||
y_max: 319
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
binary_sensor.is_off: playlist
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: script.esp32_spotify_playlist_b2
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen
|
||||
id: touch_A2
|
||||
x_max: 200
|
||||
y_min: 75
|
||||
x_min: 161
|
||||
y_max: 199
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
binary_sensor.is_off: playlist
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: script.esp32_spotify_playlist_a2
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen
|
||||
id: touch_B3
|
||||
x_max: 160
|
||||
y_min: 200
|
||||
x_min: 121
|
||||
y_max: 319
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
binary_sensor.is_off: playlist
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: script.esp32_spotify_playlist_b3
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen
|
||||
id: touch_A3
|
||||
x_max: 160
|
||||
y_min: 75
|
||||
x_min: 121
|
||||
y_max: 199
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
binary_sensor.is_off: playlist
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: script.esp32_spotify_playlist_a3
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen
|
||||
id: touch_B4
|
||||
x_max: 120
|
||||
y_min: 200
|
||||
x_min: 81
|
||||
y_max: 319
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
binary_sensor.is_off: playlist
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: script.esp32_spotify_playlist_b4
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen
|
||||
id: touch_A4
|
||||
x_max: 120
|
||||
y_min: 75
|
||||
x_min: 81
|
||||
y_max: 199
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
binary_sensor.is_off: playlist
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: script.esp32_spotify_playlist_a4
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen
|
||||
id: touch_B5
|
||||
x_max: 80
|
||||
y_min: 200
|
||||
x_min: 41
|
||||
y_max: 319
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
binary_sensor.is_off: playlist
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: script.esp32_spotify_playlist_b5
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen
|
||||
id: touch_A5
|
||||
x_max: 80
|
||||
y_min: 75
|
||||
x_min: 41
|
||||
y_max: 199
|
||||
on_press:
|
||||
if:
|
||||
condition:
|
||||
binary_sensor.is_off: playlist
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: script.esp32_spotify_playlist_a5
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen
|
||||
id: touch_B6
|
||||
x_max: 40
|
||||
y_min: 200
|
||||
x_min: 2
|
||||
y_max: 319
|
||||
on_state:
|
||||
if:
|
||||
condition:
|
||||
binary_sensor.is_off: playlist
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: script.esp32_spotify_playlist_b6
|
||||
- platform: xpt2046
|
||||
xpt2046_id: touchscreen
|
||||
id: touch_A6
|
||||
x_max: 40
|
||||
y_min: 75
|
||||
x_min: 2
|
||||
y_max: 199
|
||||
on_state:
|
||||
if:
|
||||
condition:
|
||||
binary_sensor.is_off: playlist
|
||||
then:
|
||||
- homeassistant.service:
|
||||
service: script.esp32_spotify_playlist_a6
|
||||
xpt2046:
|
||||
id: touchscreen
|
||||
cs_pin: 14
|
||||
irq_pin: 27
|
||||
update_interval: 50ms
|
||||
report_interval: 1s
|
||||
threshold: 400
|
||||
dimension_x: 240
|
||||
dimension_y: 320
|
||||
calibration_x_min: 3860
|
||||
calibration_x_max: 280
|
||||
calibration_y_min: 340
|
||||
calibration_y_max: 3860
|
||||
swap_x_y: false
|
||||
|
||||
display:
|
||||
- platform: ili9341
|
||||
model: TFT 2.4
|
||||
id: touch_display
|
||||
cs_pin: GPIO5
|
||||
dc_pin: GPIO4
|
||||
reset_pin: GPIO22
|
||||
rotation: 270
|
||||
lambda: |-
|
||||
auto black = Color(0, 0, 0);
|
||||
it.fill(black);
|
||||
// WENN SPOTIFY SPIELT BUTTONS
|
||||
if ((id(spotify).state == "playing" or id(spotify).state == "paused") and id(display_wetter).state == false) {
|
||||
if (id(spotify).state == "playing")
|
||||
{ it.print(0, 155, id(font_icon_spotify), id(my_green), TextAlign::TOP_LEFT, "\U000F03E5"); } // Pause Icon
|
||||
else
|
||||
{ it.print(0, 155, id(font_icon_spotify), id(my_white), TextAlign::TOP_LEFT, "\U000F040C"); } // Play Icon
|
||||
it.print(-5, 43, id(font_icon_spotify_big), id(my_green), TextAlign::TOP_LEFT, "\U000F04C7"); // Spotify Icon gross
|
||||
// WENN SPOTIFY SPIELT UND PLAYLIST GESCHLOSSEN
|
||||
if (id(playlist).state) {
|
||||
if (id(spotify_shuffle).state) {
|
||||
it.print(98, 3, id(font_icon_spotify_infobar_s), id(my_green), TextAlign::TOP_RIGHT, "\U000F049D"); // Shuffle on
|
||||
}
|
||||
else {
|
||||
it.print(98, 3, id(font_icon_spotify_infobar_s), id(my_white), TextAlign::TOP_RIGHT, "\U000F049E"); // Shuffle off
|
||||
}
|
||||
it.strftime(4, 0, id(font21), id(my_white), TextAlign::TOP_LEFT, "%H:%M", id(esptime).now());
|
||||
it.filled_circle(319, 0, 40, id(my_white));
|
||||
it.print(319, 1, id(font_icon_spotify_infobar), id(my_black), TextAlign::TOP_RIGHT, "\U000F0595"); // Wetter Display
|
||||
it.print(78, 66, id(font21), id(my_white), TextAlign::TOP_LEFT, id(current_title).state.c_str()); // Track Infos
|
||||
it.print(78, 91, id(font21), id(my_white), TextAlign::TOP_LEFT, id(current_artist).state.c_str());
|
||||
it.print(78, 46, id(font18), id(my_green), TextAlign::TOP_LEFT, id(current_playlist).state.c_str());
|
||||
// it.printf(78, 35, id(font18), id(my_green), TextAlign::TOP_LEFT, "%.0f", id(spotpos).state);
|
||||
|
||||
it.filled_rectangle(78, 130, 222, 2, id(my_grey)); // Progress Back
|
||||
it.filled_rectangle(78, 130, id(spotpos).state, 2, id(my_green)); // Progress Bar
|
||||
it.filled_circle(id(spotpos2).state, 130, 4, id(my_green)); // Progress Circle
|
||||
it.print(78, 133, id(font_spot_time), id(my_green), TextAlign::TOP_LEFT, id(spotpostime).state.c_str()); // Position Song
|
||||
it.print(300, 133, id(font_spot_time), id(my_green), TextAlign::TOP_RIGHT, id(spotdur).state.c_str()); // Duration Song
|
||||
|
||||
it.print(78, 155, id(font_icon_spotify), id(my_white), TextAlign::TOP_LEFT, "\U000F0661"); // Next Track
|
||||
it.print(155, 155, id(font_icon_spotify), id(my_white), TextAlign::TOP_LEFT, "\U000F0376"); // Volume-
|
||||
it.print(232, 155, id(font_icon_spotify), id(my_white), TextAlign::TOP_LEFT, "\U000F0417"); // Volume+
|
||||
if (id(display_volume).state) { // Volume State
|
||||
it.filled_circle(230, 122, 35, id(my_green));
|
||||
it.printf(230, 145, id(font40), id(my_black), TextAlign::BOTTOM_CENTER, "%.0f", id(spotvol).state);
|
||||
}
|
||||
}
|
||||
// WENN SPOTIFY SPIELT UND PLAYLIST OFFEN
|
||||
else if ((id(spotify).state == "playing" or id(spotify).state == "paused") and id(playlist).state == false) {
|
||||
it.line(76, 0, 76, 240);
|
||||
it.line(200, 0, 200, 240);
|
||||
it.line(319, 0, 319, 240);
|
||||
it.line(76, 0, 319, 0);
|
||||
it.line(76, 40, 319, 40);
|
||||
it.line(76, 80, 319, 80);
|
||||
it.line(76, 120, 319, 120);
|
||||
it.line(76, 160, 319, 160);
|
||||
it.line(76, 200, 319, 200);
|
||||
it.line(76, 239, 319, 239);
|
||||
it.print(85, 20, id(font18), id(my_white), TextAlign::CENTER_LEFT, "Mix d.Woche");
|
||||
it.print(85, 60, id(font18), id(my_white), TextAlign::CENTER_LEFT, "Playlist W");
|
||||
it.print(85, 100, id(font18), id(my_white), TextAlign::CENTER_LEFT, "me right no");
|
||||
it.print(85, 140, id(font18), id(my_white), TextAlign::CENTER_LEFT, "House Party");
|
||||
it.print(85, 180, id(font18), id(my_white), TextAlign::CENTER_LEFT, "News");
|
||||
it.print(85, 220, id(font18), id(my_white), TextAlign::CENTER_LEFT, "Playlist Z");
|
||||
it.print(209, 20, id(font18), id(my_white), TextAlign::CENTER_LEFT, "Playlist X");
|
||||
it.print(209, 60, id(font18), id(my_white), TextAlign::CENTER_LEFT, "Playlist Y");
|
||||
it.print(209, 100, id(font18), id(my_white), TextAlign::CENTER_LEFT, "Pool Electro");
|
||||
it.print(209, 140, id(font18), id(my_white), TextAlign::CENTER_LEFT, "Hot Hits");
|
||||
it.print(209, 180, id(font18), id(my_white), TextAlign::CENTER_LEFT, "Kaffeehaus");
|
||||
it.print(209, 220, id(font18), id(my_white), TextAlign::CENTER_LEFT, "Chilled Dan");
|
||||
}}
|
||||
else {
|
||||
// STATUSLEISTE ZEIT ODER WETTERWARNUNG
|
||||
it.strftime(4, 3, id(font21), id(my_white), TextAlign::TOP_LEFT, "%H:%M", id(esptime).now());
|
||||
it.filled_circle(319, 0, 40, id(my_white));
|
||||
it.print(319, 1, id(font_icon_spotify_infobar), id(my_black), TextAlign::TOP_RIGHT, "\U000F04C7"); // Spotify Icon Infobar
|
||||
}
|
||||
164
evcc/evcc.yaml
|
|
@ -1,164 +0,0 @@
|
|||
sponsortoken: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJldmNjLmlvIiwic3ViIjoibWlsdmVydCIsImV4cCI6MTgzMDE5MzIwMCwiaWF0IjoxNzM1NTg1MjAwLCJzcmMiOiJnaCJ9._K23QsA15DIHRjujwH8rnFZyloSw1RPIeIS4W5WLFGE
|
||||
|
||||
log: info
|
||||
levels:
|
||||
tariff: info
|
||||
|
||||
interval: 30s
|
||||
|
||||
vehicles:
|
||||
# - name: car
|
||||
# type: custom
|
||||
# title: id4
|
||||
# capacity: 79 # kWh
|
||||
# soc:
|
||||
# source: mqtt
|
||||
# topic: weconnect/0/vehicles/WVGZZZE2ZPE051949/domains/charging/batteryStatus/currentSOC_pct
|
||||
# range:
|
||||
# source: mqtt
|
||||
# topic: weconnect/0/vehicles/WVGZZZE2ZPE051949/domains/fuelStatus/rangeStatus/primaryEngine/remainingRange_km
|
||||
# odometer:
|
||||
# source: mqtt
|
||||
# topic: weconnect/0/vehicles/WVGZZZE2ZPE051949/domains/measurements/odometerStatus/odometer
|
||||
# climater:
|
||||
# source: go
|
||||
# vm: shared
|
||||
# script: |
|
||||
# remoteClimateState != "off"
|
||||
# in:
|
||||
# - name: remoteClimateState
|
||||
# type: string
|
||||
# config:
|
||||
# source: mqtt
|
||||
# topic: weconnect/0/vehicles/WVGZZZE2ZPE051949/domains/climatisation/climatisationStatus/climatisationState
|
||||
|
||||
- name: skoda_car
|
||||
type: custom
|
||||
title: elroq
|
||||
capacity: 64 # kWh
|
||||
soc:
|
||||
source: mqtt
|
||||
topic: carconnectivity/0/garage/TMBNC7NY8SF105227/drives/primary/level
|
||||
range:
|
||||
source: mqtt
|
||||
topic: carconnectivity/0/garage/TMBNC7NY8SF105227/drives/primary/range
|
||||
odometer:
|
||||
source: mqtt
|
||||
topic: carconnectivity/0/garage/TMBNC7NY8SF105227/odometer
|
||||
climater:
|
||||
source: mqtt
|
||||
topic: carconnectivity/0/garage/TMBNC7NY8SF105227/climatization/binarystate
|
||||
limitsoc:
|
||||
source: mqtt
|
||||
topic: carconnectivity/0/garage/TMBNC7NY8SF105227/charging/settings/target_level
|
||||
status:
|
||||
source: mqtt
|
||||
topic: carconnectivity/0/garage/TMBNC7NY8SF105227/charging/state
|
||||
|
||||
chargers:
|
||||
- name: wallbox
|
||||
type: easee
|
||||
user: simon+easee@milvert.com
|
||||
password: X7#aEzjlEysBgl
|
||||
charger: EHCNF485
|
||||
|
||||
circuits:
|
||||
- name: main # if there is only one circuit defined the name needs to be 'main'
|
||||
title: 'main circuit' # name for the UI (not implemented in UI yet)
|
||||
maxCurrent: 20
|
||||
maxPower: 10000
|
||||
meter: my_grid # optiona
|
||||
|
||||
loadpoints:
|
||||
- title: Garage
|
||||
charger: wallbox
|
||||
vehicle: skoda_car
|
||||
circuit: main
|
||||
mode: pv
|
||||
enable:
|
||||
threshold: 0
|
||||
delay: 15s
|
||||
disable:
|
||||
threshold: 500
|
||||
delay:
|
||||
2m
|
||||
|
||||
site:
|
||||
title: Hemma
|
||||
meters:
|
||||
pv:
|
||||
- pv
|
||||
grid: my_grid
|
||||
|
||||
meters:
|
||||
- name: pv
|
||||
type: custom
|
||||
power:
|
||||
source: mqtt
|
||||
topic: inverter/measure/active_power
|
||||
# jq: .value
|
||||
energy:
|
||||
source: mqtt
|
||||
topic: inverter/calculated/accumulated_yield_energy
|
||||
timeout: 60s
|
||||
currents:
|
||||
- source: mqtt
|
||||
topic: inverter/measure/phase_A_current
|
||||
timeout: 60s
|
||||
# jq: .value
|
||||
- source: mqtt
|
||||
topic: inverter/measure/phase_B_current
|
||||
timeout: 60s
|
||||
# jq: .value
|
||||
- source: mqtt
|
||||
topic: inverter/measure/phase_C_current
|
||||
timeout: 60s
|
||||
|
||||
- name: my_grid
|
||||
type: custom
|
||||
power:
|
||||
source: calc
|
||||
add:
|
||||
- source: mqtt
|
||||
topic: dsmr/reading/electricity_currently_returned
|
||||
scale: -1000
|
||||
timeout: 30s
|
||||
- source: mqtt
|
||||
topic: dsmr/reading/electricity_currently_delivered
|
||||
scale: 1000
|
||||
timeout: 30s
|
||||
energy:
|
||||
source: calc
|
||||
add:
|
||||
- source: mqtt
|
||||
topic: dsmr/reading/electricity_returned_1
|
||||
scale: 0.001
|
||||
timeout: 30s
|
||||
- source: mqtt
|
||||
topic: dsmr/reading/electricity_delivered_1
|
||||
scale: -0.001
|
||||
timeout: 30s
|
||||
|
||||
currents:
|
||||
- source: mqtt
|
||||
topic: dsmr/reading/phase_power_current_l1
|
||||
timeout: 30s
|
||||
- source: mqtt
|
||||
topic: dsmr/reading/phase_power_current_l2
|
||||
timeout: 30s
|
||||
- source: mqtt
|
||||
topic: dsmr/reading/phase_power_current_l1
|
||||
timeout: 30s
|
||||
|
||||
|
||||
influx:
|
||||
url: http://influx:8086
|
||||
database: Car
|
||||
token: H7m068KDh84kcmmkz3fy-dEsKoeYv90rnNXIidPZoBy240Jzdbr2uDtGveiuDz4rGK4jmMI8J00zVwgIBYJxXA==
|
||||
org: milvert
|
||||
|
||||
mqtt:
|
||||
broker: mqtt:1883
|
||||
user: simon
|
||||
password: bajsa123
|
||||
topic: evcc
|
||||
|
|
@ -1,87 +0,0 @@
|
|||
APP_NAME = Gitea: Git with a cup of simon
|
||||
RUN_MODE = prod
|
||||
RUN_USER = git
|
||||
WORK_PATH = /data/gitea
|
||||
|
||||
[repository]
|
||||
ROOT = /data/git/repositories
|
||||
|
||||
[repository.local]
|
||||
LOCAL_COPY_PATH = /data/gitea/tmp/local-repo
|
||||
|
||||
[repository.upload]
|
||||
TEMP_PATH = /data/gitea/uploads
|
||||
|
||||
[server]
|
||||
APP_DATA_PATH = /data/gitea
|
||||
SSH_DOMAIN = milvert.com
|
||||
HTTP_PORT = 3000
|
||||
ROOT_URL = https://gitea.milvert.com/
|
||||
# ROOT_URL = http://localhost:3000/
|
||||
DISABLE_SSH = false
|
||||
SSH_PORT = 22
|
||||
LFS_START_SERVER = true
|
||||
LFS = /data/git/lfs
|
||||
DOMAIN = gitea.milvert.com
|
||||
LFS_JWT_SECRET = k0fQxO-UgL1dT55DxBy7ylQpj4A7HDjXiZQs-VxSs6E
|
||||
OFFLINE_MODE = false
|
||||
|
||||
[database]
|
||||
PATH = /data/gitea/gitea.db
|
||||
DB_TYPE = sqlite3
|
||||
HOST = localhost:3306
|
||||
NAME = gitea
|
||||
USER = root
|
||||
PASSWD =
|
||||
SSL_MODE = disable
|
||||
CHARSET = utf8
|
||||
|
||||
[indexer]
|
||||
ISSUE_INDEXER_PATH = /data/gitea/indexers/issues.bleve
|
||||
|
||||
[session]
|
||||
PROVIDER_CONFIG = /data/gitea/sessions
|
||||
PROVIDER = file
|
||||
|
||||
[picture]
|
||||
AVATAR_UPLOAD_PATH = /data/gitea/avatars
|
||||
REPOSITORY_AVATAR_UPLOAD_PATH = /data/gitea/repo-avatars
|
||||
DISABLE_GRAVATAR = false
|
||||
ENABLE_FEDERATED_AVATAR = true
|
||||
|
||||
[attachment]
|
||||
PATH = /data/gitea/attachments
|
||||
|
||||
[log]
|
||||
ROOT_PATH = /data/gitea/log
|
||||
MODE = file
|
||||
LEVEL = info
|
||||
|
||||
[security]
|
||||
INSTALL_LOCK = true
|
||||
SECRET_KEY = 5Aocki6hsR7kzfZ7LJsKF7VESSZ4IMxEtGVc1YLEjGVUTYjnlTtOxL8vY7dLVFOi
|
||||
INTERNAL_TOKEN = eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJuYmYiOjE1NzM5MzU0OTl9.F1h5ZnL5eXqAgPp8Ya4tvhwFK08CyZQdETkyB9O5D34
|
||||
|
||||
[service]
|
||||
DISABLE_REGISTRATION = true
|
||||
REQUIRE_SIGNIN_VIEW = false
|
||||
REGISTER_EMAIL_CONFIRM = false
|
||||
ENABLE_NOTIFY_MAIL = false
|
||||
ALLOW_ONLY_EXTERNAL_REGISTRATION = true
|
||||
ENABLE_CAPTCHA = false
|
||||
DEFAULT_KEEP_EMAIL_PRIVATE = false
|
||||
DEFAULT_ALLOW_CREATE_ORGANIZATION = true
|
||||
DEFAULT_ENABLE_TIMETRACKING = true
|
||||
SHOW_REGISTRATION_BUTTON = false
|
||||
NO_REPLY_ADDRESS = noreply.example.org
|
||||
|
||||
[oauth2]
|
||||
JWT_SECRET = bnlTPZHUxEH5WGrJIAcY5IAqisk3BFb7XY8SUeI5XjA
|
||||
|
||||
[mailer]
|
||||
ENABLED = false
|
||||
|
||||
[openid]
|
||||
ENABLE_OPENID_SIGNIN = false
|
||||
ENABLE_OPENID_SIGNUP = true
|
||||
WHITELISTED_URIS = authelia.milvert.com
|
||||
1140
grafana/grafana.ini
|
|
@ -1,18 +0,0 @@
|
|||
appdaemon:
|
||||
latitude: 0
|
||||
longitude: 0
|
||||
elevation: 30
|
||||
time_zone: Europe/Berlin
|
||||
plugins:
|
||||
HASS:
|
||||
type: hass
|
||||
ha_url: http://ha:8123
|
||||
token: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJmZmM0YTI1ZjVlYWM0NGY5OTA3OGFmOWJiMTJmYmUzZCIsImlhdCI6MTY5MzczMDQwNSwiZXhwIjoyMDA5MDkwNDA1fQ.YVH8WhH6FMvTkecJ-taCACP6kVG9is2hHmTR3tk3cns
|
||||
cert_verify: False
|
||||
loglevel: DEBUG
|
||||
module_debug:
|
||||
http:
|
||||
url: http://appdaemon:5050
|
||||
admin:
|
||||
api:
|
||||
hadashboard:
|
||||
|
|
@ -1,3 +0,0 @@
|
|||
hello_world:
|
||||
module: hello
|
||||
class: HelloWorld
|
||||
|
|
@ -1,3 +0,0 @@
|
|||
hello_world:
|
||||
module: hello
|
||||
class: HelloWorld
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
import hassapi as hass
|
||||
|
||||
#
|
||||
# Hello World App
|
||||
#
|
||||
# Args:
|
||||
#
|
||||
|
||||
|
||||
class HelloWorld(hass.Hass):
|
||||
def initialize(self):
|
||||
self.log("Hello from AppDaemon")
|
||||
self.log("You are now ready to run Apps!")
|
||||
|
|
@ -1,240 +0,0 @@
|
|||
|
||||
html {
|
||||
font-size: 100%;
|
||||
-webkit-text-size-adjust: 100%;
|
||||
-ms-text-size-adjust: 100%;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
background-color: #222;
|
||||
font-size: 15px;
|
||||
color: #fff;
|
||||
padding: 0;
|
||||
line-height: 1;
|
||||
font-family: 'Helvetica Neue', 'Helvetica', 'Open Sans', 'Arial'
|
||||
}
|
||||
|
||||
b, strong {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
a {
|
||||
text-decoration: none;
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
img {
|
||||
border: 0;
|
||||
-ms-interpolation-mode: bicubic;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
img, object {
|
||||
max-width: 100%;
|
||||
-webkit-border-radius: 5px;
|
||||
-moz-border-radius: 5px;
|
||||
border-radius: 5px;}
|
||||
|
||||
iframe {
|
||||
max-width: 100%;
|
||||
}
|
||||
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
border-spacing: 0;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
td {
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
ul, ol {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
h1, h2, h3, h4, h5, p {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
h1 {
|
||||
margin-bottom: 6px;
|
||||
text-align: center;
|
||||
font-size: 100%;
|
||||
font-weight: 200;
|
||||
}
|
||||
h2 {
|
||||
font-size: 300%;
|
||||
font-weight: 400;
|
||||
color: #fff;
|
||||
}
|
||||
h3 {
|
||||
font-size: 125%;
|
||||
font-weight: 300;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
input {
|
||||
background-color: #444;
|
||||
border: none;
|
||||
}
|
||||
|
||||
.dashboard_main {
|
||||
margin: 0px auto;
|
||||
}
|
||||
|
||||
.gridster {
|
||||
margin: 0px auto;
|
||||
}
|
||||
|
||||
.icon-background {
|
||||
pointer-events: none;
|
||||
width: 100%!important;
|
||||
height: 100%;
|
||||
position: absolute;
|
||||
left: 0;
|
||||
top: 0;
|
||||
opacity: 0.1;
|
||||
font-size: 1375%;
|
||||
text-align: center;
|
||||
margin-top: 82px;
|
||||
}
|
||||
|
||||
.list-nostyle {
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
.gridster ul {
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
.gs-w {
|
||||
width: 100%;
|
||||
display: table;
|
||||
cursor: pointer;
|
||||
z-index: auto !important;
|
||||
}
|
||||
|
||||
.iframe {
|
||||
position: relative;
|
||||
overflow: hidden;
|
||||
-webkit-border-radius: 5px;
|
||||
-moz-border-radius: 5px;
|
||||
border-radius: 5px;}
|
||||
|
||||
.widget {
|
||||
padding: 0px 0px;
|
||||
text-align: center;
|
||||
width: 100%;
|
||||
display: table-cell;
|
||||
vertical-align: middle;
|
||||
background-color: #444444;
|
||||
-webkit-border-radius: 5px;
|
||||
-moz-border-radius: 5px;
|
||||
border-radius: 5px;}
|
||||
|
||||
.title {
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
.icon-inactive {
|
||||
color: #888;
|
||||
}
|
||||
|
||||
.icon-active {
|
||||
color: #aaff00;
|
||||
}
|
||||
|
||||
|
||||
#container {
|
||||
padding-top: 0px;
|
||||
}
|
||||
|
||||
.modalDialog {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
background: rgba(0,0,0,0.8);
|
||||
z-index: 9999;
|
||||
opacity:0;
|
||||
-webkit-transition: opacity 400ms ease-in;
|
||||
-moz-transition: opacity 400ms ease-in;
|
||||
transition: opacity 400ms ease-in;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.modalDialogOpen {
|
||||
opacity:0.95;
|
||||
pointer-events: auto;
|
||||
}
|
||||
|
||||
.modalDialogClose {
|
||||
opacity:0;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.modalDialog > div {
|
||||
width: 275px;
|
||||
position: relative;
|
||||
margin: 3% auto;
|
||||
padding: 5px 20px 13px 20px;
|
||||
border-radius: 10px;
|
||||
}
|
||||
|
||||
.modalDialogCloseButton {
|
||||
line-height: 50px;
|
||||
position: absolute;
|
||||
right: -25px;
|
||||
text-align: center;
|
||||
top: -20px;
|
||||
width: 50px;
|
||||
text-decoration: none;
|
||||
font-weight: bold;
|
||||
-webkit-border-radius: 25px;
|
||||
-moz-border-radius: 25px;
|
||||
border-radius: 25px;
|
||||
}
|
||||
|
||||
.modalDialogCloseButton:hover { background: #444; }
|
||||
|
||||
.widget-basedisplay-default-label .unit {
|
||||
font-size: 225%;
|
||||
font-weight: 400;
|
||||
display: inline-block;
|
||||
vertical-align: top;
|
||||
margin-left: 5px;
|
||||
margin-top: 5px;
|
||||
}
|
||||
|
||||
.widget-basedisplay-default-label .value {
|
||||
display: inline-block;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
.widget-basedisplay-default-label .valueunit {
|
||||
width: 100%;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
.widget-basedisplay-default-label .title {
|
||||
position: absolute;
|
||||
top: 5px;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.widget-basedisplay-default-label .title2 {
|
||||
position: absolute;
|
||||
top: 23px;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.widget-basedisplay-default-label .state_text {
|
||||
position: absolute;
|
||||
bottom: -3px;
|
||||
width: 100%;
|
||||
}
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
<! body tags ->
|
||||
|
||||
|
||||
|
||||
<! body tags go here ->
|
||||
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
<! head tags ->
|
||||
|
||||
|
||||
|
||||
<! head tags go here ->
|
||||
|
||||
|
|
@ -1,130 +0,0 @@
|
|||
var myDeviceID;
|
||||
$(function(){ //DOM Ready
|
||||
|
||||
function navigate(url)
|
||||
{
|
||||
window.location.href = url;
|
||||
}
|
||||
|
||||
$(document).attr("title", "Hello Panel");
|
||||
content_width = (120 + 5) * 8 + 5
|
||||
$('.gridster').width(content_width)
|
||||
$(".gridster ul").gridster({
|
||||
widget_margins: [5, 5],
|
||||
widget_base_dimensions: [120, 120],
|
||||
avoid_overlapped_widgets: true,
|
||||
max_rows: 15,
|
||||
max_size_x: 8,
|
||||
shift_widgets_up: false
|
||||
}).data('gridster').disable();
|
||||
|
||||
// Add Widgets
|
||||
|
||||
var gridster = $(".gridster ul").gridster().data('gridster');
|
||||
|
||||
gridster.add_widget('<li><div data-bind="attr: {style: widget_style}" class="widget widget-basedisplay-default-label" id="default-label"><h1 class="title" data-bind="text: title, attr:{ style: title_style}"></h1><h1 class="title2" data-bind="text: title2, attr:{ style: title2_style}"></h1><div class="valueunit" data-bind="attr:{ style: container_style}"><h2 class="value" data-bind="html: value, attr:{ style: value_style}"></h2><p class="unit" data-bind="html: unit, attr:{ style: unit_style}"></p></div><h1 class="state_text" data-bind="text: state_text, attr: {style: state_text_style}"></h1></div></li>', 2, 2, 1, 1)
|
||||
|
||||
|
||||
|
||||
var widgets = {}
|
||||
// Initialize Widgets
|
||||
|
||||
widgets["default-label"] = new basedisplay("default-label", "", "default", {'widget_type': 'basedisplay', 'fields': {'title': '', 'title2': '', 'value': 'Hello World', 'unit': '', 'state_text': ''}, 'static_css': {'title_style': 'color: #fff;', 'title2_style': 'color: #fff;', 'unit_style': '', 'value_style': 'color: #fff;', 'state_text_style': 'color: #fff;', 'widget_style': 'background-color: #444;', 'container_style': ''}, 'css': {}, 'icons': [], 'static_icons': [], 'namespace': 'default'})
|
||||
|
||||
|
||||
// Setup click handler to cancel timeout navigations
|
||||
|
||||
$( ".gridster" ).click(function(){
|
||||
clearTimeout(myTimeout);
|
||||
if (myTimeoutSticky) {
|
||||
myTimeout = setTimeout(function() { navigate(myTimeoutUrl); }, myTimeoutDelay);
|
||||
}
|
||||
});
|
||||
|
||||
// Set up timeout
|
||||
|
||||
var myTimeout;
|
||||
var myTimeoutUrl;
|
||||
var myTimeoutDelay;
|
||||
var myTimeoutSticky = 0;
|
||||
if (location.search != "")
|
||||
{
|
||||
console.log("begin")
|
||||
var query = location.search.substr(1);
|
||||
var result = {};
|
||||
query.split("&").forEach(function(part) {
|
||||
var item = part.split("=");
|
||||
result[item[0]] = decodeURIComponent(item[1]);
|
||||
});
|
||||
|
||||
if ("deviceid" in result)
|
||||
{
|
||||
myDeviceID = result.deviceid;
|
||||
try
|
||||
{
|
||||
setCookie('ADdevID', myDeviceID);
|
||||
}
|
||||
catch (e)
|
||||
{
|
||||
console.log(e);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
try
|
||||
{
|
||||
myDeviceID = getCookie('ADdevID');
|
||||
}
|
||||
catch (e)
|
||||
{
|
||||
console.log(e);
|
||||
myDeviceID = null;
|
||||
}
|
||||
}
|
||||
if ("timeout" in result && "return" in result)
|
||||
{
|
||||
url = result.return
|
||||
argcount = 0
|
||||
for (arg in result)
|
||||
{
|
||||
if (arg != "timeout" && arg != "return" && arg != "sticky")
|
||||
{
|
||||
if (argcount == 0)
|
||||
{
|
||||
url += "?";
|
||||
}
|
||||
else
|
||||
{
|
||||
url += "&";
|
||||
}
|
||||
argcount ++;
|
||||
url += arg + "=" + result[arg]
|
||||
}
|
||||
}
|
||||
if ("sticky" in result)
|
||||
{
|
||||
myTimeoutSticky = (result.sticky == "1");
|
||||
}
|
||||
myTimeoutUrl = url;
|
||||
myTimeoutDelay = result.timeout * 1000;
|
||||
myTimeout = setTimeout(function() { navigate(url); }, result.timeout * 1000);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
try
|
||||
{
|
||||
myDeviceID = getCookie('ADdevID');
|
||||
}
|
||||
catch (e)
|
||||
{
|
||||
console.log(e);
|
||||
myDeviceID = null;
|
||||
}
|
||||
}
|
||||
|
||||
// Start listening for AD Events
|
||||
|
||||
window.dashstream = new DashStream("ws", location.protocol, document.domain, location.port, "Hello Panel", widgets);
|
||||
|
||||
});
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
#
|
||||
# Main arguments, all optional
|
||||
#
|
||||
title: Hello Panel
|
||||
widget_dimensions: [120, 120]
|
||||
widget_margins: [5, 5]
|
||||
columns: 8
|
||||
|
||||
label:
|
||||
widget_type: label
|
||||
text: Hello World
|
||||
|
||||
layout:
|
||||
- label(2x2)
|
||||
|
|
@ -1,755 +0,0 @@
|
|||
- id: '1698524075327'
|
||||
alias: light spot when motion
|
||||
description: ''
|
||||
trigger:
|
||||
- type: turned_on
|
||||
platform: device
|
||||
device_id: 80ecd4d9a212d4bc811d958b336ccec0
|
||||
entity_id: 390b057edde8e3a7ce5dd96fbc8f7486
|
||||
domain: binary_sensor
|
||||
condition:
|
||||
- condition: state
|
||||
entity_id: binary_sensor.g5_flex_is_dark
|
||||
state: 'on'
|
||||
action:
|
||||
- service: light.turn_on
|
||||
data:
|
||||
color_temp: 359
|
||||
brightness_pct: 100
|
||||
target:
|
||||
entity_id: light.garage_spot
|
||||
- delay:
|
||||
hours: 0
|
||||
minutes: 5
|
||||
seconds: 0
|
||||
milliseconds: 0
|
||||
- service: light.turn_off
|
||||
data: {}
|
||||
target:
|
||||
entity_id: light.garage_spot
|
||||
mode: single
|
||||
- id: '1700687177645'
|
||||
alias: Handle_motorvärmare
|
||||
description: ''
|
||||
triggers:
|
||||
- at: input_datetime.motorvarmare_start
|
||||
trigger: time
|
||||
conditions:
|
||||
- condition: state
|
||||
entity_id: input_boolean.motorvarmare_toogle
|
||||
state: 'on'
|
||||
actions:
|
||||
- data: {}
|
||||
action: switch.turn_on
|
||||
target:
|
||||
entity_id: switch.nodeid_22_nodeid_22_switch
|
||||
- delay: 02:00:00
|
||||
- data: {}
|
||||
action: switch.turn_off
|
||||
target:
|
||||
entity_id: switch.nodeid_22_nodeid_22_switch
|
||||
mode: single
|
||||
- id: '1700693056778'
|
||||
alias: motorvärmare 2h
|
||||
description: motorvärmare i 2h
|
||||
trigger:
|
||||
- platform: state
|
||||
entity_id:
|
||||
- switch.nodeid_22_switch
|
||||
from: 'off'
|
||||
to: 'on'
|
||||
for:
|
||||
minutes: 120
|
||||
condition: []
|
||||
action:
|
||||
- service: switch.turn_off
|
||||
data: {}
|
||||
target:
|
||||
entity_id: switch.nodeid_22_switch
|
||||
mode: single
|
||||
- id: '1703971688590'
|
||||
alias: Lampa trappa dag
|
||||
description: ''
|
||||
trigger:
|
||||
- platform: state
|
||||
entity_id:
|
||||
- binary_sensor.h007m_occupancy
|
||||
to: 'on'
|
||||
condition:
|
||||
- condition: or
|
||||
conditions:
|
||||
- condition: time
|
||||
after: '16:00:00'
|
||||
before: '22:30:00'
|
||||
- condition: time
|
||||
after: 08:00:00
|
||||
before: '11:00:00'
|
||||
action:
|
||||
- service: light.turn_on
|
||||
data:
|
||||
brightness_pct: 52
|
||||
kelvin: 2900
|
||||
target:
|
||||
device_id: ad8c90d56d6753ae960fe61560f1de66
|
||||
- delay:
|
||||
hours: 0
|
||||
minutes: 20
|
||||
seconds: 0
|
||||
milliseconds: 0
|
||||
- service: light.turn_off
|
||||
data: {}
|
||||
target:
|
||||
device_id: ad8c90d56d6753ae960fe61560f1de66
|
||||
mode: single
|
||||
- id: '1704919949019'
|
||||
alias: Update Jaffa location as MQTT location updates
|
||||
description: ''
|
||||
trigger:
|
||||
- platform: mqtt
|
||||
topic: weconnect/0/vehicles/WVGZZZE2ZPE051949/parking/parkingPosition/latitude
|
||||
- platform: mqtt
|
||||
topic: weconnect/0/vehicles/WVGZZZE2ZPE051949/parking/parkingPosition/longitude
|
||||
condition: []
|
||||
action:
|
||||
- service: device_tracker.see
|
||||
data:
|
||||
dev_id: jaffa_location
|
||||
source_type: gps
|
||||
gps:
|
||||
- '{{ states.sensor.none_jaffa_lat.state }}'
|
||||
- '{{ states.sensor.none_jaffa_long.state }}'
|
||||
initial_state: 'on'
|
||||
- id: '1706989035065'
|
||||
alias: Lampa trappa natt
|
||||
description: ''
|
||||
trigger:
|
||||
- platform: state
|
||||
entity_id:
|
||||
- binary_sensor.h007m_occupancy
|
||||
to: 'on'
|
||||
condition:
|
||||
- condition: time
|
||||
after: '22:30:00'
|
||||
before: 08:00:00
|
||||
action:
|
||||
- service: light.turn_on
|
||||
data:
|
||||
brightness_pct: 7
|
||||
target:
|
||||
device_id: ad8c90d56d6753ae960fe61560f1de66
|
||||
- delay:
|
||||
hours: 0
|
||||
minutes: 10
|
||||
seconds: 0
|
||||
milliseconds: 0
|
||||
- service: light.turn_off
|
||||
data: {}
|
||||
target:
|
||||
device_id: ad8c90d56d6753ae960fe61560f1de66
|
||||
mode: single
|
||||
- id: '1707166664479'
|
||||
alias: 'Oscar tts '
|
||||
description: 'Spelar upp en text på Oscars högtalare '
|
||||
trigger:
|
||||
- platform: state
|
||||
entity_id:
|
||||
- input_text.tts_syntesiser
|
||||
condition: []
|
||||
action:
|
||||
- service: tts.edge_tts_say
|
||||
metadata: {}
|
||||
data:
|
||||
cache: false
|
||||
entity_id: media_player.oscar
|
||||
language: sv_SE
|
||||
message: '{{ states(''input_text.tts_syntesiser'') }}'
|
||||
enabled: false
|
||||
- service: tts.edge_tts_say
|
||||
metadata: {}
|
||||
data:
|
||||
entity_id: media_player.oscar
|
||||
message: 'Hej '
|
||||
mode: single
|
||||
- id: '1709494545609'
|
||||
alias: växtlampa schema
|
||||
trigger:
|
||||
- platform: template
|
||||
value_template: '{{ now().hour == 7 }}'
|
||||
id: 'on'
|
||||
- platform: template
|
||||
value_template: '{{ (now().hour, now().minute) == (21,30) }}'
|
||||
id: 'off'
|
||||
action:
|
||||
- service: light.turn_{{ trigger.id }}
|
||||
target:
|
||||
entity_id: light.vaxtlampa
|
||||
- id: '1713552723716'
|
||||
alias: Automation_oscar_skrivbord
|
||||
description: ''
|
||||
trigger:
|
||||
- platform: event
|
||||
event_type: button_pressed
|
||||
event_data:
|
||||
entity_id: switch.oscar_skrivbord
|
||||
state: 'off'
|
||||
id: 'off'
|
||||
- platform: event
|
||||
event_type: button_pressed
|
||||
event_data:
|
||||
entity_id: switch.oscar_skrivbord
|
||||
state: 'on'
|
||||
id: 'on'
|
||||
condition: []
|
||||
action:
|
||||
- service: light.turn_{{ trigger.id }}
|
||||
target:
|
||||
entity_id: light.oscar_skrivbord
|
||||
data: {}
|
||||
- service: switch.turn_{{ trigger.id }}
|
||||
metadata: {}
|
||||
data: {}
|
||||
target:
|
||||
entity_id: switch.nodeid_13_nodeid_13_switch
|
||||
mode: single
|
||||
- id: '1713555224562'
|
||||
alias: Automation_oscar_moln
|
||||
description: ''
|
||||
trigger:
|
||||
- platform: event
|
||||
event_type: button_pressed
|
||||
event_data:
|
||||
entity_id: switch.oscar_skrivbord
|
||||
state: 'off'
|
||||
id: 'off'
|
||||
- platform: event
|
||||
event_type: button_pressed
|
||||
event_data:
|
||||
entity_id: switch.oscar_skrivbord
|
||||
state: 'on'
|
||||
id: 'on'
|
||||
condition: []
|
||||
action:
|
||||
- service: switch.turn_{{ trigger.id }}
|
||||
metadata: {}
|
||||
data: {}
|
||||
target:
|
||||
entity_id: switch.nodeid_7_nodeid_7_switch
|
||||
mode: single
|
||||
- id: '1715369564640'
|
||||
alias: Bevattning 20 min
|
||||
description: ''
|
||||
trigger:
|
||||
- platform: sun
|
||||
event: sunrise
|
||||
offset: 0
|
||||
condition: []
|
||||
action:
|
||||
- type: turn_on
|
||||
device_id: ee19c0deed59d2a266b59c30dbf7ccaa
|
||||
entity_id: 068c418ccf17f83fe236590673ce7c1f
|
||||
domain: switch
|
||||
- delay:
|
||||
hours: 0
|
||||
minutes: 30
|
||||
seconds: 0
|
||||
milliseconds: 0
|
||||
- type: turn_off
|
||||
device_id: ee19c0deed59d2a266b59c30dbf7ccaa
|
||||
entity_id: 068c418ccf17f83fe236590673ce7c1f
|
||||
domain: switch
|
||||
mode: single
|
||||
- id: '1715979338256'
|
||||
alias: Nattnotis
|
||||
description: ''
|
||||
trigger:
|
||||
- platform: state
|
||||
entity_id:
|
||||
- scene.natt
|
||||
condition:
|
||||
- condition: or
|
||||
conditions:
|
||||
- condition: state
|
||||
entity_id: binary_sensor.jaffa_locked
|
||||
state: 'on'
|
||||
- condition: state
|
||||
entity_id: lock.h014s
|
||||
state: unlocked
|
||||
- condition: state
|
||||
entity_id: binary_sensor.sensor_inne_tvattstuga_dt002_contact
|
||||
state: 'on'
|
||||
- condition: state
|
||||
entity_id: binary_sensor.sensor_inne_arum_dt003_contact
|
||||
state: 'on'
|
||||
action:
|
||||
- service: notify.mobile_app_simon_mobil
|
||||
metadata: {}
|
||||
data:
|
||||
title: Kolla lås.
|
||||
message: "**Natt-scen aktiverad!** * Jaffadörren: {{ states('binary_sensor.jaffa_locked').state
|
||||
}} * Tvättstuga: {{ states('binary_sensor.sensor_inne_tvattstuga_dt002_contact').state
|
||||
}} * Arbetsrum: {{ states('binary_sensor.sensor_inne_arum_dt003_contact').state
|
||||
}} \n"
|
||||
mode: single
|
||||
- id: '1723749734599'
|
||||
alias: Kväll_on_off
|
||||
description: ''
|
||||
triggers:
|
||||
- event: sunset
|
||||
offset: 00:30:00
|
||||
trigger: sun
|
||||
- at: '19:30:00'
|
||||
id: time_on
|
||||
trigger: time
|
||||
- at: '23:15:00'
|
||||
id: time_off
|
||||
trigger: time
|
||||
conditions: []
|
||||
actions:
|
||||
- data:
|
||||
entity_id: '{{ ''scene.kvalls_belysning'' if now().hour < 23 else ''scene.natt''
|
||||
}}
|
||||
|
||||
'
|
||||
action: scene.turn_on
|
||||
- id: '1723751411352'
|
||||
alias: ute_av_på
|
||||
description: ''
|
||||
triggers:
|
||||
- event: sunrise
|
||||
offset: -00:15:00
|
||||
trigger: sun
|
||||
- event: sunset
|
||||
offset: 00:15:00
|
||||
trigger: sun
|
||||
- at: 07:55:00
|
||||
trigger: time
|
||||
conditions: []
|
||||
actions:
|
||||
- data:
|
||||
entity_id: '{{ ''scene.ute'' if trigger.event == ''sunset'' else ''scene.ute_av''
|
||||
}}
|
||||
|
||||
'
|
||||
action: scene.turn_on
|
||||
mode: single
|
||||
- id: '1725646259613'
|
||||
alias: Kök på 5 min
|
||||
description: ''
|
||||
trigger:
|
||||
- platform: state
|
||||
entity_id:
|
||||
- light.kok_ct
|
||||
to: 'on'
|
||||
condition: []
|
||||
action:
|
||||
- entity_id: script.kok_ct_timer_2
|
||||
action: script.turn_on
|
||||
mode: restart
|
||||
- id: '1729969302596'
|
||||
alias: Tänd och släck vrum upp vid helgmorgon
|
||||
description: Tänder lampan kl 06:00 och släcker den kl 07:30 på lördagar och söndagar
|
||||
triggers:
|
||||
- at: 06:00:00
|
||||
trigger: time
|
||||
- at: 07:30:00
|
||||
trigger: time
|
||||
conditions:
|
||||
- condition: time
|
||||
weekday:
|
||||
- sat
|
||||
- sun
|
||||
actions:
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: template
|
||||
value_template: '{{ trigger.platform == ''time'' and trigger.now.strftime(''%H:%M:%S'')
|
||||
== ''06:00:00'' }}'
|
||||
sequence:
|
||||
- data:
|
||||
brightness_pct: 9
|
||||
target:
|
||||
device_id: 79ba72943d4ed67fa5dc4fdbfe4fa54d
|
||||
action: light.turn_on
|
||||
- conditions:
|
||||
- condition: template
|
||||
value_template: '{{ trigger.platform == ''time'' and trigger.now.strftime(''%H:%M:%S'')
|
||||
== ''07:30:00'' }}'
|
||||
sequence:
|
||||
- target:
|
||||
device_id: 79ba72943d4ed67fa5dc4fdbfe4fa54d
|
||||
action: light.turn_off
|
||||
data: {}
|
||||
- action: switch.turn_off
|
||||
metadata: {}
|
||||
data: {}
|
||||
target:
|
||||
device_id: d93a6c62e11bec4c4d480497363d4512
|
||||
mode: single
|
||||
- id: '1731354729078'
|
||||
alias: Tänd garage
|
||||
description: ''
|
||||
triggers:
|
||||
- type: occupied
|
||||
device_id: 0c06d34c097db550f6339bdf16b8b408
|
||||
entity_id: c03b9e6b324f34b4ff4dc523b49ed991
|
||||
domain: binary_sensor
|
||||
trigger: device
|
||||
- trigger: state
|
||||
entity_id:
|
||||
- binary_sensor.sensor_inne_garage_m001_occupancy
|
||||
to: 'on'
|
||||
conditions: []
|
||||
actions:
|
||||
- action: light.turn_on
|
||||
metadata: {}
|
||||
data: {}
|
||||
target:
|
||||
device_id: f5781cfa34b2e0a238f6f6333e7d7fa2
|
||||
- action: switch.turn_on
|
||||
metadata: {}
|
||||
data: {}
|
||||
target:
|
||||
entity_id: switch.nodeid_16_nodeid_16_switch
|
||||
- delay:
|
||||
minutes: 5
|
||||
- action: light.turn_off
|
||||
metadata: {}
|
||||
data: {}
|
||||
target:
|
||||
device_id: f5781cfa34b2e0a238f6f6333e7d7fa2
|
||||
- action: switch.turn_off
|
||||
metadata: {}
|
||||
data: {}
|
||||
target:
|
||||
entity_id: switch.nodeid_16_nodeid_16_switch
|
||||
mode: single
|
||||
- id: '1731870335988'
|
||||
alias: Lampa, byt slinga mot fasad
|
||||
description: ''
|
||||
triggers:
|
||||
- at: '23:15:00'
|
||||
trigger: time
|
||||
- at: 06:00:00
|
||||
trigger: time
|
||||
conditions: []
|
||||
actions:
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: time
|
||||
before: 06:00:00
|
||||
sequence:
|
||||
- action: switch.turn_off
|
||||
data: {}
|
||||
target:
|
||||
entity_id: switch.h024s
|
||||
- target:
|
||||
entity_id: switch.h015l
|
||||
action: switch.turn_on
|
||||
data: {}
|
||||
- conditions:
|
||||
- condition: time
|
||||
after: 05:59:59
|
||||
sequence:
|
||||
- action: switch.toggle
|
||||
data: {}
|
||||
target:
|
||||
entity_id: switch.h024s
|
||||
- target:
|
||||
entity_id: switch.h015l
|
||||
action: switch.toggle
|
||||
data: {}
|
||||
mode: single
|
||||
- id: '1733170240917'
|
||||
alias: 'Jul belysning '
|
||||
description: ''
|
||||
triggers:
|
||||
- value_template: '{{ now().hour == 16 }}'
|
||||
id: 'on'
|
||||
trigger: template
|
||||
- value_template: '{{ (now().hour) == 8 }}'
|
||||
id: 'off'
|
||||
trigger: template
|
||||
actions:
|
||||
- target:
|
||||
entity_id: group.jul_group
|
||||
action: homeassistant.turn_{{ trigger.id }}
|
||||
- id: '1735463627194'
|
||||
alias: Ada släck 0800
|
||||
description: ''
|
||||
triggers:
|
||||
- trigger: time
|
||||
at: 08:00:00
|
||||
conditions: []
|
||||
actions:
|
||||
- action: light.turn_off
|
||||
metadata: {}
|
||||
data: {}
|
||||
target:
|
||||
entity_id: light.ada_jordglob2
|
||||
- action: switch.turn_off
|
||||
metadata: {}
|
||||
data: {}
|
||||
target:
|
||||
entity_id: switch.ada_slnga
|
||||
- action: light.turn_off
|
||||
metadata: {}
|
||||
data: {}
|
||||
target:
|
||||
device_id: 069299a3de0a369de40dd512292a2828
|
||||
mode: single
|
||||
- id: '1740430707464'
|
||||
alias: Sov gott Oscar när sleep timer är 0
|
||||
description: ''
|
||||
triggers:
|
||||
- entity_id: sensor.sonos_sleep_timer
|
||||
to: '0'
|
||||
trigger: state
|
||||
actions:
|
||||
- action: script.talk_on_oscar
|
||||
data:
|
||||
message: Sov Gott Oscar! Dags att sova
|
||||
mode: single
|
||||
- id: '1741036830396'
|
||||
alias: 'Reser temp '
|
||||
description: ''
|
||||
triggers:
|
||||
- trigger: time
|
||||
at: 06:00:00
|
||||
conditions: []
|
||||
actions:
|
||||
- action: climate.set_fan_mode
|
||||
metadata: {}
|
||||
data:
|
||||
fan_mode: '3'
|
||||
target:
|
||||
device_id: ced8502d2ee4ac70dbb9929329ec3ae2
|
||||
- action: climate.set_temperature
|
||||
metadata: {}
|
||||
data:
|
||||
temperature: 22
|
||||
target:
|
||||
device_id: ced8502d2ee4ac70dbb9929329ec3ae2
|
||||
mode: single
|
||||
- id: '1743450529687'
|
||||
alias: Styr temperaturen vrum
|
||||
description: ''
|
||||
triggers:
|
||||
- type: opened
|
||||
device_id: c57486d3c2297020f9b19f7128bf867e
|
||||
entity_id: 19d29462d89383fd11e32861269de77d
|
||||
domain: binary_sensor
|
||||
metadata:
|
||||
secondary: false
|
||||
trigger: device
|
||||
for:
|
||||
hours: 0
|
||||
minutes: 0
|
||||
seconds: 30
|
||||
- type: not_opened
|
||||
device_id: c57486d3c2297020f9b19f7128bf867e
|
||||
entity_id: 19d29462d89383fd11e32861269de77d
|
||||
domain: binary_sensor
|
||||
trigger: device
|
||||
conditions: []
|
||||
actions:
|
||||
- choose:
|
||||
- conditions:
|
||||
- type: is_open
|
||||
condition: device
|
||||
device_id: c57486d3c2297020f9b19f7128bf867e
|
||||
entity_id: 19d29462d89383fd11e32861269de77d
|
||||
domain: binary_sensor
|
||||
for:
|
||||
hours: 0
|
||||
minutes: 0
|
||||
seconds: 30
|
||||
sequence:
|
||||
- data:
|
||||
value: '{{ state_attr(''climate.vardagsrum_2'', ''temperature'') }}'
|
||||
target:
|
||||
entity_id: input_number.vardagsrum_temperatur_sparad
|
||||
action: input_number.set_value
|
||||
- metadata: {}
|
||||
data:
|
||||
temperature: 6
|
||||
target:
|
||||
entity_id: climate.vardagsrum_2
|
||||
action: climate.set_temperature
|
||||
- conditions:
|
||||
- type: is_not_open
|
||||
condition: device
|
||||
device_id: c57486d3c2297020f9b19f7128bf867e
|
||||
entity_id: 19d29462d89383fd11e32861269de77d
|
||||
domain: binary_sensor
|
||||
sequence:
|
||||
- metadata: {}
|
||||
data:
|
||||
temperature: '{{ states(''input_number.vardagsrum_temperatur_sparad'') }}'
|
||||
target:
|
||||
entity_id: climate.vardagsrum_2
|
||||
action: climate.set_temperature
|
||||
mode: single
|
||||
- id: '1744573491574'
|
||||
alias: Möjlig frost - baserat på trend eller prognos
|
||||
triggers:
|
||||
- at: '20:00:00'
|
||||
trigger: time
|
||||
conditions:
|
||||
- condition: or
|
||||
conditions:
|
||||
- condition: template
|
||||
value_template: "{% set forecast = state_attr('weather.forecast_home', 'forecast')
|
||||
%} {% if forecast %}\n {{ forecast[0].templow | float < 0 }}\n{% else %}\n
|
||||
\ false\n{% endif %}\n"
|
||||
- condition: and
|
||||
conditions:
|
||||
- condition: numeric_state
|
||||
entity_id: sensor.h017s_temperature
|
||||
below: 4
|
||||
- condition: numeric_state
|
||||
entity_id: sensor.h017s_derivata
|
||||
below: -0.5
|
||||
actions:
|
||||
- data:
|
||||
message: Varning! Frost kan vara på gång ❄️ (enligt prognos eller temptrend)
|
||||
action: notify.mobile_app_simon_mobil
|
||||
mode: single
|
||||
- id: '1745266231556'
|
||||
alias: Nimly_31_manual
|
||||
description: ''
|
||||
triggers:
|
||||
- trigger: time
|
||||
at: 06:45:00
|
||||
conditions: []
|
||||
actions:
|
||||
- action: mqtt.publish
|
||||
metadata: {}
|
||||
data:
|
||||
qos: '0'
|
||||
retain: false
|
||||
topic: zigbee_home_2/h014s/set/pin_code
|
||||
payload: "{\n \"user\": 31,\n \"user_type\": \"unrestricted\",\n \"user_enabled\":
|
||||
true,\n \"pin_code\": 9480\n }\n"
|
||||
mode: single
|
||||
- id: '1745266911651'
|
||||
alias: enable_31
|
||||
description: ''
|
||||
use_blueprint:
|
||||
path: user/nimly_lock.yaml
|
||||
input:
|
||||
lock_device: lock.h014s
|
||||
trigger_time: 06:45:00
|
||||
pin_code: '9480'
|
||||
user_id: 31
|
||||
enabled: true
|
||||
- id: '1745267035417'
|
||||
alias: disable_31
|
||||
description: ''
|
||||
use_blueprint:
|
||||
path: user/nimly_lock.yaml
|
||||
input:
|
||||
trigger_time: '17:00:00'
|
||||
lock_device: lock.h014s
|
||||
user_id: 31
|
||||
pin_code: 'null'
|
||||
enabled: false
|
||||
- id: '1746992840091'
|
||||
alias: Save last person detection
|
||||
description: ''
|
||||
triggers:
|
||||
- type: turned_on
|
||||
device_id: 80ecd4d9a212d4bc811d958b336ccec0
|
||||
entity_id: 390b057edde8e3a7ce5dd96fbc8f7486
|
||||
domain: binary_sensor
|
||||
trigger: device
|
||||
conditions: []
|
||||
actions:
|
||||
- action: camera.snapshot
|
||||
metadata: {}
|
||||
data:
|
||||
filename: /config/www/last_person_detection.jpg
|
||||
target:
|
||||
device_id: 80ecd4d9a212d4bc811d958b336ccec0
|
||||
mode: single
|
||||
- id: '1755028231874'
|
||||
alias: 'Spela musik '
|
||||
description: ''
|
||||
triggers:
|
||||
- trigger: tag
|
||||
tag_id: d317de29-a548-4041-92a8-b34627e45cc3
|
||||
conditions: []
|
||||
actions:
|
||||
- action: media_player.play_media
|
||||
metadata: {}
|
||||
data:
|
||||
media_content_type: Music
|
||||
media_content_id: spotify:playlist:37i9dQZF1DWVCKO3xAlT1Q
|
||||
target:
|
||||
entity_id: media_player.ada
|
||||
mode: single
|
||||
- id: '1756585900611'
|
||||
alias: NFC Playbox
|
||||
description: ''
|
||||
triggers:
|
||||
- event_type: tag_scanned
|
||||
trigger: event
|
||||
actions:
|
||||
- choose:
|
||||
- conditions:
|
||||
- condition: template
|
||||
value_template: '{{ current is defined and current.method == ''play_media''
|
||||
}}'
|
||||
sequence:
|
||||
- target:
|
||||
entity_id: '{{ current.player }}'
|
||||
data:
|
||||
media:
|
||||
media_content_id: '{{ current.media_content_id }}'
|
||||
media_content_type: '{{ current.media_content_type | default(''Music'')
|
||||
}}'
|
||||
metadata: {}
|
||||
action: media_player.play_media
|
||||
- conditions:
|
||||
- condition: template
|
||||
value_template: '{{ current is defined and current.method == ''select_source''
|
||||
}}'
|
||||
sequence:
|
||||
- target:
|
||||
entity_id: '{{ current.player }}'
|
||||
data:
|
||||
source: '{{ current.source }}'
|
||||
action: media_player.select_source
|
||||
default:
|
||||
- data:
|
||||
title: NFC Playbox
|
||||
message: 'Okänd NFC-tagg: {{ tag_id }}'
|
||||
action: persistent_notification.create
|
||||
mode: single
|
||||
variables:
|
||||
TAGS:
|
||||
04-A1-4E-94-2E-02-89:
|
||||
info: spellista_dolly
|
||||
method: play_media
|
||||
player: media_player.ada
|
||||
media_content_id: spotify:playlist:3CTFR7Tf99Nj6rSM5l5HRf
|
||||
media_content_type: playlist
|
||||
04-01-BA-52-2E-02-89:
|
||||
info: spellista_godnattstund
|
||||
method: play_media
|
||||
player: media_player.ada
|
||||
media_content_id: spotify:playlist:2hHIaWS6pELC5w58vJraVJ
|
||||
media_content_type: playlist
|
||||
04-51-0C-91-2E-02-89:
|
||||
info: spellista_disco
|
||||
method: play_media
|
||||
player: media_player.ada
|
||||
media_content_id: spotify:playlist:7Lu5u70XvPDvRMc4fwMsLY
|
||||
media_content_type: playlist
|
||||
04-01-DF-98-2E-02-89:
|
||||
info: spellista_rosahelikopter
|
||||
method: play_media
|
||||
player: media_player.ada
|
||||
media_content_id: spotify:playlist:37i9dQZF1E8C5l0TDkGXpx
|
||||
media_content_type: playlist
|
||||
tag_id: '{{ trigger.event.data.tag_id }}'
|
||||
current: '{{ TAGS.get(tag_id) }}'
|
||||
|
|
@ -1,55 +0,0 @@
|
|||
blueprint:
|
||||
name: Motion-activated Light
|
||||
description: Turn on a light when motion is detected.
|
||||
domain: automation
|
||||
source_url: https://github.com/home-assistant/core/blob/dev/homeassistant/components/automation/blueprints/motion_light.yaml
|
||||
author: Home Assistant
|
||||
input:
|
||||
motion_entity:
|
||||
name: Motion Sensor
|
||||
selector:
|
||||
entity:
|
||||
domain: binary_sensor
|
||||
device_class: motion
|
||||
light_target:
|
||||
name: Light
|
||||
selector:
|
||||
target:
|
||||
entity:
|
||||
domain: light
|
||||
no_motion_wait:
|
||||
name: Wait time
|
||||
description: Time to leave the light on after last motion is detected.
|
||||
default: 120
|
||||
selector:
|
||||
number:
|
||||
min: 0
|
||||
max: 3600
|
||||
unit_of_measurement: seconds
|
||||
|
||||
# If motion is detected within the delay,
|
||||
# we restart the script.
|
||||
mode: restart
|
||||
max_exceeded: silent
|
||||
|
||||
trigger:
|
||||
platform: state
|
||||
entity_id: !input motion_entity
|
||||
from: "off"
|
||||
to: "on"
|
||||
|
||||
action:
|
||||
- alias: "Turn on the light"
|
||||
service: light.turn_on
|
||||
target: !input light_target
|
||||
- alias: "Wait until there is no motion from device"
|
||||
wait_for_trigger:
|
||||
platform: state
|
||||
entity_id: !input motion_entity
|
||||
from: "on"
|
||||
to: "off"
|
||||
- alias: "Wait the number of seconds that has been set"
|
||||
delay: !input no_motion_wait
|
||||
- alias: "Turn off the light"
|
||||
service: light.turn_off
|
||||
target: !input light_target
|
||||
|
|
@ -1,47 +0,0 @@
|
|||
blueprint:
|
||||
name: Zone Notification
|
||||
description: Send a notification to a device when a person leaves a specific zone.
|
||||
domain: automation
|
||||
source_url: https://github.com/home-assistant/core/blob/dev/homeassistant/components/automation/blueprints/notify_leaving_zone.yaml
|
||||
author: Home Assistant
|
||||
input:
|
||||
person_entity:
|
||||
name: Person
|
||||
selector:
|
||||
entity:
|
||||
domain: person
|
||||
zone_entity:
|
||||
name: Zone
|
||||
selector:
|
||||
entity:
|
||||
domain: zone
|
||||
notify_device:
|
||||
name: Device to notify
|
||||
description: Device needs to run the official Home Assistant app to receive notifications.
|
||||
selector:
|
||||
device:
|
||||
integration: mobile_app
|
||||
|
||||
trigger:
|
||||
platform: state
|
||||
entity_id: !input person_entity
|
||||
|
||||
variables:
|
||||
zone_entity: !input zone_entity
|
||||
# This is the state of the person when it's in this zone.
|
||||
zone_state: "{{ states[zone_entity].name }}"
|
||||
person_entity: !input person_entity
|
||||
person_name: "{{ states[person_entity].name }}"
|
||||
|
||||
condition:
|
||||
condition: template
|
||||
# The first case handles leaving the Home zone which has a special state when zoning called 'home'.
|
||||
# The second case handles leaving all other zones.
|
||||
value_template: "{{ zone_entity == 'zone.home' and trigger.from_state.state == 'home' and trigger.to_state.state != 'home' or trigger.from_state.state == zone_state and trigger.to_state.state != zone_state }}"
|
||||
|
||||
action:
|
||||
- alias: "Notify that a person has left the zone"
|
||||
domain: mobile_app
|
||||
type: notify
|
||||
device_id: !input notify_device
|
||||
message: "{{ person_name }} has left {{ zone_state }}"
|
||||
|
|
@ -1,58 +0,0 @@
|
|||
blueprint:
|
||||
name: Nimly lås - Enable/Disable användare
|
||||
description: Aktivera/inaktivera användare via MQTT till Nimly-lås i Zigbee2MQTT.
|
||||
domain: automation
|
||||
input:
|
||||
trigger_time:
|
||||
name: Tidpunkt
|
||||
description: När automationen ska köras
|
||||
selector:
|
||||
time: {}
|
||||
lock_device:
|
||||
name: Välj låsenhet
|
||||
description: Låset som ska styras
|
||||
selector:
|
||||
entity:
|
||||
domain: lock
|
||||
user_id:
|
||||
name: Användar-ID
|
||||
description: ID för användaren på låset
|
||||
selector:
|
||||
number:
|
||||
min: 1
|
||||
max: 250
|
||||
pin_code:
|
||||
name: PIN-kod
|
||||
description: PIN-kod för användaren. Sätt till null för att ta bort den.
|
||||
default: null
|
||||
selector:
|
||||
text:
|
||||
enabled:
|
||||
name: Aktivera användare
|
||||
description: true för att aktivera, false för att inaktivera
|
||||
selector:
|
||||
boolean: {}
|
||||
|
||||
mode: single
|
||||
|
||||
triggers:
|
||||
- trigger: time
|
||||
at: !input trigger_time
|
||||
|
||||
variables:
|
||||
topic: "zigbee_home_2/h014s/set/pin_code"
|
||||
user_id: !input user_id
|
||||
pin_code: !input pin_code
|
||||
enabled: !input enabled
|
||||
|
||||
actions:
|
||||
- service: mqtt.publish
|
||||
data:
|
||||
topic: "{{ topic }}"
|
||||
payload: >
|
||||
{
|
||||
"user": {{ user_id }},
|
||||
"user_type": "unrestricted",
|
||||
"user_enabled": {{ enabled | lower }},
|
||||
"pin_code": {{ 'null' if pin_code == 'null' else '"' ~ pin_code ~ '"' }}
|
||||
}
|
||||
|
|
@ -1,36 +0,0 @@
|
|||
blueprint:
|
||||
name: Announce
|
||||
description: A script that announces a message on a media player using text-to-speech.
|
||||
domain: script
|
||||
input:
|
||||
text_to_speech_engine:
|
||||
selector:
|
||||
entity:
|
||||
domain:
|
||||
- tts
|
||||
multiple: false
|
||||
name: Text-to-Speech engine
|
||||
media_player:
|
||||
selector:
|
||||
entity:
|
||||
domain:
|
||||
- media_player
|
||||
multiple: true
|
||||
name: Media Player
|
||||
source_url: https://community.home-assistant.io/t/announce-text-to-speech-on-media-player/699186
|
||||
mode: queued
|
||||
fields:
|
||||
message:
|
||||
selector:
|
||||
text:
|
||||
multiline: true
|
||||
name: Message
|
||||
description: The message to broadcast
|
||||
required: true
|
||||
sequence:
|
||||
- service: tts.speak
|
||||
data:
|
||||
media_player_entity_id: !input media_player
|
||||
message: '{{ message }}'
|
||||
target:
|
||||
entity_id: !input text_to_speech_engine
|
||||
|
|
@ -1,85 +0,0 @@
|
|||
blueprint:
|
||||
name: Confirmable Notification
|
||||
description: >-
|
||||
A script that sends an actionable notification with a confirmation before
|
||||
running the specified action.
|
||||
domain: script
|
||||
source_url: https://github.com/home-assistant/core/blob/master/homeassistant/components/script/blueprints/confirmable_notification.yaml
|
||||
author: Home Assistant
|
||||
input:
|
||||
notify_device:
|
||||
name: Device to notify
|
||||
description: Device needs to run the official Home Assistant app to receive notifications.
|
||||
selector:
|
||||
device:
|
||||
integration: mobile_app
|
||||
title:
|
||||
name: "Title"
|
||||
description: "The title of the button shown in the notification."
|
||||
default: ""
|
||||
selector:
|
||||
text:
|
||||
message:
|
||||
name: "Message"
|
||||
description: "The message body"
|
||||
selector:
|
||||
text:
|
||||
confirm_text:
|
||||
name: "Confirmation Text"
|
||||
description: "Text to show on the confirmation button"
|
||||
default: "Confirm"
|
||||
selector:
|
||||
text:
|
||||
confirm_action:
|
||||
name: "Confirmation Action"
|
||||
description: "Action to run when notification is confirmed"
|
||||
default: []
|
||||
selector:
|
||||
action:
|
||||
dismiss_text:
|
||||
name: "Dismiss Text"
|
||||
description: "Text to show on the dismiss button"
|
||||
default: "Dismiss"
|
||||
selector:
|
||||
text:
|
||||
dismiss_action:
|
||||
name: "Dismiss Action"
|
||||
description: "Action to run when notification is dismissed"
|
||||
default: []
|
||||
selector:
|
||||
action:
|
||||
|
||||
mode: restart
|
||||
|
||||
sequence:
|
||||
- alias: "Set up variables"
|
||||
variables:
|
||||
action_confirm: "{{ 'CONFIRM_' ~ context.id }}"
|
||||
action_dismiss: "{{ 'DISMISS_' ~ context.id }}"
|
||||
- alias: "Send notification"
|
||||
domain: mobile_app
|
||||
type: notify
|
||||
device_id: !input notify_device
|
||||
title: !input title
|
||||
message: !input message
|
||||
data:
|
||||
actions:
|
||||
- action: "{{ action_confirm }}"
|
||||
title: !input confirm_text
|
||||
- action: "{{ action_dismiss }}"
|
||||
title: !input dismiss_text
|
||||
- alias: "Awaiting response"
|
||||
wait_for_trigger:
|
||||
- platform: event
|
||||
event_type: mobile_app_notification_action
|
||||
event_data:
|
||||
action: "{{ action_confirm }}"
|
||||
- platform: event
|
||||
event_type: mobile_app_notification_action
|
||||
event_data:
|
||||
action: "{{ action_dismiss }}"
|
||||
- choose:
|
||||
- conditions: "{{ wait.trigger.event.data.action == action_confirm }}"
|
||||
sequence: !input confirm_action
|
||||
- conditions: "{{ wait.trigger.event.data.action == action_dismiss }}"
|
||||
sequence: !input dismiss_action
|
||||
|
|
@ -1,27 +0,0 @@
|
|||
blueprint:
|
||||
name: Invert a binary sensor
|
||||
description: Creates a binary_sensor which holds the inverted value of a reference binary_sensor
|
||||
domain: template
|
||||
source_url: https://github.com/home-assistant/core/blob/dev/homeassistant/components/template/blueprints/inverted_binary_sensor.yaml
|
||||
input:
|
||||
reference_entity:
|
||||
name: Binary sensor to be inverted
|
||||
description: The binary_sensor which needs to have its value inverted
|
||||
selector:
|
||||
entity:
|
||||
domain: binary_sensor
|
||||
variables:
|
||||
reference_entity: !input reference_entity
|
||||
binary_sensor:
|
||||
state: >
|
||||
{% if states(reference_entity) == 'on' %}
|
||||
off
|
||||
{% elif states(reference_entity) == 'off' %}
|
||||
on
|
||||
{% else %}
|
||||
{{ states(reference_entity) }}
|
||||
{% endif %}
|
||||
# delay_on: not_used in this example
|
||||
# delay_off: not_used in this example
|
||||
# auto_off: not_used in this example
|
||||
availability: "{{ states(reference_entity) not in ('unknown', 'unavailable') }}"
|
||||
|
|
@ -1,112 +0,0 @@
|
|||
|
||||
# Loads default set of integrations. Do not remove.
|
||||
default_config:
|
||||
|
||||
# Load frontend themes from the themes folder
|
||||
frontend:
|
||||
themes: !include_dir_merge_named themes
|
||||
extra_module_url:
|
||||
- /config/www/community/lovelace-card-mod/card-mod.js
|
||||
|
||||
|
||||
automation: !include automations.yaml
|
||||
script: !include scripts.yaml
|
||||
scene: !include scenes.yaml
|
||||
command_line: !include sensor.yaml
|
||||
|
||||
battery_notes:
|
||||
|
||||
|
||||
#lovelace:
|
||||
# mode: storage
|
||||
# resources:
|
||||
# - url: /local/week-planner-card_2.js
|
||||
# type: module
|
||||
# dashboards:
|
||||
# dash-general:
|
||||
# mode: yaml
|
||||
# filename: dashboards/default.yaml
|
||||
# title: Overview
|
||||
# icon: mdi:tools
|
||||
# show_in_sidebar: true
|
||||
# require_admin: false
|
||||
|
||||
tts:
|
||||
- platform: edge_tts
|
||||
service-name: edge-say
|
||||
language: sv-SE
|
||||
|
||||
#This is to synthesise TTS for the Google Home Mini
|
||||
input_text:
|
||||
tts_syntesiser:
|
||||
name: TTS-til-Google
|
||||
|
||||
proximity:
|
||||
home_jaffa:
|
||||
zone: home
|
||||
devices:
|
||||
- device_tracker.jaffa_location
|
||||
tolerance: 5
|
||||
unit_of_measurement: km
|
||||
|
||||
homeassistant:
|
||||
internal_url: http://10.0.0.203:8123
|
||||
external_url: https://ha.milvert.com
|
||||
auth_providers:
|
||||
- type: homeassistant
|
||||
|
||||
packages: !include_dir_named packages
|
||||
#evcc: !include packages/evcc.yaml
|
||||
allowlist_external_dirs:
|
||||
- "/config/files"
|
||||
customize:
|
||||
# Add an entry for each entity that you want to overwrite.
|
||||
thermostat.family_room:
|
||||
entity_picture: https://example.com/images/nest.jpg
|
||||
friendly_name: Nest
|
||||
|
||||
|
||||
sonos:
|
||||
media_player:
|
||||
advertise_addr: 10.0.0.203
|
||||
hosts:
|
||||
- 10.0.3.33
|
||||
- 10.0.3.32
|
||||
|
||||
|
||||
logger:
|
||||
default: error
|
||||
|
||||
#logs:
|
||||
|
||||
#homeassistant.components.command_line: debug
|
||||
#adax: debug
|
||||
#custom_components.adax: debug
|
||||
#homeassistant.components.adax: debug
|
||||
# rflink: error
|
||||
# homeassistant.components.rflink: debug
|
||||
|
||||
|
||||
http:
|
||||
use_x_forwarded_for: true
|
||||
ip_ban_enabled: true
|
||||
login_attempts_threshold: 5
|
||||
trusted_proxies:
|
||||
- 10.0.0.223
|
||||
- 172.19.0.0/24
|
||||
|
||||
template:
|
||||
- sensor:
|
||||
- name: "Vardagsrum Source"
|
||||
state: >
|
||||
{% set source = state_attr('media_player.vardagsrum', 'source') | trim %}
|
||||
{% if source == "TV" %}
|
||||
TV
|
||||
{% else %}
|
||||
{{ source if source else "none" }}
|
||||
{% endif %}
|
||||
attributes:
|
||||
original_source: "{{ state_attr('media_player.vardagsrum', 'source') }}"
|
||||
|
||||
|
||||
|
||||
|
|
@ -1,229 +0,0 @@
|
|||
"""HACS gives you a powerful UI to handle downloads of all your custom needs.
|
||||
|
||||
For more details about this integration, please refer to the documentation at
|
||||
https://hacs.xyz/
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from aiogithubapi import AIOGitHubAPIException, GitHub, GitHubAPI
|
||||
from aiogithubapi.const import ACCEPT_HEADERS
|
||||
from awesomeversion import AwesomeVersion
|
||||
from homeassistant.components.frontend import async_remove_panel
|
||||
from homeassistant.components.lovelace.system_health import system_health_info
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.const import Platform, __version__ as HAVERSION
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.entity_registry import async_get as async_get_entity_registry
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.helpers.start import async_at_start
|
||||
from homeassistant.loader import async_get_integration
|
||||
|
||||
from .base import HacsBase
|
||||
from .const import DOMAIN, HACS_SYSTEM_ID, MINIMUM_HA_VERSION, STARTUP
|
||||
from .data_client import HacsDataClient
|
||||
from .enums import HacsDisabledReason, HacsStage, LovelaceMode
|
||||
from .frontend import async_register_frontend
|
||||
from .utils.data import HacsData
|
||||
from .utils.queue_manager import QueueManager
|
||||
from .utils.version import version_left_higher_or_equal_then_right
|
||||
from .websocket import async_register_websocket_commands
|
||||
|
||||
PLATFORMS = [Platform.SWITCH, Platform.UPDATE]
|
||||
|
||||
|
||||
async def _async_initialize_integration(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
) -> bool:
|
||||
"""Initialize the integration"""
|
||||
hass.data[DOMAIN] = hacs = HacsBase()
|
||||
hacs.enable_hacs()
|
||||
|
||||
if config_entry.source == SOURCE_IMPORT:
|
||||
# Import is not supported
|
||||
hass.async_create_task(hass.config_entries.async_remove(config_entry.entry_id))
|
||||
return False
|
||||
|
||||
hacs.configuration.update_from_dict(
|
||||
{
|
||||
"config_entry": config_entry,
|
||||
**config_entry.data,
|
||||
**config_entry.options,
|
||||
},
|
||||
)
|
||||
|
||||
integration = await async_get_integration(hass, DOMAIN)
|
||||
|
||||
hacs.set_stage(None)
|
||||
|
||||
hacs.log.info(STARTUP, integration.version)
|
||||
|
||||
clientsession = async_get_clientsession(hass)
|
||||
|
||||
hacs.integration = integration
|
||||
hacs.version = integration.version
|
||||
hacs.configuration.dev = integration.version == "0.0.0"
|
||||
hacs.hass = hass
|
||||
hacs.queue = QueueManager(hass=hass)
|
||||
hacs.data = HacsData(hacs=hacs)
|
||||
hacs.data_client = HacsDataClient(
|
||||
session=clientsession,
|
||||
client_name=f"HACS/{integration.version}",
|
||||
)
|
||||
hacs.system.running = True
|
||||
hacs.session = clientsession
|
||||
|
||||
hacs.core.lovelace_mode = LovelaceMode.YAML
|
||||
try:
|
||||
lovelace_info = await system_health_info(hacs.hass)
|
||||
hacs.core.lovelace_mode = LovelaceMode(lovelace_info.get("mode", "yaml"))
|
||||
except BaseException: # lgtm [py/catch-base-exception] pylint: disable=broad-except
|
||||
# If this happens, the users YAML is not valid, we assume YAML mode
|
||||
pass
|
||||
hacs.core.config_path = hacs.hass.config.path()
|
||||
|
||||
if hacs.core.ha_version is None:
|
||||
hacs.core.ha_version = AwesomeVersion(HAVERSION)
|
||||
|
||||
## Legacy GitHub client
|
||||
hacs.github = GitHub(
|
||||
hacs.configuration.token,
|
||||
clientsession,
|
||||
headers={
|
||||
"User-Agent": f"HACS/{hacs.version}",
|
||||
"Accept": ACCEPT_HEADERS["preview"],
|
||||
},
|
||||
)
|
||||
|
||||
## New GitHub client
|
||||
hacs.githubapi = GitHubAPI(
|
||||
token=hacs.configuration.token,
|
||||
session=clientsession,
|
||||
**{"client_name": f"HACS/{hacs.version}"},
|
||||
)
|
||||
|
||||
async def async_startup():
|
||||
"""HACS startup tasks."""
|
||||
hacs.enable_hacs()
|
||||
|
||||
try:
|
||||
import custom_components.custom_updater
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
hacs.log.critical(
|
||||
"HACS cannot be used with custom_updater. "
|
||||
"To use HACS you need to remove custom_updater from `custom_components`",
|
||||
)
|
||||
|
||||
hacs.disable_hacs(HacsDisabledReason.CONSTRAINS)
|
||||
return False
|
||||
|
||||
if not version_left_higher_or_equal_then_right(
|
||||
hacs.core.ha_version.string,
|
||||
MINIMUM_HA_VERSION,
|
||||
):
|
||||
hacs.log.critical(
|
||||
"You need HA version %s or newer to use this integration.",
|
||||
MINIMUM_HA_VERSION,
|
||||
)
|
||||
hacs.disable_hacs(HacsDisabledReason.CONSTRAINS)
|
||||
return False
|
||||
|
||||
if not await hacs.data.restore():
|
||||
hacs.disable_hacs(HacsDisabledReason.RESTORE)
|
||||
return False
|
||||
|
||||
hacs.set_active_categories()
|
||||
|
||||
async_register_websocket_commands(hass)
|
||||
await async_register_frontend(hass, hacs)
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
|
||||
|
||||
hacs.set_stage(HacsStage.SETUP)
|
||||
if hacs.system.disabled:
|
||||
return False
|
||||
|
||||
hacs.set_stage(HacsStage.WAITING)
|
||||
hacs.log.info("Setup complete, waiting for Home Assistant before startup tasks starts")
|
||||
|
||||
# Schedule startup tasks
|
||||
async_at_start(hass=hass, at_start_cb=hacs.startup_tasks)
|
||||
|
||||
return not hacs.system.disabled
|
||||
|
||||
async def async_try_startup(_=None):
|
||||
"""Startup wrapper for yaml config."""
|
||||
try:
|
||||
startup_result = await async_startup()
|
||||
except AIOGitHubAPIException:
|
||||
startup_result = False
|
||||
if not startup_result:
|
||||
if hacs.system.disabled_reason != HacsDisabledReason.INVALID_TOKEN:
|
||||
hacs.log.info("Could not setup HACS, trying again in 15 min")
|
||||
async_call_later(hass, 900, async_try_startup)
|
||||
return
|
||||
hacs.enable_hacs()
|
||||
|
||||
await async_try_startup()
|
||||
|
||||
# Remove old (v0-v1) sensor if it exists, can be removed in v3
|
||||
er = async_get_entity_registry(hass)
|
||||
if old_sensor := er.async_get_entity_id("sensor", DOMAIN, HACS_SYSTEM_ID):
|
||||
er.async_remove(old_sensor)
|
||||
|
||||
# Mischief managed!
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
|
||||
"""Set up this integration using UI."""
|
||||
config_entry.async_on_unload(config_entry.add_update_listener(async_reload_entry))
|
||||
setup_result = await _async_initialize_integration(hass=hass, config_entry=config_entry)
|
||||
hacs: HacsBase = hass.data[DOMAIN]
|
||||
return setup_result and not hacs.system.disabled
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
|
||||
"""Handle removal of an entry."""
|
||||
hacs: HacsBase = hass.data[DOMAIN]
|
||||
|
||||
if hacs.queue.has_pending_tasks:
|
||||
hacs.log.warning("Pending tasks, can not unload, try again later.")
|
||||
return False
|
||||
|
||||
# Clear out pending queue
|
||||
hacs.queue.clear()
|
||||
|
||||
for task in hacs.recurring_tasks:
|
||||
# Cancel all pending tasks
|
||||
task()
|
||||
|
||||
# Store data
|
||||
await hacs.data.async_write(force=True)
|
||||
|
||||
try:
|
||||
if hass.data.get("frontend_panels", {}).get("hacs"):
|
||||
hacs.log.info("Removing sidepanel")
|
||||
async_remove_panel(hass, "hacs")
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)
|
||||
|
||||
hacs.set_stage(None)
|
||||
hacs.disable_hacs(HacsDisabledReason.REMOVED)
|
||||
|
||||
hass.data.pop(DOMAIN, None)
|
||||
|
||||
return unload_ok
|
||||
|
||||
|
||||
async def async_reload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
|
||||
"""Reload the HACS config entry."""
|
||||
if not await async_unload_entry(hass, config_entry):
|
||||
return
|
||||
await async_setup_entry(hass, config_entry)
|
||||
|
|
@ -1,225 +0,0 @@
|
|||
"""Adds config flow for HACS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from contextlib import suppress
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from aiogithubapi import (
|
||||
GitHubDeviceAPI,
|
||||
GitHubException,
|
||||
GitHubLoginDeviceModel,
|
||||
GitHubLoginOauthModel,
|
||||
)
|
||||
from aiogithubapi.common.const import OAUTH_USER_LOGIN
|
||||
from awesomeversion import AwesomeVersion
|
||||
from homeassistant.config_entries import ConfigFlow, OptionsFlow
|
||||
from homeassistant.const import __version__ as HAVERSION
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.data_entry_flow import UnknownFlow
|
||||
from homeassistant.helpers import aiohttp_client
|
||||
from homeassistant.loader import async_get_integration
|
||||
import voluptuous as vol
|
||||
|
||||
from .base import HacsBase
|
||||
from .const import CLIENT_ID, DOMAIN, LOCALE, MINIMUM_HA_VERSION
|
||||
from .utils.configuration_schema import (
|
||||
APPDAEMON,
|
||||
COUNTRY,
|
||||
SIDEPANEL_ICON,
|
||||
SIDEPANEL_TITLE,
|
||||
)
|
||||
from .utils.logger import LOGGER
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
|
||||
class HacsFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"""Config flow for HACS."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
hass: HomeAssistant
|
||||
activation_task: asyncio.Task | None = None
|
||||
device: GitHubDeviceAPI | None = None
|
||||
|
||||
_registration: GitHubLoginDeviceModel | None = None
|
||||
_activation: GitHubLoginOauthModel | None = None
|
||||
_reauth: bool = False
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize."""
|
||||
self._errors = {}
|
||||
self._user_input = {}
|
||||
|
||||
async def async_step_user(self, user_input):
|
||||
"""Handle a flow initialized by the user."""
|
||||
self._errors = {}
|
||||
if self._async_current_entries():
|
||||
return self.async_abort(reason="single_instance_allowed")
|
||||
if self.hass.data.get(DOMAIN):
|
||||
return self.async_abort(reason="single_instance_allowed")
|
||||
|
||||
if user_input:
|
||||
if [x for x in user_input if x.startswith("acc_") and not user_input[x]]:
|
||||
self._errors["base"] = "acc"
|
||||
return await self._show_config_form(user_input)
|
||||
|
||||
self._user_input = user_input
|
||||
|
||||
return await self.async_step_device(user_input)
|
||||
|
||||
# Initial form
|
||||
return await self._show_config_form(user_input)
|
||||
|
||||
async def async_step_device(self, _user_input):
|
||||
"""Handle device steps."""
|
||||
|
||||
async def _wait_for_activation() -> None:
|
||||
try:
|
||||
response = await self.device.activation(device_code=self._registration.device_code)
|
||||
self._activation = response.data
|
||||
finally:
|
||||
|
||||
async def _progress():
|
||||
with suppress(UnknownFlow):
|
||||
await self.hass.config_entries.flow.async_configure(flow_id=self.flow_id)
|
||||
|
||||
if not self.device:
|
||||
integration = await async_get_integration(self.hass, DOMAIN)
|
||||
self.device = GitHubDeviceAPI(
|
||||
client_id=CLIENT_ID,
|
||||
session=aiohttp_client.async_get_clientsession(self.hass),
|
||||
**{"client_name": f"HACS/{integration.version}"},
|
||||
)
|
||||
try:
|
||||
response = await self.device.register()
|
||||
self._registration = response.data
|
||||
except GitHubException as exception:
|
||||
LOGGER.exception(exception)
|
||||
return self.async_abort(reason="could_not_register")
|
||||
|
||||
if self.activation_task is None:
|
||||
self.activation_task = self.hass.async_create_task(_wait_for_activation())
|
||||
|
||||
if self.activation_task.done():
|
||||
if (exception := self.activation_task.exception()) is not None:
|
||||
LOGGER.exception(exception)
|
||||
return self.async_show_progress_done(next_step_id="could_not_register")
|
||||
return self.async_show_progress_done(next_step_id="device_done")
|
||||
|
||||
show_progress_kwargs = {
|
||||
"step_id": "device",
|
||||
"progress_action": "wait_for_device",
|
||||
"description_placeholders": {
|
||||
"url": OAUTH_USER_LOGIN,
|
||||
"code": self._registration.user_code,
|
||||
},
|
||||
"progress_task": self.activation_task,
|
||||
}
|
||||
return self.async_show_progress(**show_progress_kwargs)
|
||||
|
||||
async def _show_config_form(self, user_input):
|
||||
"""Show the configuration form to edit location data."""
|
||||
|
||||
if not user_input:
|
||||
user_input = {}
|
||||
|
||||
if AwesomeVersion(HAVERSION) < MINIMUM_HA_VERSION:
|
||||
return self.async_abort(
|
||||
reason="min_ha_version",
|
||||
description_placeholders={"version": MINIMUM_HA_VERSION},
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required("acc_logs", default=user_input.get("acc_logs", False)): bool,
|
||||
vol.Required("acc_addons", default=user_input.get("acc_addons", False)): bool,
|
||||
vol.Required(
|
||||
"acc_untested", default=user_input.get("acc_untested", False)
|
||||
): bool,
|
||||
vol.Required("acc_disable", default=user_input.get("acc_disable", False)): bool,
|
||||
}
|
||||
),
|
||||
errors=self._errors,
|
||||
)
|
||||
|
||||
async def async_step_device_done(self, user_input: dict[str, bool] | None = None):
|
||||
"""Handle device steps"""
|
||||
if self._reauth:
|
||||
existing_entry = self.hass.config_entries.async_get_entry(self.context["entry_id"])
|
||||
self.hass.config_entries.async_update_entry(
|
||||
existing_entry, data={**existing_entry.data, "token": self._activation.access_token}
|
||||
)
|
||||
await self.hass.config_entries.async_reload(existing_entry.entry_id)
|
||||
return self.async_abort(reason="reauth_successful")
|
||||
|
||||
return self.async_create_entry(
|
||||
title="",
|
||||
data={
|
||||
"token": self._activation.access_token,
|
||||
},
|
||||
options={
|
||||
"experimental": True,
|
||||
},
|
||||
)
|
||||
|
||||
async def async_step_could_not_register(self, _user_input=None):
|
||||
"""Handle issues that need transition await from progress step."""
|
||||
return self.async_abort(reason="could_not_register")
|
||||
|
||||
async def async_step_reauth(self, _user_input=None):
|
||||
"""Perform reauth upon an API authentication error."""
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(self, user_input=None):
|
||||
"""Dialog that informs the user that reauth is required."""
|
||||
if user_input is None:
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=vol.Schema({}),
|
||||
)
|
||||
self._reauth = True
|
||||
return await self.async_step_device(None)
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(config_entry):
|
||||
return HacsOptionsFlowHandler(config_entry)
|
||||
|
||||
|
||||
class HacsOptionsFlowHandler(OptionsFlow):
|
||||
"""HACS config flow options handler."""
|
||||
|
||||
def __init__(self, config_entry):
|
||||
"""Initialize HACS options flow."""
|
||||
if AwesomeVersion(HAVERSION) < "2024.11.99":
|
||||
self.config_entry = config_entry
|
||||
|
||||
async def async_step_init(self, _user_input=None):
|
||||
"""Manage the options."""
|
||||
return await self.async_step_user()
|
||||
|
||||
async def async_step_user(self, user_input=None):
|
||||
"""Handle a flow initialized by the user."""
|
||||
hacs: HacsBase = self.hass.data.get(DOMAIN)
|
||||
if user_input is not None:
|
||||
return self.async_create_entry(title="", data={**user_input, "experimental": True})
|
||||
|
||||
if hacs is None or hacs.configuration is None:
|
||||
return self.async_abort(reason="not_setup")
|
||||
|
||||
if hacs.queue.has_pending_tasks:
|
||||
return self.async_abort(reason="pending_tasks")
|
||||
|
||||
schema = {
|
||||
vol.Optional(SIDEPANEL_TITLE, default=hacs.configuration.sidepanel_title): str,
|
||||
vol.Optional(SIDEPANEL_ICON, default=hacs.configuration.sidepanel_icon): str,
|
||||
vol.Optional(COUNTRY, default=hacs.configuration.country): vol.In(LOCALE),
|
||||
vol.Optional(APPDAEMON, default=hacs.configuration.appdaemon): bool,
|
||||
}
|
||||
|
||||
return self.async_show_form(step_id="user", data_schema=vol.Schema(schema))
|
||||
|
|
@ -1,294 +0,0 @@
|
|||
"""Constants for HACS"""
|
||||
|
||||
from typing import TypeVar
|
||||
|
||||
from aiogithubapi.common.const import ACCEPT_HEADERS
|
||||
|
||||
NAME_SHORT = "HACS"
|
||||
DOMAIN = "hacs"
|
||||
CLIENT_ID = "395a8e669c5de9f7c6e8"
|
||||
MINIMUM_HA_VERSION = "2024.4.1"
|
||||
|
||||
URL_BASE = "/hacsfiles"
|
||||
|
||||
TV = TypeVar("TV")
|
||||
|
||||
PACKAGE_NAME = "custom_components.hacs"
|
||||
|
||||
DEFAULT_CONCURRENT_TASKS = 15
|
||||
DEFAULT_CONCURRENT_BACKOFF_TIME = 1
|
||||
|
||||
HACS_REPOSITORY_ID = "172733314"
|
||||
|
||||
HACS_ACTION_GITHUB_API_HEADERS = {
|
||||
"User-Agent": "HACS/action",
|
||||
"Accept": ACCEPT_HEADERS["preview"],
|
||||
}
|
||||
|
||||
VERSION_STORAGE = "6"
|
||||
STORENAME = "hacs"
|
||||
|
||||
HACS_SYSTEM_ID = "0717a0cd-745c-48fd-9b16-c8534c9704f9-bc944b0f-fd42-4a58-a072-ade38d1444cd"
|
||||
|
||||
STARTUP = """
|
||||
-------------------------------------------------------------------
|
||||
HACS (Home Assistant Community Store)
|
||||
|
||||
Version: %s
|
||||
This is a custom integration
|
||||
If you have any issues with this you need to open an issue here:
|
||||
https://github.com/hacs/integration/issues
|
||||
-------------------------------------------------------------------
|
||||
"""
|
||||
|
||||
LOCALE = [
|
||||
"ALL",
|
||||
"AF",
|
||||
"AL",
|
||||
"DZ",
|
||||
"AS",
|
||||
"AD",
|
||||
"AO",
|
||||
"AI",
|
||||
"AQ",
|
||||
"AG",
|
||||
"AR",
|
||||
"AM",
|
||||
"AW",
|
||||
"AU",
|
||||
"AT",
|
||||
"AZ",
|
||||
"BS",
|
||||
"BH",
|
||||
"BD",
|
||||
"BB",
|
||||
"BY",
|
||||
"BE",
|
||||
"BZ",
|
||||
"BJ",
|
||||
"BM",
|
||||
"BT",
|
||||
"BO",
|
||||
"BQ",
|
||||
"BA",
|
||||
"BW",
|
||||
"BV",
|
||||
"BR",
|
||||
"IO",
|
||||
"BN",
|
||||
"BG",
|
||||
"BF",
|
||||
"BI",
|
||||
"KH",
|
||||
"CM",
|
||||
"CA",
|
||||
"CV",
|
||||
"KY",
|
||||
"CF",
|
||||
"TD",
|
||||
"CL",
|
||||
"CN",
|
||||
"CX",
|
||||
"CC",
|
||||
"CO",
|
||||
"KM",
|
||||
"CG",
|
||||
"CD",
|
||||
"CK",
|
||||
"CR",
|
||||
"HR",
|
||||
"CU",
|
||||
"CW",
|
||||
"CY",
|
||||
"CZ",
|
||||
"CI",
|
||||
"DK",
|
||||
"DJ",
|
||||
"DM",
|
||||
"DO",
|
||||
"EC",
|
||||
"EG",
|
||||
"SV",
|
||||
"GQ",
|
||||
"ER",
|
||||
"EE",
|
||||
"ET",
|
||||
"FK",
|
||||
"FO",
|
||||
"FJ",
|
||||
"FI",
|
||||
"FR",
|
||||
"GF",
|
||||
"PF",
|
||||
"TF",
|
||||
"GA",
|
||||
"GM",
|
||||
"GE",
|
||||
"DE",
|
||||
"GH",
|
||||
"GI",
|
||||
"GR",
|
||||
"GL",
|
||||
"GD",
|
||||
"GP",
|
||||
"GU",
|
||||
"GT",
|
||||
"GG",
|
||||
"GN",
|
||||
"GW",
|
||||
"GY",
|
||||
"HT",
|
||||
"HM",
|
||||
"VA",
|
||||
"HN",
|
||||
"HK",
|
||||
"HU",
|
||||
"IS",
|
||||
"IN",
|
||||
"ID",
|
||||
"IR",
|
||||
"IQ",
|
||||
"IE",
|
||||
"IM",
|
||||
"IL",
|
||||
"IT",
|
||||
"JM",
|
||||
"JP",
|
||||
"JE",
|
||||
"JO",
|
||||
"KZ",
|
||||
"KE",
|
||||
"KI",
|
||||
"KP",
|
||||
"KR",
|
||||
"KW",
|
||||
"KG",
|
||||
"LA",
|
||||
"LV",
|
||||
"LB",
|
||||
"LS",
|
||||
"LR",
|
||||
"LY",
|
||||
"LI",
|
||||
"LT",
|
||||
"LU",
|
||||
"MO",
|
||||
"MK",
|
||||
"MG",
|
||||
"MW",
|
||||
"MY",
|
||||
"MV",
|
||||
"ML",
|
||||
"MT",
|
||||
"MH",
|
||||
"MQ",
|
||||
"MR",
|
||||
"MU",
|
||||
"YT",
|
||||
"MX",
|
||||
"FM",
|
||||
"MD",
|
||||
"MC",
|
||||
"MN",
|
||||
"ME",
|
||||
"MS",
|
||||
"MA",
|
||||
"MZ",
|
||||
"MM",
|
||||
"NA",
|
||||
"NR",
|
||||
"NP",
|
||||
"NL",
|
||||
"NC",
|
||||
"NZ",
|
||||
"NI",
|
||||
"NE",
|
||||
"NG",
|
||||
"NU",
|
||||
"NF",
|
||||
"MP",
|
||||
"NO",
|
||||
"OM",
|
||||
"PK",
|
||||
"PW",
|
||||
"PS",
|
||||
"PA",
|
||||
"PG",
|
||||
"PY",
|
||||
"PE",
|
||||
"PH",
|
||||
"PN",
|
||||
"PL",
|
||||
"PT",
|
||||
"PR",
|
||||
"QA",
|
||||
"RO",
|
||||
"RU",
|
||||
"RW",
|
||||
"RE",
|
||||
"BL",
|
||||
"SH",
|
||||
"KN",
|
||||
"LC",
|
||||
"MF",
|
||||
"PM",
|
||||
"VC",
|
||||
"WS",
|
||||
"SM",
|
||||
"ST",
|
||||
"SA",
|
||||
"SN",
|
||||
"RS",
|
||||
"SC",
|
||||
"SL",
|
||||
"SG",
|
||||
"SX",
|
||||
"SK",
|
||||
"SI",
|
||||
"SB",
|
||||
"SO",
|
||||
"ZA",
|
||||
"GS",
|
||||
"SS",
|
||||
"ES",
|
||||
"LK",
|
||||
"SD",
|
||||
"SR",
|
||||
"SJ",
|
||||
"SZ",
|
||||
"SE",
|
||||
"CH",
|
||||
"SY",
|
||||
"TW",
|
||||
"TJ",
|
||||
"TZ",
|
||||
"TH",
|
||||
"TL",
|
||||
"TG",
|
||||
"TK",
|
||||
"TO",
|
||||
"TT",
|
||||
"TN",
|
||||
"TR",
|
||||
"TM",
|
||||
"TC",
|
||||
"TV",
|
||||
"UG",
|
||||
"UA",
|
||||
"AE",
|
||||
"GB",
|
||||
"US",
|
||||
"UM",
|
||||
"UY",
|
||||
"UZ",
|
||||
"VU",
|
||||
"VE",
|
||||
"VN",
|
||||
"VG",
|
||||
"VI",
|
||||
"WF",
|
||||
"EH",
|
||||
"YE",
|
||||
"ZM",
|
||||
"ZW",
|
||||
]
|
||||
|
|
@ -1,98 +0,0 @@
|
|||
"""HACS Data client."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientSession, ClientTimeout
|
||||
import voluptuous as vol
|
||||
|
||||
from .exceptions import HacsException, HacsNotModifiedException
|
||||
from .utils.logger import LOGGER
|
||||
from .utils.validate import (
|
||||
VALIDATE_FETCHED_V2_CRITICAL_REPO_SCHEMA,
|
||||
VALIDATE_FETCHED_V2_REMOVED_REPO_SCHEMA,
|
||||
VALIDATE_FETCHED_V2_REPO_DATA,
|
||||
)
|
||||
|
||||
CRITICAL_REMOVED_VALIDATORS = {
|
||||
"critical": VALIDATE_FETCHED_V2_CRITICAL_REPO_SCHEMA,
|
||||
"removed": VALIDATE_FETCHED_V2_REMOVED_REPO_SCHEMA,
|
||||
}
|
||||
|
||||
|
||||
class HacsDataClient:
|
||||
"""HACS Data client."""
|
||||
|
||||
def __init__(self, session: ClientSession, client_name: str) -> None:
|
||||
"""Initialize."""
|
||||
self._client_name = client_name
|
||||
self._etags = {}
|
||||
self._session = session
|
||||
|
||||
async def _do_request(
|
||||
self,
|
||||
filename: str,
|
||||
section: str | None = None,
|
||||
) -> dict[str, dict[str, Any]] | list[str]:
|
||||
"""Do request."""
|
||||
endpoint = "/".join([v for v in [section, filename] if v is not None])
|
||||
try:
|
||||
response = await self._session.get(
|
||||
f"https://data-v2.hacs.xyz/{endpoint}",
|
||||
timeout=ClientTimeout(total=60),
|
||||
headers={
|
||||
"User-Agent": self._client_name,
|
||||
"If-None-Match": self._etags.get(endpoint, ""),
|
||||
},
|
||||
)
|
||||
if response.status == 304:
|
||||
raise HacsNotModifiedException() from None
|
||||
response.raise_for_status()
|
||||
except HacsNotModifiedException:
|
||||
raise
|
||||
except TimeoutError:
|
||||
raise HacsException("Timeout of 60s reached") from None
|
||||
except Exception as exception:
|
||||
raise HacsException(f"Error fetching data from HACS: {exception}") from exception
|
||||
|
||||
self._etags[endpoint] = response.headers.get("etag")
|
||||
|
||||
return await response.json()
|
||||
|
||||
async def get_data(self, section: str | None, *, validate: bool) -> dict[str, dict[str, Any]]:
|
||||
"""Get data."""
|
||||
data = await self._do_request(filename="data.json", section=section)
|
||||
if not validate:
|
||||
return data
|
||||
|
||||
if section in VALIDATE_FETCHED_V2_REPO_DATA:
|
||||
validated = {}
|
||||
for key, repo_data in data.items():
|
||||
try:
|
||||
validated[key] = VALIDATE_FETCHED_V2_REPO_DATA[section](repo_data)
|
||||
except vol.Invalid as exception:
|
||||
LOGGER.info(
|
||||
"Got invalid data for %s (%s)", repo_data.get("full_name", key), exception
|
||||
)
|
||||
continue
|
||||
|
||||
return validated
|
||||
|
||||
if not (validator := CRITICAL_REMOVED_VALIDATORS.get(section)):
|
||||
raise ValueError(f"Do not know how to validate {section}")
|
||||
|
||||
validated = []
|
||||
for repo_data in data:
|
||||
try:
|
||||
validated.append(validator(repo_data))
|
||||
except vol.Invalid as exception:
|
||||
LOGGER.info("Got invalid data for %s (%s)", section, exception)
|
||||
continue
|
||||
|
||||
return validated
|
||||
|
||||
async def get_repositories(self, section: str) -> list[str]:
|
||||
"""Get repositories."""
|
||||
return await self._do_request(filename="repositories.json", section=section)
|
||||
|
|
@ -1,80 +0,0 @@
|
|||
"""Diagnostics support for HACS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from aiogithubapi import GitHubException
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .base import HacsBase
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
hacs: HacsBase = hass.data[DOMAIN]
|
||||
|
||||
data = {
|
||||
"entry": entry.as_dict(),
|
||||
"hacs": {
|
||||
"stage": hacs.stage,
|
||||
"version": hacs.version,
|
||||
"disabled_reason": hacs.system.disabled_reason,
|
||||
"new": hacs.status.new,
|
||||
"startup": hacs.status.startup,
|
||||
"categories": hacs.common.categories,
|
||||
"renamed_repositories": hacs.common.renamed_repositories,
|
||||
"archived_repositories": hacs.common.archived_repositories,
|
||||
"ignored_repositories": hacs.common.ignored_repositories,
|
||||
"lovelace_mode": hacs.core.lovelace_mode,
|
||||
"configuration": {},
|
||||
},
|
||||
"custom_repositories": [
|
||||
repo.data.full_name
|
||||
for repo in hacs.repositories.list_all
|
||||
if not hacs.repositories.is_default(str(repo.data.id))
|
||||
],
|
||||
"repositories": [],
|
||||
}
|
||||
|
||||
for key in (
|
||||
"appdaemon",
|
||||
"country",
|
||||
"debug",
|
||||
"dev",
|
||||
"python_script",
|
||||
"release_limit",
|
||||
"theme",
|
||||
):
|
||||
data["hacs"]["configuration"][key] = getattr(hacs.configuration, key, None)
|
||||
|
||||
for repository in hacs.repositories.list_downloaded:
|
||||
data["repositories"].append(
|
||||
{
|
||||
"data": repository.data.to_json(),
|
||||
"integration_manifest": repository.integration_manifest,
|
||||
"repository_manifest": repository.repository_manifest.to_dict(),
|
||||
"ref": repository.ref,
|
||||
"paths": {
|
||||
"localpath": repository.localpath.replace(hacs.core.config_path, "/config"),
|
||||
"local": repository.content.path.local.replace(
|
||||
hacs.core.config_path, "/config"
|
||||
),
|
||||
"remote": repository.content.path.remote,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
try:
|
||||
rate_limit_response = await hacs.githubapi.rate_limit()
|
||||
data["rate_limit"] = rate_limit_response.data.as_dict
|
||||
except GitHubException as exception:
|
||||
data["rate_limit"] = str(exception)
|
||||
|
||||
return async_redact_data(data, ("token",))
|
||||
|
|
@ -1,143 +0,0 @@
|
|||
"""HACS Base entities."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.update_coordinator import BaseCoordinatorEntity
|
||||
|
||||
from .const import DOMAIN, HACS_SYSTEM_ID, NAME_SHORT
|
||||
from .coordinator import HacsUpdateCoordinator
|
||||
from .enums import HacsDispatchEvent, HacsGitHubRepo
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .base import HacsBase
|
||||
from .repositories.base import HacsRepository
|
||||
|
||||
|
||||
def system_info(hacs: HacsBase) -> dict:
|
||||
"""Return system info."""
|
||||
return {
|
||||
"identifiers": {(DOMAIN, HACS_SYSTEM_ID)},
|
||||
"name": NAME_SHORT,
|
||||
"manufacturer": "hacs.xyz",
|
||||
"model": "",
|
||||
"sw_version": str(hacs.version),
|
||||
"configuration_url": "homeassistant://hacs",
|
||||
"entry_type": DeviceEntryType.SERVICE,
|
||||
}
|
||||
|
||||
|
||||
class HacsBaseEntity(Entity):
|
||||
"""Base HACS entity."""
|
||||
|
||||
repository: HacsRepository | None = None
|
||||
_attr_should_poll = False
|
||||
|
||||
def __init__(self, hacs: HacsBase) -> None:
|
||||
"""Initialize."""
|
||||
self.hacs = hacs
|
||||
|
||||
|
||||
class HacsDispatcherEntity(HacsBaseEntity):
|
||||
"""Base HACS entity listening to dispatcher signals."""
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register for status events."""
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
HacsDispatchEvent.REPOSITORY,
|
||||
self._update_and_write_state,
|
||||
)
|
||||
)
|
||||
|
||||
@callback
|
||||
def _update(self) -> None:
|
||||
"""Update the sensor."""
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Manual updates of the sensor."""
|
||||
self._update()
|
||||
|
||||
@callback
|
||||
def _update_and_write_state(self, _: Any) -> None:
|
||||
"""Update the entity and write state."""
|
||||
self._update()
|
||||
self.async_write_ha_state()
|
||||
|
||||
|
||||
class HacsSystemEntity(HacsDispatcherEntity):
|
||||
"""Base system entity."""
|
||||
|
||||
_attr_icon = "hacs:hacs"
|
||||
_attr_unique_id = HACS_SYSTEM_ID
|
||||
|
||||
@property
|
||||
def device_info(self) -> dict[str, any]:
|
||||
"""Return device information about HACS."""
|
||||
return system_info(self.hacs)
|
||||
|
||||
|
||||
class HacsRepositoryEntity(BaseCoordinatorEntity[HacsUpdateCoordinator], HacsBaseEntity):
|
||||
"""Base repository entity."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hacs: HacsBase,
|
||||
repository: HacsRepository,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
BaseCoordinatorEntity.__init__(self, hacs.coordinators[repository.data.category])
|
||||
HacsBaseEntity.__init__(self, hacs=hacs)
|
||||
self.repository = repository
|
||||
self._attr_unique_id = str(repository.data.id)
|
||||
self._repo_last_fetched = repository.data.last_fetched
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if entity is available."""
|
||||
return self.hacs.repositories.is_downloaded(repository_id=str(self.repository.data.id))
|
||||
|
||||
@property
|
||||
def device_info(self) -> dict[str, any]:
|
||||
"""Return device information about HACS."""
|
||||
if self.repository.data.full_name == HacsGitHubRepo.INTEGRATION:
|
||||
return system_info(self.hacs)
|
||||
|
||||
def _manufacturer():
|
||||
if authors := self.repository.data.authors:
|
||||
return ", ".join(author.replace("@", "") for author in authors)
|
||||
return self.repository.data.full_name.split("/")[0]
|
||||
|
||||
return {
|
||||
"identifiers": {(DOMAIN, str(self.repository.data.id))},
|
||||
"name": self.repository.display_name,
|
||||
"model": self.repository.data.category,
|
||||
"manufacturer": _manufacturer(),
|
||||
"configuration_url": f"homeassistant://hacs/repository/{self.repository.data.id}",
|
||||
"entry_type": DeviceEntryType.SERVICE,
|
||||
}
|
||||
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
if (
|
||||
self._repo_last_fetched is not None
|
||||
and self.repository.data.last_fetched is not None
|
||||
and self._repo_last_fetched >= self.repository.data.last_fetched
|
||||
):
|
||||
return
|
||||
|
||||
self._repo_last_fetched = self.repository.data.last_fetched
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update the entity.
|
||||
|
||||
Only used by the generic entity update service.
|
||||
"""
|
||||
|
|
@ -1,71 +0,0 @@
|
|||
"""Helper constants."""
|
||||
|
||||
# pylint: disable=missing-class-docstring
|
||||
from enum import StrEnum
|
||||
|
||||
|
||||
class HacsGitHubRepo(StrEnum):
|
||||
"""HacsGitHubRepo."""
|
||||
|
||||
DEFAULT = "hacs/default"
|
||||
INTEGRATION = "hacs/integration"
|
||||
|
||||
|
||||
class HacsCategory(StrEnum):
|
||||
APPDAEMON = "appdaemon"
|
||||
INTEGRATION = "integration"
|
||||
LOVELACE = "lovelace"
|
||||
PLUGIN = "plugin" # Kept for legacy purposes
|
||||
PYTHON_SCRIPT = "python_script"
|
||||
TEMPLATE = "template"
|
||||
THEME = "theme"
|
||||
REMOVED = "removed"
|
||||
|
||||
def __str__(self):
|
||||
return str(self.value)
|
||||
|
||||
|
||||
class HacsDispatchEvent(StrEnum):
|
||||
"""HacsDispatchEvent."""
|
||||
|
||||
CONFIG = "hacs_dispatch_config"
|
||||
ERROR = "hacs_dispatch_error"
|
||||
RELOAD = "hacs_dispatch_reload"
|
||||
REPOSITORY = "hacs_dispatch_repository"
|
||||
REPOSITORY_DOWNLOAD_PROGRESS = "hacs_dispatch_repository_download_progress"
|
||||
STAGE = "hacs_dispatch_stage"
|
||||
STARTUP = "hacs_dispatch_startup"
|
||||
STATUS = "hacs_dispatch_status"
|
||||
|
||||
|
||||
class RepositoryFile(StrEnum):
|
||||
"""Repository file names."""
|
||||
|
||||
HACS_JSON = "hacs.json"
|
||||
MAINIFEST_JSON = "manifest.json"
|
||||
|
||||
|
||||
class LovelaceMode(StrEnum):
|
||||
"""Lovelace Modes."""
|
||||
|
||||
STORAGE = "storage"
|
||||
AUTO = "auto"
|
||||
AUTO_GEN = "auto-gen"
|
||||
YAML = "yaml"
|
||||
|
||||
|
||||
class HacsStage(StrEnum):
|
||||
SETUP = "setup"
|
||||
STARTUP = "startup"
|
||||
WAITING = "waiting"
|
||||
RUNNING = "running"
|
||||
BACKGROUND = "background"
|
||||
|
||||
|
||||
class HacsDisabledReason(StrEnum):
|
||||
RATE_LIMIT = "rate_limit"
|
||||
REMOVED = "removed"
|
||||
INVALID_TOKEN = "invalid_token"
|
||||
CONSTRAINS = "constrains"
|
||||
LOAD_HACS = "load_hacs"
|
||||
RESTORE = "restore"
|
||||
|
|
@ -1,49 +0,0 @@
|
|||
"""Custom Exceptions for HACS."""
|
||||
|
||||
|
||||
class HacsException(Exception):
|
||||
"""Super basic."""
|
||||
|
||||
|
||||
class HacsRepositoryArchivedException(HacsException):
|
||||
"""For repositories that are archived."""
|
||||
|
||||
|
||||
class HacsNotModifiedException(HacsException):
|
||||
"""For responses that are not modified."""
|
||||
|
||||
|
||||
class HacsExpectedException(HacsException):
|
||||
"""For stuff that are expected."""
|
||||
|
||||
|
||||
class HacsRepositoryExistException(HacsException):
|
||||
"""For repositories that are already exist."""
|
||||
|
||||
|
||||
class HacsExecutionStillInProgress(HacsException):
|
||||
"""Exception to raise if execution is still in progress."""
|
||||
|
||||
|
||||
class AddonRepositoryException(HacsException):
|
||||
"""Exception to raise when user tries to add add-on repository."""
|
||||
|
||||
exception_message = (
|
||||
"The repository does not seem to be a integration, "
|
||||
"but an add-on repository. HACS does not manage add-ons."
|
||||
)
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__(self.exception_message)
|
||||
|
||||
|
||||
class HomeAssistantCoreRepositoryException(HacsException):
|
||||
"""Exception to raise when user tries to add the home-assistant/core repository."""
|
||||
|
||||
exception_message = (
|
||||
"You can not add homeassistant/core, to use core integrations "
|
||||
"check the Home Assistant documentation for how to add them."
|
||||
)
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__(self.exception_message)
|
||||
|
|
@ -1,67 +0,0 @@
|
|||
"""Starting setup task: Frontend."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from homeassistant.components.frontend import (
|
||||
add_extra_js_url,
|
||||
async_register_built_in_panel,
|
||||
)
|
||||
|
||||
from .const import DOMAIN, URL_BASE
|
||||
from .hacs_frontend import VERSION as FE_VERSION, locate_dir
|
||||
from .utils.workarounds import async_register_static_path
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .base import HacsBase
|
||||
|
||||
|
||||
async def async_register_frontend(hass: HomeAssistant, hacs: HacsBase) -> None:
|
||||
"""Register the frontend."""
|
||||
|
||||
# Register frontend
|
||||
if hacs.configuration.dev and (frontend_path := os.getenv("HACS_FRONTEND_DIR")):
|
||||
hacs.log.warning(
|
||||
"<HacsFrontend> Frontend development mode enabled. Do not run in production!"
|
||||
)
|
||||
await async_register_static_path(
|
||||
hass, f"{URL_BASE}/frontend", f"{frontend_path}/hacs_frontend", cache_headers=False
|
||||
)
|
||||
hacs.frontend_version = "dev"
|
||||
else:
|
||||
await async_register_static_path(
|
||||
hass, f"{URL_BASE}/frontend", locate_dir(), cache_headers=False
|
||||
)
|
||||
hacs.frontend_version = FE_VERSION
|
||||
|
||||
# Custom iconset
|
||||
await async_register_static_path(
|
||||
hass, f"{URL_BASE}/iconset.js", str(hacs.integration_dir / "iconset.js")
|
||||
)
|
||||
add_extra_js_url(hass, f"{URL_BASE}/iconset.js")
|
||||
|
||||
# Add to sidepanel if needed
|
||||
if DOMAIN not in hass.data.get("frontend_panels", {}):
|
||||
async_register_built_in_panel(
|
||||
hass,
|
||||
component_name="custom",
|
||||
sidebar_title=hacs.configuration.sidepanel_title,
|
||||
sidebar_icon=hacs.configuration.sidepanel_icon,
|
||||
frontend_url_path=DOMAIN,
|
||||
config={
|
||||
"_panel_custom": {
|
||||
"name": "hacs-frontend",
|
||||
"embed_iframe": True,
|
||||
"trust_external": False,
|
||||
"js_url": f"/hacsfiles/frontend/entrypoint.js?hacstag={hacs.frontend_version}",
|
||||
}
|
||||
},
|
||||
require_admin=True,
|
||||
)
|
||||
|
||||
# Setup plugin endpoint if needed
|
||||
await hacs.async_setup_frontend_endpoint_plugin()
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
"""HACS Frontend"""
|
||||
from .version import VERSION
|
||||
|
||||
def locate_dir():
|
||||
return __path__[0]
|
||||
|
|
@ -1 +0,0 @@
|
|||
!function(){function n(n){var e=document.createElement("script");e.src=n,document.body.appendChild(e)}if(/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent))n("/hacsfiles/frontend/frontend_es5/entrypoint.c180d0b256f9b6d0.js");else try{new Function("import('/hacsfiles/frontend/frontend_latest/entrypoint.bb9d28f38e9fba76.js')")()}catch(e){n("/hacsfiles/frontend/frontend_es5/entrypoint.c180d0b256f9b6d0.js")}}()
|
||||
|
|
@ -1 +0,0 @@
|
|||
VERSION="20250128065759"
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
const hacsIcons = {
|
||||
hacs: {
|
||||
path: "m 20.064849,22.306912 c -0.0319,0.369835 -0.280561,0.707789 -0.656773,0.918212 -0.280572,0.153036 -0.605773,0.229553 -0.950094,0.229553 -0.0765,0 -0.146661,-0.0064 -0.216801,-0.01275 -0.605774,-0.05739 -1.135016,-0.344329 -1.402827,-0.7588 l 0.784304,-0.516495 c 0.0893,0.146659 0.344331,0.312448 0.707793,0.34433 0.235931,0.02551 0.471852,-0.01913 0.637643,-0.108401 0.101998,-0.05101 0.172171,-0.127529 0.17854,-0.191295 0.0065,-0.08289 -0.0255,-0.369835 -0.733293,-0.439975 -1.013854,-0.09565 -1.645127,-0.688661 -1.568606,-1.460214 0.0319,-0.382589 0.280561,-0.714165 0.663153,-0.930965 0.331571,-0.172165 0.752423,-0.25506 1.166895,-0.210424 0.599382,0.05739 1.128635,0.344329 1.402816,0.7588 l -0.784304,0.510118 c -0.0893,-0.140282 -0.344331,-0.299694 -0.707782,-0.331576 -0.235932,-0.02551 -0.471863,0.01913 -0.637654,0.10202 -0.0956,0.05739 -0.165791,0.133906 -0.17216,0.191295 -0.0255,0.293317 0.465482,0.420847 0.726913,0.439976 v 0.0064 c 1.020234,0.09565 1.638757,0.66953 1.562237,1.460213 z m -7.466854,-0.988354 c 0,-1.192401 0.962855,-2.155249 2.15525,-2.155249 0.599393,0 1.179645,0.25506 1.594117,0.707789 l -0.695033,0.624895 c -0.235931,-0.25506 -0.561133,-0.401718 -0.899084,-0.401718 -0.675903,0 -1.217906,0.542 -1.217906,1.217906 0,0.66953 0.542003,1.217908 1.217906,1.217908 0.337951,0 0.663153,-0.140283 0.899084,-0.401718 l 0.695033,0.631271 c -0.414472,0.452729 -0.988355,0.707788 -1.594117,0.707788 -1.192395,0 -2.15525,-0.969224 -2.15525,-2.148872 z M 8.6573365,23.461054 10.353474,19.14418 h 0.624893 l 1.568618,4.316874 H 11.52037 L 11.265308,22.734136 H 9.964513 l -0.274192,0.726918 z m 1.6833885,-1.68339 h 0.580263 L 10.646796,21.012487 Z M 8.1089536,19.156932 v 4.297745 H 7.1461095 v -1.645131 h -1.606867 v 1.645131 H 4.5763876 v -4.297745 h 0.9628549 v 1.696143 h 1.606867 V 19.156932 Z M 20.115859,4.2997436 C 20.090359,4.159461 19.969198,4.0574375 19.822548,4.0574375 H 14.141102 10.506516 4.8250686 c -0.14665,0 -0.2678112,0.1020202 
-0.2933108,0.2423061 L 3.690064,8.8461703 c -0.00651,0.01913 -0.00651,0.03826 -0.00651,0.057391 v 1.5239797 c 0,0.165789 0.133911,0.299694 0.2996911,0.299694 H 4.5762579 20.0711 20.664112 c 0.165781,0 0.299691,-0.133905 0.299691,-0.299694 V 8.8971848 c 0,-0.01913 0,-0.03826 -0.0065,-0.05739 z M 4.5763876,17.358767 c 0,0.184917 0.1466608,0.331577 0.3315819,0.331577 h 5.5985465 3.634586 0.924594 c 0.184911,0 0.331571,-0.14666 0.331571,-0.331577 v -4.744098 c 0,-0.184918 0.146661,-0.331577 0.331582,-0.331577 h 2.894913 c 0.184921,0 0.331582,0.146659 0.331582,0.331577 v 4.744098 c 0,0.184917 0.146661,0.331577 0.331571,0.331577 h 0.446363 c 0.18491,0 0.331571,-0.14666 0.331571,-0.331577 v -5.636804 c 0,-0.184918 -0.146661,-0.331577 -0.331571,-0.331577 H 4.9079695 c -0.1849211,0 -0.3315819,0.146659 -0.3315819,0.331577 z m 1.6578879,-4.852498 h 5.6495565 c 0.15303,0 0.280561,0.12753 0.280561,0.280564 v 3.513438 c 0,0.153036 -0.127531,0.280566 -0.280561,0.280566 H 6.2342755 c -0.1530412,0 -0.2805719,-0.12753 -0.2805719,-0.280566 v -3.513438 c 0,-0.159411 0.1275307,-0.280564 0.2805719,-0.280564 z M 19.790657,3.3879075 H 4.8569594 c -0.1530412,0 -0.2805718,-0.1275296 -0.2805718,-0.2805642 V 1.3665653 C 4.5763876,1.2135296 4.7039182,1.086 4.8569594,1.086 H 19.790657 c 0.153041,0 0.280572,0.1275296 0.280572,0.2805653 v 1.740778 c 0,0.1530346 -0.127531,0.2805642 -0.280572,0.2805642 z",
|
||||
keywords: ["hacs", "home assistant community store"],
|
||||
},
|
||||
};
|
||||
|
||||
window.customIcons = window.customIcons || {};
|
||||
window.customIconsets = window.customIconsets || {};
|
||||
|
||||
window.customIcons["hacs"] = {
|
||||
getIcon: async (iconName) => (
|
||||
{ path: hacsIcons[iconName]?.path }
|
||||
),
|
||||
getIconList: async () =>
|
||||
Object.entries(hacsIcons).map(([icon, content]) => ({
|
||||
name: icon,
|
||||
keywords: content.keywords,
|
||||
})
|
||||
)
|
||||
};
|
||||
|
|
@ -1,26 +0,0 @@
|
|||
{
|
||||
"domain": "hacs",
|
||||
"name": "HACS",
|
||||
"after_dependencies": [
|
||||
"python_script"
|
||||
],
|
||||
"codeowners": [
|
||||
"@ludeeus"
|
||||
],
|
||||
"config_flow": true,
|
||||
"dependencies": [
|
||||
"http",
|
||||
"websocket_api",
|
||||
"frontend",
|
||||
"persistent_notification",
|
||||
"lovelace",
|
||||
"repairs"
|
||||
],
|
||||
"documentation": "https://hacs.xyz/docs/use/",
|
||||
"iot_class": "cloud_polling",
|
||||
"issue_tracker": "https://github.com/hacs/integration/issues",
|
||||
"requirements": [
|
||||
"aiogithubapi>=22.10.1"
|
||||
],
|
||||
"version": "2.0.5"
|
||||
}
|
||||
|
|
@ -1,58 +0,0 @@
|
|||
"""Repairs platform for HACS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant import data_entry_flow
|
||||
from homeassistant.components.repairs import RepairsFlow
|
||||
from homeassistant.core import HomeAssistant
|
||||
import voluptuous as vol
|
||||
|
||||
from custom_components.hacs.base import HacsBase
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
class RestartRequiredFixFlow(RepairsFlow):
|
||||
"""Handler for an issue fixing flow."""
|
||||
|
||||
def __init__(self, issue_id: str) -> None:
|
||||
self.issue_id = issue_id
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, str] | None = None
|
||||
) -> data_entry_flow.FlowResult:
|
||||
"""Handle the first step of a fix flow."""
|
||||
|
||||
return await self.async_step_confirm_restart()
|
||||
|
||||
async def async_step_confirm_restart(
|
||||
self, user_input: dict[str, str] | None = None
|
||||
) -> data_entry_flow.FlowResult:
|
||||
"""Handle the confirm step of a fix flow."""
|
||||
if user_input is not None:
|
||||
await self.hass.services.async_call("homeassistant", "restart")
|
||||
return self.async_create_entry(title="", data={})
|
||||
|
||||
hacs: HacsBase = self.hass.data[DOMAIN]
|
||||
integration = hacs.repositories.get_by_id(self.issue_id.split("_")[2])
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="confirm_restart",
|
||||
data_schema=vol.Schema({}),
|
||||
description_placeholders={"name": integration.display_name},
|
||||
)
|
||||
|
||||
|
||||
async def async_create_fix_flow(
|
||||
hass: HomeAssistant,
|
||||
issue_id: str,
|
||||
data: dict[str, str | int | float | None] | None = None,
|
||||
*args: Any,
|
||||
**kwargs: Any,
|
||||
) -> RepairsFlow | None:
|
||||
"""Create flow."""
|
||||
if issue_id.startswith("restart_required"):
|
||||
return RestartRequiredFixFlow(issue_id)
|
||||
return None
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
"""Initialize repositories."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from ..enums import HacsCategory
|
||||
from .appdaemon import HacsAppdaemonRepository
|
||||
from .base import HacsRepository
|
||||
from .integration import HacsIntegrationRepository
|
||||
from .plugin import HacsPluginRepository
|
||||
from .python_script import HacsPythonScriptRepository
|
||||
from .template import HacsTemplateRepository
|
||||
from .theme import HacsThemeRepository
|
||||
|
||||
REPOSITORY_CLASSES: dict[HacsCategory, HacsRepository] = {
|
||||
HacsCategory.THEME: HacsThemeRepository,
|
||||
HacsCategory.INTEGRATION: HacsIntegrationRepository,
|
||||
HacsCategory.PYTHON_SCRIPT: HacsPythonScriptRepository,
|
||||
HacsCategory.APPDAEMON: HacsAppdaemonRepository,
|
||||
HacsCategory.PLUGIN: HacsPluginRepository,
|
||||
HacsCategory.TEMPLATE: HacsTemplateRepository,
|
||||
}
|
||||
|
|
@ -1,93 +0,0 @@
|
|||
"""Class for appdaemon apps in HACS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from aiogithubapi import AIOGitHubAPIException
|
||||
|
||||
from ..enums import HacsCategory, HacsDispatchEvent
|
||||
from ..exceptions import HacsException
|
||||
from ..utils.decorator import concurrent
|
||||
from .base import HacsRepository
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..base import HacsBase
|
||||
|
||||
|
||||
class HacsAppdaemonRepository(HacsRepository):
|
||||
"""Appdaemon apps in HACS."""
|
||||
|
||||
def __init__(self, hacs: HacsBase, full_name: str):
|
||||
"""Initialize."""
|
||||
super().__init__(hacs=hacs)
|
||||
self.data.full_name = full_name
|
||||
self.data.full_name_lower = full_name.lower()
|
||||
self.data.category = HacsCategory.APPDAEMON
|
||||
self.content.path.local = self.localpath
|
||||
self.content.path.remote = "apps"
|
||||
|
||||
@property
|
||||
def localpath(self):
|
||||
"""Return localpath."""
|
||||
return f"{self.hacs.core.config_path}/appdaemon/apps/{self.data.name}"
|
||||
|
||||
async def validate_repository(self):
|
||||
"""Validate."""
|
||||
await self.common_validate()
|
||||
|
||||
# Custom step 1: Validate content.
|
||||
try:
|
||||
addir = await self.repository_object.get_contents("apps", self.ref)
|
||||
except AIOGitHubAPIException:
|
||||
raise HacsException(
|
||||
f"{self.string} Repository structure for {self.ref.replace('tags/','')} is not compliant"
|
||||
) from None
|
||||
|
||||
if not isinstance(addir, list):
|
||||
self.validate.errors.append(f"{self.string} Repository structure not compliant")
|
||||
|
||||
self.content.path.remote = addir[0].path
|
||||
self.content.objects = await self.repository_object.get_contents(
|
||||
self.content.path.remote, self.ref
|
||||
)
|
||||
|
||||
# Handle potential errors
|
||||
if self.validate.errors:
|
||||
for error in self.validate.errors:
|
||||
if not self.hacs.status.startup:
|
||||
self.logger.error("%s %s", self.string, error)
|
||||
return self.validate.success
|
||||
|
||||
@concurrent(concurrenttasks=10, backoff_time=5)
|
||||
async def update_repository(self, ignore_issues=False, force=False):
|
||||
"""Update."""
|
||||
if not await self.common_update(ignore_issues, force) and not force:
|
||||
return
|
||||
|
||||
# Get appdaemon objects.
|
||||
if self.repository_manifest:
|
||||
if self.repository_manifest.content_in_root:
|
||||
self.content.path.remote = ""
|
||||
|
||||
if self.content.path.remote == "apps":
|
||||
addir = await self.repository_object.get_contents(self.content.path.remote, self.ref)
|
||||
self.content.path.remote = addir[0].path
|
||||
self.content.objects = await self.repository_object.get_contents(
|
||||
self.content.path.remote, self.ref
|
||||
)
|
||||
|
||||
# Set local path
|
||||
self.content.path.local = self.localpath
|
||||
|
||||
# Signal frontend to refresh
|
||||
if self.data.installed:
|
||||
self.hacs.async_dispatch(
|
||||
HacsDispatchEvent.REPOSITORY,
|
||||
{
|
||||
"id": 1337,
|
||||
"action": "update",
|
||||
"repository": self.data.full_name,
|
||||
"repository_id": self.data.id,
|
||||
},
|
||||
)
|
||||
|
|
@ -1,217 +0,0 @@
|
|||
"""Class for integrations in HACS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
from homeassistant.loader import async_get_custom_components
|
||||
|
||||
from ..const import DOMAIN
|
||||
from ..enums import HacsCategory, HacsDispatchEvent, HacsGitHubRepo, RepositoryFile
|
||||
from ..exceptions import AddonRepositoryException, HacsException
|
||||
from ..utils.decode import decode_content
|
||||
from ..utils.decorator import concurrent
|
||||
from ..utils.filters import get_first_directory_in_directory
|
||||
from ..utils.json import json_loads
|
||||
from .base import HacsRepository
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..base import HacsBase
|
||||
|
||||
|
||||
class HacsIntegrationRepository(HacsRepository):
|
||||
"""Integrations in HACS."""
|
||||
|
||||
def __init__(self, hacs: HacsBase, full_name: str):
|
||||
"""Initialize."""
|
||||
super().__init__(hacs=hacs)
|
||||
self.data.full_name = full_name
|
||||
self.data.full_name_lower = full_name.lower()
|
||||
self.data.category = HacsCategory.INTEGRATION
|
||||
self.content.path.remote = "custom_components"
|
||||
self.content.path.local = self.localpath
|
||||
|
||||
@property
|
||||
def localpath(self):
|
||||
"""Return localpath."""
|
||||
return f"{self.hacs.core.config_path}/custom_components/{self.data.domain}"
|
||||
|
||||
async def async_post_installation(self):
|
||||
"""Run post installation steps."""
|
||||
self.pending_restart = True
|
||||
if self.data.config_flow:
|
||||
if self.data.full_name != HacsGitHubRepo.INTEGRATION:
|
||||
await self.reload_custom_components()
|
||||
if self.data.first_install:
|
||||
self.pending_restart = False
|
||||
|
||||
if self.pending_restart:
|
||||
self.logger.debug("%s Creating restart_required issue", self.string)
|
||||
async_create_issue(
|
||||
hass=self.hacs.hass,
|
||||
domain=DOMAIN,
|
||||
issue_id=f"restart_required_{self.data.id}_{self.ref}",
|
||||
is_fixable=True,
|
||||
issue_domain=self.data.domain or DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="restart_required",
|
||||
translation_placeholders={
|
||||
"name": self.display_name,
|
||||
},
|
||||
)
|
||||
|
||||
async def async_post_uninstall(self) -> None:
|
||||
"""Run post uninstall steps."""
|
||||
if self.data.config_flow:
|
||||
await self.reload_custom_components()
|
||||
else:
|
||||
self.pending_restart = True
|
||||
|
||||
async def validate_repository(self):
|
||||
"""Validate."""
|
||||
await self.common_validate()
|
||||
|
||||
# Custom step 1: Validate content.
|
||||
if self.repository_manifest.content_in_root:
|
||||
self.content.path.remote = ""
|
||||
|
||||
if self.content.path.remote == "custom_components":
|
||||
name = get_first_directory_in_directory(self.tree, "custom_components")
|
||||
if name is None:
|
||||
if (
|
||||
"repository.json" in self.treefiles
|
||||
or "repository.yaml" in self.treefiles
|
||||
or "repository.yml" in self.treefiles
|
||||
):
|
||||
raise AddonRepositoryException()
|
||||
raise HacsException(
|
||||
f"{self.string} Repository structure for {
|
||||
self.ref.replace('tags/', '')} is not compliant"
|
||||
)
|
||||
self.content.path.remote = f"custom_components/{name}"
|
||||
|
||||
# Get the content of manifest.json
|
||||
if manifest := await self.async_get_integration_manifest():
|
||||
try:
|
||||
self.integration_manifest = manifest
|
||||
self.data.authors = manifest.get("codeowners", [])
|
||||
self.data.domain = manifest["domain"]
|
||||
self.data.manifest_name = manifest.get("name")
|
||||
self.data.config_flow = manifest.get("config_flow", False)
|
||||
|
||||
except KeyError as exception:
|
||||
self.validate.errors.append(
|
||||
f"Missing expected key '{exception}' in {
|
||||
RepositoryFile.MAINIFEST_JSON}"
|
||||
)
|
||||
self.hacs.log.error(
|
||||
"Missing expected key '%s' in '%s'", exception, RepositoryFile.MAINIFEST_JSON
|
||||
)
|
||||
|
||||
# Set local path
|
||||
self.content.path.local = self.localpath
|
||||
|
||||
# Handle potential errors
|
||||
if self.validate.errors:
|
||||
for error in self.validate.errors:
|
||||
if not self.hacs.status.startup:
|
||||
self.logger.error("%s %s", self.string, error)
|
||||
return self.validate.success
|
||||
|
||||
    @concurrent(concurrenttasks=10, backoff_time=5)
    async def update_repository(self, ignore_issues=False, force=False):
        """Update the integration repository metadata.

        Args:
            ignore_issues: Continue even if the repository has known issues.
            force: Run even when the common update step says to skip.
        """
        if not await self.common_update(ignore_issues, force) and not force:
            return

        if self.repository_manifest.content_in_root:
            # Files live at the repository root instead of custom_components/.
            self.content.path.remote = ""

        if self.content.path.remote == "custom_components":
            name = get_first_directory_in_directory(self.tree, "custom_components")
            # NOTE(review): unlike validate_repository, a missing directory is
            # not treated as an error here, so name may be None — confirm.
            self.content.path.remote = f"custom_components/{name}"

        # Get the content of manifest.json
        if manifest := await self.async_get_integration_manifest():
            try:
                self.integration_manifest = manifest
                self.data.authors = manifest.get("codeowners", [])
                # "domain" is required; a missing key is reported below.
                self.data.domain = manifest["domain"]
                self.data.manifest_name = manifest.get("name")
                self.data.config_flow = manifest.get("config_flow", False)

            except KeyError as exception:
                self.validate.errors.append(
                    f"Missing expected key '{exception}' in {RepositoryFile.MAINIFEST_JSON}"
                )
                self.hacs.log.error(
                    "Missing expected key '%s' in '%s'", exception, RepositoryFile.MAINIFEST_JSON
                )

        # Set local path
        self.content.path.local = self.localpath

        # Signal frontend to refresh
        if self.data.installed:
            self.hacs.async_dispatch(
                HacsDispatchEvent.REPOSITORY,
                {
                    "id": 1337,
                    "action": "update",
                    "repository": self.data.full_name,
                    "repository_id": self.data.id,
                },
            )
|
||||
|
||||
async def reload_custom_components(self):
|
||||
"""Reload custom_components (and config flows)in HA."""
|
||||
self.logger.info("Reloading custom_component cache")
|
||||
del self.hacs.hass.data["custom_components"]
|
||||
await async_get_custom_components(self.hacs.hass)
|
||||
self.logger.info("Custom_component cache reloaded")
|
||||
|
||||
async def async_get_integration_manifest(self, ref: str = None) -> dict[str, Any] | None:
|
||||
"""Get the content of the manifest.json file."""
|
||||
manifest_path = (
|
||||
"manifest.json"
|
||||
if self.repository_manifest.content_in_root
|
||||
else f"{self.content.path.remote}/{RepositoryFile.MAINIFEST_JSON}"
|
||||
)
|
||||
|
||||
if not manifest_path in (x.full_path for x in self.tree):
|
||||
raise HacsException(f"No {RepositoryFile.MAINIFEST_JSON} file found '{manifest_path}'")
|
||||
|
||||
response = await self.hacs.async_github_api_method(
|
||||
method=self.hacs.githubapi.repos.contents.get,
|
||||
repository=self.data.full_name,
|
||||
path=manifest_path,
|
||||
**{"params": {"ref": ref or self.version_to_download()}},
|
||||
)
|
||||
if response:
|
||||
return json_loads(decode_content(response.data.content))
|
||||
|
||||
async def get_integration_manifest(self, *, version: str, **kwargs) -> dict[str, Any] | None:
|
||||
"""Get the content of the manifest.json file."""
|
||||
manifest_path = (
|
||||
"manifest.json"
|
||||
if self.repository_manifest.content_in_root
|
||||
else f"{self.content.path.remote}/{RepositoryFile.MAINIFEST_JSON}"
|
||||
)
|
||||
|
||||
if manifest_path not in (x.full_path for x in self.tree):
|
||||
raise HacsException(f"No {RepositoryFile.MAINIFEST_JSON} file found '{manifest_path}'")
|
||||
|
||||
self.logger.debug("%s Getting manifest.json for version=%s", self.string, version)
|
||||
try:
|
||||
result = await self.hacs.async_download_file(
|
||||
f"https://raw.githubusercontent.com/{
|
||||
self.data.full_name}/{version}/{manifest_path}",
|
||||
nolog=True,
|
||||
)
|
||||
if result is None:
|
||||
return None
|
||||
return json_loads(result)
|
||||
except Exception: # pylint: disable=broad-except
|
||||
return None
|
||||
|
|
@ -1,246 +0,0 @@
|
|||
"""Class for plugins in HACS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from ..enums import HacsCategory, HacsDispatchEvent
|
||||
from ..exceptions import HacsException
|
||||
from ..utils.decorator import concurrent
|
||||
from ..utils.json import json_loads
|
||||
from .base import HacsRepository
|
||||
|
||||
# Matches runs of non-digit characters; used to strip everything but digits
# from a version string when building the cache-busting "hacstag" parameter.
HACSTAG_REPLACER = re.compile(r"\D+")
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from homeassistant.components.lovelace.resources import ResourceStorageCollection
|
||||
|
||||
from ..base import HacsBase
|
||||
|
||||
|
||||
class HacsPluginRepository(HacsRepository):
    """Plugins (dashboard resources) in HACS."""

    def __init__(self, hacs: HacsBase, full_name: str):
        """Initialize."""
        super().__init__(hacs=hacs)
        self.data.full_name = full_name
        self.data.full_name_lower = full_name.lower()
        self.data.file_name = None
        self.data.category = HacsCategory.PLUGIN
        self.content.path.local = self.localpath

    @property
    def localpath(self):
        """Return the local download path (www/community/<repository name>)."""
        return f"{self.hacs.core.config_path}/www/community/{self.data.full_name.split('/')[-1]}"

    async def validate_repository(self):
        """Validate."""
        # Run common validation steps.
        await self.common_validate()

        # Custom step 1: Validate content.
        self.update_filenames()

        if self.content.path.remote is None:
            raise HacsException(
                f"{self.string} Repository structure for {self.ref.replace('tags/','')} is not compliant"
            )

        if self.content.path.remote == "release":
            self.content.single = True

        # Handle potential errors
        if self.validate.errors:
            for error in self.validate.errors:
                if not self.hacs.status.startup:
                    self.logger.error("%s %s", self.string, error)
        return self.validate.success

    async def async_post_installation(self):
        """Run post installation steps."""
        await self.hacs.async_setup_frontend_endpoint_plugin()
        await self.update_dashboard_resources()

    async def async_post_uninstall(self):
        """Run post uninstall steps."""
        await self.remove_dashboard_resources()

    @concurrent(concurrenttasks=10, backoff_time=5)
    async def update_repository(self, ignore_issues=False, force=False):
        """Update."""
        if not await self.common_update(ignore_issues, force) and not force:
            return

        # Get plugin objects.
        self.update_filenames()

        if self.content.path.remote is None:
            self.validate.errors.append(
                f"{self.string} Repository structure for {self.ref.replace('tags/','')} is not compliant"
            )

        if self.content.path.remote == "release":
            self.content.single = True

        # Signal frontend to refresh
        if self.data.installed:
            self.hacs.async_dispatch(
                HacsDispatchEvent.REPOSITORY,
                {
                    "id": 1337,
                    "action": "update",
                    "repository": self.data.full_name,
                    "repository_id": self.data.id,
                },
            )

    async def get_package_content(self):
        """Get package content (author information from package.json), best effort."""
        try:
            package = await self.repository_object.get_contents("package.json", self.ref)
            package = json_loads(package.content)

            if package:
                self.data.authors = package["author"]
        except BaseException:  # lgtm [py/catch-base-exception] pylint: disable=broad-except
            # A missing or invalid package.json is fine; authors stay unset.
            pass

    def update_filenames(self) -> None:
        """Get the filename to target.

        Sets self.data.file_name and self.content.path.remote; leaves remote
        as-is (None after common steps) when nothing matches.
        """
        content_in_root = self.repository_manifest.content_in_root
        if specific_filename := self.repository_manifest.filename:
            valid_filenames = (specific_filename,)
        else:
            valid_filenames = (
                f"{self.data.name.replace('lovelace-', '')}.js",
                f"{self.data.name}.js",
                f"{self.data.name}.umd.js",
                f"{self.data.name}-bundle.js",
            )

        if not content_in_root:
            if self.releases.objects:
                release = self.releases.objects[0]
                if release.assets:
                    # Prefer a matching release asset over tree content.
                    if assetnames := [
                        filename
                        for filename in valid_filenames
                        for asset in release.assets
                        if filename == asset.name
                    ]:
                        self.data.file_name = assetnames[0]
                        self.content.path.remote = "release"
                        return

        all_paths = {x.full_path for x in self.tree}
        for filename in valid_filenames:
            if filename in all_paths:
                self.data.file_name = filename
                self.content.path.remote = ""
                return
            # Bug fix: the dist/ fallback was a garbled constant string
            # ("dist/(unknown)") and referenced the loop variable after the
            # loop ended; interpolate each candidate name inside the loop.
            if not content_in_root and f"dist/{filename}" in all_paths:
                self.data.file_name = filename.split("/")[-1]
                self.content.path.remote = "dist"
                return

    def generate_dashboard_resource_hacstag(self) -> str:
        """Get the HACS tag used by dashboard resources (id + digits of version)."""
        version = (
            self.display_installed_version
            or self.data.selected_tag
            or self.display_available_version
        )
        return f"{self.data.id}{HACSTAG_REPLACER.sub('', version)}"

    def generate_dashboard_resource_namespace(self) -> str:
        """Get the dashboard resource namespace."""
        return f"/hacsfiles/{self.data.full_name.split('/')[1]}"

    def generate_dashboard_resource_url(self) -> str:
        """Get the dashboard resource URL (namespace/filename?hacstag=...)."""
        filename = self.data.file_name
        if "/" in filename:
            self.logger.warning("%s have defined an invalid file name %s", self.string, filename)
            filename = filename.split("/")[-1]
        # Bug fix: the filename placeholder was garbled to a literal
        # "(unknown)" — interpolate the actual file name.
        return (
            f"{self.generate_dashboard_resource_namespace()}/{filename}"
            f"?hacstag={self.generate_dashboard_resource_hacstag()}"
        )

    def _get_resource_handler(self) -> ResourceStorageCollection | None:
        """Get the lovelace resource storage handler, or None when unusable."""
        resources: ResourceStorageCollection | None
        if not (hass_data := self.hacs.hass.data):
            self.logger.error("%s Can not access the hass data", self.string)
            return None

        if (lovelace_data := hass_data.get("lovelace")) is None:
            self.logger.warning("%s Can not access the lovelace integration data", self.string)
            return None

        if self.hacs.core.ha_version > "2025.1.99":
            # Changed to 2025.2.0
            # Changed in https://github.com/home-assistant/core/pull/136313
            resources = lovelace_data.resources
        else:
            resources = lovelace_data.get("resources")

        if resources is None:
            self.logger.warning("%s Can not access the dashboard resources", self.string)
            return None

        if not hasattr(resources, "store") or resources.store is None:
            self.logger.info("%s YAML mode detected, can not update resources", self.string)
            return None

        if resources.store.key != "lovelace_resources" or resources.store.version != 1:
            self.logger.warning("%s Can not use the dashboard resources", self.string)
            return None

        return resources

    async def update_dashboard_resources(self) -> None:
        """Update dashboard resources.

        Updates the existing resource entry for this plugin's namespace if it
        exists, otherwise creates a new one.
        """
        if not (resources := self._get_resource_handler()):
            return

        if not resources.loaded:
            await resources.async_load()

        namespace = self.generate_dashboard_resource_namespace()
        url = self.generate_dashboard_resource_url()

        for entry in resources.async_items():
            if (entry_url := entry["url"]).startswith(namespace):
                if entry_url != url:
                    self.logger.info(
                        "%s Updating existing dashboard resource from %s to %s",
                        self.string,
                        entry_url,
                        url,
                    )
                    await resources.async_update_item(entry["id"], {"url": url})
                return

        # Nothing was updated, add the resource
        self.logger.info("%s Adding dashboard resource %s", self.string, url)
        await resources.async_create_item({"res_type": "module", "url": url})

    async def remove_dashboard_resources(self) -> None:
        """Remove this plugin's dashboard resource entry, if present."""
        if not (resources := self._get_resource_handler()):
            return

        if not resources.loaded:
            await resources.async_load()

        namespace = self.generate_dashboard_resource_namespace()

        for entry in resources.async_items():
            if entry["url"].startswith(namespace):
                self.logger.info("%s Removing dashboard resource %s", self.string, entry["url"])
                await resources.async_delete_item(entry["id"])
                return
|
||||
|
|
@ -1,111 +0,0 @@
|
|||
"""Class for python_scripts in HACS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from ..enums import HacsCategory, HacsDispatchEvent
|
||||
from ..exceptions import HacsException
|
||||
from ..utils.decorator import concurrent
|
||||
from .base import HacsRepository
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..base import HacsBase
|
||||
|
||||
|
||||
class HacsPythonScriptRepository(HacsRepository):
    """python_scripts in HACS."""

    category = "python_script"

    def __init__(self, hacs: HacsBase, full_name: str):
        """Initialize."""
        super().__init__(hacs=hacs)
        self.data.full_name = full_name
        self.data.full_name_lower = full_name.lower()
        self.data.category = HacsCategory.PYTHON_SCRIPT
        self.content.path.remote = "python_scripts"
        self.content.path.local = self.localpath
        self.content.single = True

    @property
    def localpath(self):
        """Return the local python_scripts directory."""
        return f"{self.hacs.core.config_path}/python_scripts"

    def _tree_is_compliant(self) -> bool:
        """Return True when at least one .py file exists below the remote path."""
        prefix = self.content.path.remote
        return any(
            treefile.startswith(prefix) and treefile.endswith(".py")
            for treefile in self.treefiles
        )

    async def validate_repository(self):
        """Validate the repository structure."""
        # Run common validation steps.
        await self.common_validate()

        # Custom step 1: Validate content.
        if self.repository_manifest.content_in_root:
            self.content.path.remote = ""

        if not self._tree_is_compliant():
            raise HacsException(
                f"{self.string} Repository structure for {self.ref.replace('tags/','')} is not compliant"
            )

        # Handle potential errors
        for error in self.validate.errors:
            if not self.hacs.status.startup:
                self.logger.error("%s %s", self.string, error)
        return self.validate.success

    async def async_post_registration(self):
        """Registration."""
        # Set name
        self.update_filenames()

        if self.hacs.system.action:
            await self.hacs.validation.async_run_repository_checks(self)

    @concurrent(concurrenttasks=10, backoff_time=5)
    async def update_repository(self, ignore_issues=False, force=False):
        """Update the repository."""
        if not await self.common_update(ignore_issues, force) and not force:
            return

        # Get python_script objects.
        if self.repository_manifest.content_in_root:
            self.content.path.remote = ""

        if not self._tree_is_compliant():
            raise HacsException(
                f"{self.string} Repository structure for {self.ref.replace('tags/','')} is not compliant"
            )

        # Update name
        self.update_filenames()

        # Signal frontend to refresh
        if self.data.installed:
            self.hacs.async_dispatch(
                HacsDispatchEvent.REPOSITORY,
                {
                    "id": 1337,
                    "action": "update",
                    "repository": self.data.full_name,
                    "repository_id": self.data.id,
                },
            )

    def update_filenames(self) -> None:
        """Get the filename to target (last matching .py file wins)."""
        prefix = self.content.path.remote
        for entry in self.tree:
            if entry.full_path.startswith(prefix) and entry.full_path.endswith(".py"):
                self.data.file_name = entry.filename
|
||||
|
|
@ -1,106 +0,0 @@
|
|||
"""Class for themes in HACS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
|
||||
from ..enums import HacsCategory, HacsDispatchEvent
|
||||
from ..exceptions import HacsException
|
||||
from ..utils.decorator import concurrent
|
||||
from .base import HacsRepository
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..base import HacsBase
|
||||
|
||||
|
||||
class HacsTemplateRepository(HacsRepository):
    """Custom templates in HACS."""

    def __init__(self, hacs: HacsBase, full_name: str):
        """Initialize."""
        super().__init__(hacs=hacs)
        self.data.full_name = full_name
        self.data.full_name_lower = full_name.lower()
        self.data.category = HacsCategory.TEMPLATE
        self.content.path.remote = ""
        self.content.path.local = self.localpath
        self.content.single = True

    @property
    def localpath(self):
        """Return the local custom_templates directory."""
        return f"{self.hacs.core.config_path}/custom_templates"

    async def async_post_installation(self):
        """Run post installation steps."""
        await self._reload_custom_templates()

    async def validate_repository(self):
        """Validate the repository structure."""
        # Run common validation steps.
        await self.common_validate()

        # Custom step 1: Validate content.
        file_name = self.repository_manifest.filename
        self.data.file_name = file_name

        # The manifest must name a single .jinja file present in the tree root.
        file_name_is_valid = (
            bool(file_name)
            and "/" not in file_name
            and file_name.endswith(".jinja")
            and file_name in self.treefiles
        )
        if not file_name_is_valid:
            raise HacsException(
                f"{self.string} Repository structure for {self.ref.replace('tags/','')} is not compliant"
            )

        # Handle potential errors
        for error in self.validate.errors:
            if not self.hacs.status.startup:
                self.logger.error("%s %s", self.string, error)
        return self.validate.success

    async def async_post_registration(self):
        """Registration."""
        # Set filenames
        self.data.file_name = self.repository_manifest.filename
        self.content.path.local = self.localpath

        if self.hacs.system.action:
            await self.hacs.validation.async_run_repository_checks(self)

    async def async_post_uninstall(self) -> None:
        """Run post uninstall steps."""
        await self._reload_custom_templates()

    async def _reload_custom_templates(self) -> None:
        """Reload custom templates in Home Assistant (best effort)."""
        self.logger.debug("%s Reloading custom templates", self.string)
        try:
            await self.hacs.hass.services.async_call("homeassistant", "reload_custom_templates", {})
        except HomeAssistantError as exception:
            self.logger.exception("%s %s", self.string, exception)

    @concurrent(concurrenttasks=10, backoff_time=5)
    async def update_repository(self, ignore_issues=False, force=False):
        """Update the repository."""
        if not await self.common_update(ignore_issues, force) and not force:
            return

        # Update filenames
        self.data.file_name = self.repository_manifest.filename
        self.content.path.local = self.localpath

        # Signal frontend to refresh
        if self.data.installed:
            self.hacs.async_dispatch(
                HacsDispatchEvent.REPOSITORY,
                {
                    "id": 1337,
                    "action": "update",
                    "repository": self.data.full_name,
                    "repository_id": self.data.id,
                },
            )
|
||||
|
|
@ -1,119 +0,0 @@
|
|||
"""Class for themes in HACS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
|
||||
from ..enums import HacsCategory, HacsDispatchEvent
|
||||
from ..exceptions import HacsException
|
||||
from ..utils.decorator import concurrent
|
||||
from .base import HacsRepository
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..base import HacsBase
|
||||
|
||||
|
||||
class HacsThemeRepository(HacsRepository):
    """Themes in HACS."""

    def __init__(self, hacs: HacsBase, full_name: str):
        """Initialize."""
        super().__init__(hacs=hacs)
        self.data.full_name = full_name
        self.data.full_name_lower = full_name.lower()
        self.data.category = HacsCategory.THEME
        self.content.path.remote = "themes"
        self.content.path.local = self.localpath
        self.content.single = False

    @property
    def localpath(self):
        """Return the local theme directory (themes/<file name without .yaml>)."""
        return f"{self.hacs.core.config_path}/themes/{self.data.file_name.replace('.yaml', '')}"

    async def async_post_installation(self):
        """Run post installation steps."""
        await self._reload_frontend_themes()

    async def validate_repository(self):
        """Validate the repository structure."""
        # Run common validation steps.
        await self.common_validate()

        # Custom step 1: Validate content.
        has_theme_file = any(
            treefile.startswith("themes/") and treefile.endswith(".yaml")
            for treefile in self.treefiles
        )
        if not has_theme_file:
            raise HacsException(
                f"{self.string} Repository structure for {self.ref.replace('tags/','')} is not compliant"
            )

        if self.repository_manifest.content_in_root:
            self.content.path.remote = ""

        # Handle potential errors
        for error in self.validate.errors:
            if not self.hacs.status.startup:
                self.logger.error("%s %s", self.string, error)
        return self.validate.success

    async def async_post_registration(self):
        """Registration."""
        # Set name
        self.update_filenames()
        self.content.path.local = self.localpath

        if self.hacs.system.action:
            await self.hacs.validation.async_run_repository_checks(self)

    async def _reload_frontend_themes(self) -> None:
        """Reload frontend themes in Home Assistant (best effort)."""
        self.logger.debug("%s Reloading frontend themes", self.string)
        try:
            await self.hacs.hass.services.async_call("frontend", "reload_themes", {})
        except HomeAssistantError as exception:
            self.logger.exception("%s %s", self.string, exception)

    async def async_post_uninstall(self) -> None:
        """Run post uninstall steps."""
        await self._reload_frontend_themes()

    @concurrent(concurrenttasks=10, backoff_time=5)
    async def update_repository(self, ignore_issues=False, force=False):
        """Update the repository."""
        if not await self.common_update(ignore_issues, force) and not force:
            return

        # Get theme objects.
        if self.repository_manifest.content_in_root:
            self.content.path.remote = ""

        # Update name
        self.update_filenames()
        self.content.path.local = self.localpath

        # Signal frontend to refresh
        if self.data.installed:
            self.hacs.async_dispatch(
                HacsDispatchEvent.REPOSITORY,
                {
                    "id": 1337,
                    "action": "update",
                    "repository": self.data.full_name,
                    "repository_id": self.data.id,
                },
            )

    def update_filenames(self) -> None:
        """Get the filename to target (last matching .yaml file wins)."""
        prefix = self.content.path.remote
        for entry in self.tree:
            full_path = entry.full_path
            if full_path.startswith(prefix) and full_path.endswith(".yaml"):
                self.data.file_name = entry.filename
|
||||
|
|
@ -1,52 +0,0 @@
|
|||
"""Provide info to system health."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from aiogithubapi.common.const import BASE_API_URL
|
||||
from homeassistant.components import system_health
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from .base import HacsBase
|
||||
from .const import DOMAIN
|
||||
|
||||
# Public status pages linked from the system health card so users can check
# upstream service availability when a reachability check fails.
GITHUB_STATUS = "https://www.githubstatus.com/"
CLOUDFLARE_STATUS = "https://www.cloudflarestatus.com/"
|
||||
|
||||
|
||||
@callback
def async_register(hass: HomeAssistant, register: system_health.SystemHealthRegistration) -> None:
    """Register system health callbacks."""
    # Shown as the section title on Home Assistant's system health page.
    register.domain = "Home Assistant Community Store"
    # "/hacs" is the info-page path attached to this section.
    register.async_register_info(system_health_info, "/hacs")
|
||||
|
||||
|
||||
async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
    """Get info for the info page."""
    if DOMAIN not in hass.data:
        return {"Disabled": "HACS is not loaded, but HA still requests this information..."}

    hacs: HacsBase = hass.data[DOMAIN]
    rate_limit = await hacs.githubapi.rate_limit()

    check = system_health.async_check_can_reach_url
    info: dict[str, Any] = {
        "GitHub API": check(hass, BASE_API_URL, GITHUB_STATUS),
        "GitHub Content": check(
            hass, "https://raw.githubusercontent.com/hacs/integration/main/hacs.json"
        ),
        "GitHub Web": check(hass, "https://github.com/", GITHUB_STATUS),
        "HACS Data": check(hass, "https://data-v2.hacs.xyz/data.json", CLOUDFLARE_STATUS),
        "GitHub API Calls Remaining": rate_limit.data.resources.core.remaining,
        "Installed Version": hacs.version,
        "Stage": hacs.stage,
        "Available Repositories": len(hacs.repositories.list_all),
        "Downloaded Repositories": len(hacs.repositories.list_downloaded),
    }

    if hacs.system.disabled:
        info["Disabled"] = hacs.system.disabled_reason

    return info
|
||||
|
|
@ -1,84 +0,0 @@
|
|||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"single_instance_allowed": "Only a single configuration of HACS is allowed.",
|
||||
"min_ha_version": "You need at least version {version} of Home Assistant to set up HACS.",
|
||||
"github": "Could not authenticate with GitHub, try again later.",
|
||||
"not_setup": "HACS is not set up.",
|
||||
"reauth_successful": "Reauthentication was successful."
|
||||
},
|
||||
"error": {
|
||||
"auth": "Personal Access Token is not correct",
|
||||
"acc": "You need to acknowledge all the statements before continuing"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"acc_logs": "I know how to access Home Assistant logs",
|
||||
"acc_addons": "I know that there are no add-ons in HACS",
|
||||
"acc_untested": "I know that everything inside HACS including HACS itself is custom and untested by Home Assistant",
|
||||
"acc_disable": "I know that if I get issues with Home Assistant I should disable all my custom_components"
|
||||
},
|
||||
"description": "Before you can setup HACS you need to acknowledge the following"
|
||||
},
|
||||
"device": {
|
||||
"title": "Waiting for device activation"
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"title": "Reauthentication needed",
|
||||
"description": "You need to reauthenticate with GitHub."
|
||||
}
|
||||
},
|
||||
"progress": {
|
||||
"wait_for_device": "1. Open {url} \n2. Paste the following key to authorize HACS: \n```\n{code}\n```"
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
"abort": {
|
||||
"not_setup": "HACS is not set up.",
|
||||
"pending_tasks": "There are pending tasks. Try again later.",
|
||||
"release_limit_value": "The release limit needs to be between 1 and 100."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"not_in_use": "Not in use with YAML",
|
||||
"country": "Filter with country code",
|
||||
"release_limit": "Number of releases to show",
|
||||
"debug": "Enable debug",
|
||||
"appdaemon": "Enable AppDaemon apps discovery & tracking",
|
||||
"sidepanel_icon": "Side panel icon",
|
||||
"sidepanel_title": "Side panel title"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"restart_required": {
|
||||
"title": "Restart required",
|
||||
"fix_flow": {
|
||||
"step": {
|
||||
"confirm_restart": {
|
||||
"title": "Restart required",
|
||||
"description": "Restart of Home Assistant is required to finish download/update of {name}, click submit to restart now."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"removed": {
|
||||
"title": "Repository removed from HACS",
|
||||
"description": "Because {reason}, `{name}` has been removed from HACS. Please visit the [HACS Panel](/hacs/repository/{repositry_id}) to remove it."
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"switch": {
|
||||
"pre-release": {
|
||||
"name": "Pre-release",
|
||||
"state": {
|
||||
"off": "No pre-releases",
|
||||
"on": "Pre-releases preferred"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1,158 +0,0 @@
|
|||
"""Update entities for HACS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.update import UpdateEntity, UpdateEntityFeature
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, HomeAssistantError, callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .base import HacsBase
|
||||
from .const import DOMAIN
|
||||
from .entity import HacsRepositoryEntity
|
||||
from .enums import HacsCategory, HacsDispatchEvent
|
||||
from .exceptions import HacsException
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Setup update platform."""
    hacs: HacsBase = hass.data[DOMAIN]
    # One update entity per downloaded repository.
    entities = (
        HacsRepositoryUpdateEntity(hacs=hacs, repository=repo)
        for repo in hacs.repositories.list_downloaded
    )
    async_add_entities(entities)
|
||||
|
||||
|
||||
class HacsRepositoryUpdateEntity(HacsRepositoryEntity, UpdateEntity):
|
||||
"""Update entities for repositories downloaded with HACS."""
|
||||
|
||||
    # Capabilities advertised to Home Assistant's update platform: manual
    # install, installing a specific version, progress reporting, and
    # release notes rendering.
    _attr_supported_features = (
        UpdateEntityFeature.INSTALL
        | UpdateEntityFeature.SPECIFIC_VERSION
        | UpdateEntityFeature.PROGRESS
        | UpdateEntityFeature.RELEASE_NOTES
    )
|
||||
|
||||
    @property
    def name(self) -> str | None:
        """Return the entity name ("<repository display name> update")."""
        return f"{self.repository.display_name} update"
|
||||
|
||||
    @property
    def latest_version(self) -> str:
        """Return latest version of the entity."""
        # Mirrors the repository's newest available version or commit.
        return self.repository.display_available_version
|
||||
|
||||
@property
|
||||
def release_url(self) -> str:
|
||||
"""Return the URL of the release page."""
|
||||
if self.repository.display_version_or_commit == "commit":
|
||||
return f"https://github.com/{self.repository.data.full_name}"
|
||||
return f"https://github.com/{self.repository.data.full_name}/releases/{self.latest_version}"
|
||||
|
||||
    @property
    def installed_version(self) -> str:
        """Return downloaded version of the entity."""
        # The version HACS currently has on disk for this repository.
        return self.repository.display_installed_version
|
||||
|
||||
@property
|
||||
def release_summary(self) -> str | None:
|
||||
"""Return the release summary."""
|
||||
if self.repository.pending_restart:
|
||||
return "<ha-alert alert-type='error'>Restart of Home Assistant required</ha-alert>"
|
||||
return None
|
||||
|
||||
@property
|
||||
def entity_picture(self) -> str | None:
|
||||
"""Return the entity picture to use in the frontend."""
|
||||
if (
|
||||
self.repository.data.category != HacsCategory.INTEGRATION
|
||||
or self.repository.data.domain is None
|
||||
):
|
||||
return None
|
||||
|
||||
return f"https://brands.home-assistant.io/_/{self.repository.data.domain}/icon.png"
|
||||
|
||||
async def async_install(self, version: str | None, backup: bool, **kwargs: Any) -> None:
|
||||
"""Install an update."""
|
||||
to_download = version or self.latest_version
|
||||
if to_download == self.installed_version:
|
||||
raise HomeAssistantError(f"Version {self.installed_version} of {
|
||||
self.repository.data.full_name} is already downloaded")
|
||||
try:
|
||||
await self.repository.async_download_repository(ref=version or self.latest_version)
|
||||
except HacsException as exception:
|
||||
raise HomeAssistantError(exception) from exception
|
||||
|
||||
async def async_release_notes(self) -> str | None:
|
||||
"""Return the release notes."""
|
||||
if self.repository.pending_restart:
|
||||
return None
|
||||
|
||||
if self.latest_version not in self.repository.data.published_tags:
|
||||
releases = await self.repository.get_releases(
|
||||
prerelease=self.repository.data.show_beta,
|
||||
returnlimit=self.hacs.configuration.release_limit,
|
||||
)
|
||||
if releases:
|
||||
self.repository.data.releases = True
|
||||
self.repository.releases.objects = releases
|
||||
self.repository.data.published_tags = [x.tag_name for x in releases]
|
||||
self.repository.data.last_version = next(iter(self.repository.data.published_tags))
|
||||
|
||||
release_notes = ""
|
||||
# Compile release notes from installed version up to the latest
|
||||
if self.installed_version in self.repository.data.published_tags:
|
||||
for release in self.repository.releases.objects:
|
||||
if release.tag_name == self.installed_version:
|
||||
break
|
||||
release_notes += f"# {release.tag_name}"
|
||||
if release.tag_name != release.name:
|
||||
release_notes += f" - {release.name}"
|
||||
release_notes += f"\n\n{release.body}"
|
||||
release_notes += "\n\n---\n\n"
|
||||
elif any(self.repository.releases.objects):
|
||||
release_notes += self.repository.releases.objects[0].body
|
||||
|
||||
if self.repository.pending_update:
|
||||
if self.repository.data.category == HacsCategory.INTEGRATION:
|
||||
release_notes += (
|
||||
"\n\n<ha-alert alert-type='warning'>You need to restart"
|
||||
" Home Assistant manually after updating.</ha-alert>\n\n"
|
||||
)
|
||||
if self.repository.data.category == HacsCategory.PLUGIN:
|
||||
release_notes += (
|
||||
"\n\n<ha-alert alert-type='warning'>You need to manually"
|
||||
" clear the frontend cache after updating.</ha-alert>\n\n"
|
||||
)
|
||||
|
||||
return release_notes.replace("\n#", "\n\n#")
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register for status events."""
|
||||
await super().async_added_to_hass()
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
HacsDispatchEvent.REPOSITORY_DOWNLOAD_PROGRESS,
|
||||
self._update_download_progress,
|
||||
)
|
||||
)
|
||||
|
||||
@callback
|
||||
def _update_download_progress(self, data: dict) -> None:
|
||||
"""Update the download progress."""
|
||||
if data["repository"] != self.repository.data.full_name:
|
||||
return
|
||||
self._update_in_progress(progress=data["progress"])
|
||||
|
||||
@callback
|
||||
def _update_in_progress(self, progress: int | bool) -> None:
|
||||
"""Update the download progress."""
|
||||
self._attr_in_progress = progress
|
||||
self.async_write_ha_state()
|
||||
|
|
@ -1 +0,0 @@
|
|||
"""Initialize HACS utils."""
|
||||
|
|
@ -1,110 +0,0 @@
|
|||
"""Backup."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
from time import sleep
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from .path import is_safe
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..base import HacsBase
|
||||
from ..repositories.base import HacsRepository
|
||||
|
||||
|
||||
# Fallback backup location, e.g. /tmp/hacs_backup/ (note the trailing slash).
DEFAULT_BACKUP_PATH = f"{tempfile.gettempdir()}/hacs_backup/"
|
||||
|
||||
|
||||
class Backup:
    """Create, restore and clean up a filesystem backup of a HACS-managed path."""

    def __init__(
        self,
        hacs: HacsBase,
        local_path: str | None = None,
        backup_path: str = DEFAULT_BACKUP_PATH,
        repository: HacsRepository | None = None,
    ) -> None:
        """Initialize.

        NOTE(review): if both ``local_path`` and ``repository`` are None this
        raises AttributeError on ``repository.content`` — confirm callers
        always pass at least one of them.
        """
        self.hacs = hacs
        self.repository = repository
        self.local_path = local_path or repository.content.path.local
        self.backup_path = backup_path
        if repository:
            # Repository backups use a persistent per-category directory.
            # NOTE(review): no "/" separates ``repository.data.name`` from the
            # basename appended below in backup_path_full — confirm the
            # intended on-disk layout.
            self.backup_path = (
                tempfile.gettempdir()
                + f"/hacs_persistent_{repository.data.category}/"
                + repository.data.name
            )
        # Full destination path: backup dir + last path segment of the source.
        self.backup_path_full = f"{self.backup_path}{self.local_path.split('/')[-1]}"

    def _init_backup_dir(self) -> bool:
        """Init backup dir.

        Returns True when the backup directory is ready, False when the
        source path does not exist or is not safe to remove.
        """
        if not os.path.exists(self.local_path):
            return False
        if not is_safe(self.hacs, self.local_path):
            return False
        if os.path.exists(self.backup_path):
            shutil.rmtree(self.backup_path)

            # Wait for the folder to be removed
            while os.path.exists(self.backup_path):
                sleep(0.1)
        os.makedirs(self.backup_path, exist_ok=True)
        return True

    def create(self) -> None:
        """Create a backup in /tmp"""
        if not self._init_backup_dir():
            return

        try:
            if os.path.isfile(self.local_path):
                shutil.copyfile(self.local_path, self.backup_path_full)
                os.remove(self.local_path)
            else:
                shutil.copytree(self.local_path, self.backup_path_full)
                shutil.rmtree(self.local_path)
                # Busy-wait until the source tree is actually gone.
                while os.path.exists(self.local_path):
                    sleep(0.1)
            self.hacs.log.debug(
                "Backup for %s, created in %s",
                self.local_path,
                self.backup_path_full,
            )
        except (
            BaseException  # lgtm [py/catch-base-exception] pylint: disable=broad-except
        ) as exception:
            # Best-effort: a failed backup is logged, never raised.
            self.hacs.log.warning("Could not create backup: %s", exception)

    def restore(self) -> None:
        """Restore from backup.

        No-op when no backup exists; otherwise the current local path is
        replaced by the backed-up file or tree.
        """
        if not os.path.exists(self.backup_path_full):
            return

        if os.path.isfile(self.backup_path_full):
            if os.path.exists(self.local_path):
                os.remove(self.local_path)
            shutil.copyfile(self.backup_path_full, self.local_path)
        else:
            if os.path.exists(self.local_path):
                shutil.rmtree(self.local_path)
                # Busy-wait until the old tree is actually gone.
                while os.path.exists(self.local_path):
                    sleep(0.1)
            shutil.copytree(self.backup_path_full, self.local_path)
        self.hacs.log.debug("Restored %s, from backup %s", self.local_path, self.backup_path_full)

    def cleanup(self) -> None:
        """Cleanup backup files."""
        if not os.path.exists(self.backup_path):
            return

        shutil.rmtree(self.backup_path)

        # Wait for the folder to be removed
        while os.path.exists(self.backup_path):
            sleep(0.1)
        self.hacs.log.debug("Backup dir %s cleared", self.backup_path)
|
||||
|
|
@ -1,9 +0,0 @@
|
|||
"""HACS Configuration Schemas."""
|
||||
|
||||
# Configuration:
|
||||
SIDEPANEL_TITLE = "sidepanel_title"
|
||||
SIDEPANEL_ICON = "sidepanel_icon"
|
||||
APPDAEMON = "appdaemon"
|
||||
|
||||
# Options:
|
||||
COUNTRY = "country"
|
||||
|
|
@ -1,323 +0,0 @@
|
|||
"""Data handler for HACS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
|
||||
from ..base import HacsBase
|
||||
from ..const import HACS_REPOSITORY_ID
|
||||
from ..enums import HacsDisabledReason, HacsDispatchEvent
|
||||
from ..repositories.base import TOPIC_FILTER, HacsManifest, HacsRepository
|
||||
from .logger import LOGGER
|
||||
from .path import is_safe
|
||||
from .store import async_load_from_store, async_save_to_store
|
||||
|
||||
# Each tuple below pairs a repository-data attribute name with its default
# value; the store writers only persist attributes whose current value
# differs from that default.

# Minimal data kept for every repository in the experimental store.
EXPORTED_BASE_DATA = (
    ("new", False),
    ("full_name", ""),
)

# Data kept for repositories that are known but not downloaded.
EXPORTED_REPOSITORY_DATA = EXPORTED_BASE_DATA + (
    ("authors", []),
    ("category", ""),
    ("description", ""),
    ("domain", None),
    ("downloads", 0),
    ("etag_repository", None),
    ("hide", False),
    ("last_updated", 0),
    ("new", False),
    ("stargazers_count", 0),
    ("topics", []),
)

# Additional data kept for repositories that are downloaded.
EXPORTED_DOWNLOADED_REPOSITORY_DATA = EXPORTED_REPOSITORY_DATA + (
    ("archived", False),
    ("config_flow", False),
    ("default_branch", None),
    ("first_install", False),
    ("installed_commit", None),
    ("installed", False),
    ("last_commit", None),
    ("last_version", None),
    ("manifest_name", None),
    ("open_issues", 0),
    ("prerelease", None),
    ("published_tags", []),
    ("releases", False),
    ("selected_tag", None),
    ("show_beta", False),
)
|
||||
|
||||
|
||||
class HacsData:
    """Persist and restore HACS state via Home Assistant storage."""

    def __init__(self, hacs: HacsBase):
        """Initialize."""
        self.logger = LOGGER
        self.hacs = hacs
        # Scratch buffer rebuilt on every write; keyed by repository ID for
        # the legacy store and by category for the experimental store.
        self.content = {}

    async def async_force_write(self, _=None):
        """Force write."""
        await self.async_write(force=True)

    async def async_write(self, force: bool = False) -> None:
        """Write content to the store files.

        Skipped while HACS is disabled unless *force* is set.
        """
        if not force and self.hacs.system.disabled:
            return

        self.logger.debug("<HacsData async_write> Saving data")

        # Hacs
        await async_save_to_store(
            self.hacs.hass,
            "hacs",
            {
                "archived_repositories": self.hacs.common.archived_repositories,
                "renamed_repositories": self.hacs.common.renamed_repositories,
                "ignored_repositories": self.hacs.common.ignored_repositories,
            },
        )
        await self._async_store_experimental_content_and_repos()
        await self._async_store_content_and_repos()

    async def _async_store_content_and_repos(self, _=None):  # bb: ignore
        """Store the main repos file and each repo that is out of date."""
        # Repositories
        self.content = {}
        for repository in self.hacs.repositories.list_all:
            if repository.data.category in self.hacs.common.categories:
                self.async_store_repository_data(repository)

        await async_save_to_store(self.hacs.hass, "repositories", self.content)
        for event in (HacsDispatchEvent.REPOSITORY, HacsDispatchEvent.CONFIG):
            self.hacs.async_dispatch(event, {})

    async def _async_store_experimental_content_and_repos(self, _=None):
        """Store the experimental data file grouped by category."""
        # Repositories
        self.content = {}
        for repository in self.hacs.repositories.list_all:
            if repository.data.category in self.hacs.common.categories:
                self.async_store_experimental_repository_data(repository)

        await async_save_to_store(self.hacs.hass, "data", {"repositories": self.content})

    @callback
    def async_store_repository_data(self, repository: HacsRepository) -> None:
        """Store the repository data.

        Adds the repository's entry to ``self.content`` keyed by repository
        ID. (Fixed: previously annotated ``-> dict`` although nothing is
        returned, matching the experimental variant below.)
        """
        data = {"repository_manifest": repository.repository_manifest.manifest}

        # Only persist attributes that differ from their default.
        for key, default in (
            EXPORTED_DOWNLOADED_REPOSITORY_DATA
            if repository.data.installed
            else EXPORTED_REPOSITORY_DATA
        ):
            if (value := getattr(repository.data, key, default)) != default:
                data[key] = value

        if repository.data.installed_version:
            data["version_installed"] = repository.data.installed_version
        if repository.data.last_fetched:
            data["last_fetched"] = repository.data.last_fetched.timestamp()

        self.content[str(repository.data.id)] = data

    @callback
    def async_store_experimental_repository_data(self, repository: HacsRepository) -> None:
        """Store the experimental repository data for non downloaded repositories."""
        data = {}
        self.content.setdefault(repository.data.category, [])

        if repository.data.installed:
            # Downloaded repositories keep the full manifest and data set.
            data["repository_manifest"] = repository.repository_manifest.manifest
            for key, default in EXPORTED_DOWNLOADED_REPOSITORY_DATA:
                if (value := getattr(repository.data, key, default)) != default:
                    data[key] = value

            if repository.data.installed_version:
                data["version_installed"] = repository.data.installed_version
            if repository.data.last_fetched:
                data["last_fetched"] = repository.data.last_fetched.timestamp()
        else:
            # Everything else only keeps the minimal base data.
            for key, default in EXPORTED_BASE_DATA:
                if (value := getattr(repository.data, key, default)) != default:
                    data[key] = value

        self.content[repository.data.category].append({"id": str(repository.data.id), **data})

    async def restore(self):
        """Restore saved data.

        Returns True on success (including a fresh install with no stores),
        False when restoring failed and HACS was disabled.
        """
        self.hacs.status.new = False
        repositories = {}
        hacs = {}

        try:
            hacs = await async_load_from_store(self.hacs.hass, "hacs") or {}
        except HomeAssistantError:
            pass

        try:
            # Normalize a missing/empty store to {} so the experimental-store
            # migration below can safely assign into it (previously a None
            # result would raise TypeError on item assignment).
            repositories = await async_load_from_store(self.hacs.hass, "repositories") or {}
            if not repositories and (data := await async_load_from_store(self.hacs.hass, "data")):
                # Migrate from the experimental category-keyed layout.
                for category, entries in data.get("repositories", {}).items():
                    for repository in entries:
                        repositories[repository["id"]] = {"category": category, **repository}

        except HomeAssistantError as exception:
            self.hacs.log.error(
                "Could not read %s, restore the file from a backup - %s",
                self.hacs.hass.config.path(".storage/hacs.data"),
                exception,
            )
            self.hacs.disable_hacs(HacsDisabledReason.RESTORE)
            return False

        if not hacs and not repositories:
            # Assume new install
            self.hacs.status.new = True
            return True

        self.logger.info("<HacsData restore> Restore started")

        # Hacs
        self.hacs.common.archived_repositories = set()
        self.hacs.common.ignored_repositories = set()
        self.hacs.common.renamed_repositories = {}

        # Clear out double renamed values (only keep entries whose target
        # is not itself renamed again).
        renamed = hacs.get("renamed_repositories", {})
        for entry in renamed:
            value = renamed.get(entry)
            if value not in renamed:
                self.hacs.common.renamed_repositories[entry] = value

        # Clear out double archived values
        for entry in hacs.get("archived_repositories", set()):
            if entry not in self.hacs.common.archived_repositories:
                self.hacs.common.archived_repositories.add(entry)

        # Clear out double ignored values
        for entry in hacs.get("ignored_repositories", set()):
            if entry not in self.hacs.common.ignored_repositories:
                self.hacs.common.ignored_repositories.add(entry)

        try:
            await self.register_unknown_repositories(repositories)

            for entry, repo_data in repositories.items():
                if entry == "0":
                    # Ignore repositories with ID 0
                    self.logger.debug(
                        "<HacsData restore> Found repository with ID %s - %s", entry, repo_data
                    )
                    continue
                self.async_restore_repository(entry, repo_data)

            self.logger.info("<HacsData restore> Restore done")
        except (
            # lgtm [py/catch-base-exception] pylint: disable=broad-except
            BaseException
        ) as exception:
            self.logger.critical(
                "<HacsData restore> [%s] Restore Failed!", exception, exc_info=exception
            )
            return False
        return True

    async def register_unknown_repositories(
        self, repositories: dict[str, dict[str, Any]], category: str | None = None
    ):
        """Registry any unknown repositories."""
        for repo_idx, (entry, repo_data) in enumerate(repositories.items()):
            # async_register_repository is awaited in a loop
            # since its unlikely to ever suspend at startup
            if (
                entry == "0"
                or repo_data.get("category", category) is None
                or self.hacs.repositories.is_registered(repository_id=entry)
            ):
                continue
            await self.hacs.async_register_repository(
                repository_full_name=repo_data["full_name"],
                category=repo_data.get("category", category),
                check=False,
                repository_id=entry,
            )
            if repo_idx % 100 == 0:
                # yield to avoid blocking the event loop
                await asyncio.sleep(0)

    @callback
    def async_restore_repository(self, entry: str, repository_data: dict[str, Any]):
        """Restore a single repository's attributes from stored data."""
        repository: HacsRepository | None = None
        # Prefer lookup by full name, fall back to the stored ID.
        if full_name := repository_data.get("full_name"):
            repository = self.hacs.repositories.get_by_full_name(full_name)
        if not repository:
            repository = self.hacs.repositories.get_by_id(entry)
        if not repository:
            return

        try:
            self.hacs.repositories.set_repository_id(repository, entry)
        except ValueError as exception:
            self.logger.warning("<HacsData async_restore_repository> duplicate IDs %s", exception)
            return

        # Restore repository attributes
        repository.data.authors = repository_data.get("authors", [])
        repository.data.description = repository_data.get("description", "")
        repository.data.downloads = repository_data.get("downloads", 0)
        repository.data.last_updated = repository_data.get("last_updated", 0)
        if self.hacs.system.generator:
            repository.data.etag_releases = repository_data.get("etag_releases")
        repository.data.open_issues = repository_data.get("open_issues", 0)
        repository.data.etag_repository = repository_data.get("etag_repository")
        repository.data.topics = [
            topic for topic in repository_data.get("topics", []) if topic not in TOPIC_FILTER
        ]
        repository.data.domain = repository_data.get("domain")
        # Fall back to the legacy "stars" key for old stores.
        repository.data.stargazers_count = repository_data.get(
            "stargazers_count"
        ) or repository_data.get("stars", 0)
        repository.releases.last_release = repository_data.get("last_release_tag")
        repository.data.releases = repository_data.get("releases", False)
        repository.data.installed = repository_data.get("installed", False)
        repository.data.new = repository_data.get("new", False)
        repository.data.selected_tag = repository_data.get("selected_tag")
        repository.data.show_beta = repository_data.get("show_beta", False)
        repository.data.last_version = repository_data.get("last_version")
        repository.data.prerelease = repository_data.get("prerelease")
        repository.data.last_commit = repository_data.get("last_commit")
        repository.data.installed_version = repository_data.get("version_installed")
        repository.data.installed_commit = repository_data.get("installed_commit")
        repository.data.manifest_name = repository_data.get("manifest_name")

        if last_fetched := repository_data.get("last_fetched"):
            repository.data.last_fetched = datetime.fromtimestamp(last_fetched, UTC)

        repository.repository_manifest = HacsManifest.from_dict(
            repository_data.get("manifest") or repository_data.get("repository_manifest") or {}
        )

        # A prerelease equal to the latest release is not a prerelease anymore.
        if repository.data.prerelease == repository.data.last_version:
            repository.data.prerelease = None

        if repository.localpath is not None and is_safe(self.hacs, repository.localpath):
            # Set local path
            repository.content.path.local = repository.localpath

        if repository.data.installed:
            repository.data.first_install = False

        if entry == HACS_REPOSITORY_ID:
            # HACS itself is always installed at the running version.
            repository.data.installed_version = self.hacs.version
            repository.data.installed = True
||||
|
|
@ -1,8 +0,0 @@
|
|||
"""Util to decode content from the github API."""
|
||||
|
||||
from base64 import b64decode
|
||||
|
||||
|
||||
def decode_content(content: str) -> str:
    """Decode base64-encoded content from the github API into text."""
    raw = b64decode(content.encode("utf-8"))
    return raw.decode()
|
||||
|
|
@ -1,43 +0,0 @@
|
|||
"""HACS Decorators."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Coroutine
|
||||
from functools import wraps
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from ..const import DEFAULT_CONCURRENT_BACKOFF_TIME, DEFAULT_CONCURRENT_TASKS
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..base import HacsBase
|
||||
|
||||
|
||||
def concurrent(
    concurrenttasks: int = DEFAULT_CONCURRENT_TASKS,
    backoff_time: int = DEFAULT_CONCURRENT_BACKOFF_TIME,
) -> Coroutine[Any, Any, None]:
    """Return a decorator that limits concurrency of an async method.

    At most *concurrenttasks* invocations run at once; the shared semaphore
    is created per decorator use, so all methods decorated by the same
    ``@concurrent(...)`` application share one limit.
    """

    max_concurrent = asyncio.Semaphore(concurrenttasks)

    def inner_function(function) -> Coroutine[Any, Any, None]:
        @wraps(function)
        async def wrapper(*args, **kwargs) -> None:
            # args[0] is expected to be the bound instance; fall back to None
            # when it has no `hacs` attribute.
            hacs: HacsBase = getattr(args[0], "hacs", None)

            async with max_concurrent:
                result = await function(*args, **kwargs)
                # NOTE(review): the backoff sleep happens while the semaphore
                # is still held, so it throttles the whole pool — and it also
                # triggers when `hacs` is None (plain functions). Confirm both
                # are intended.
                if (
                    hacs is None
                    or hacs.queue is None
                    or hacs.queue.has_pending_tasks
                    or "update" not in function.__name__
                ):
                    await asyncio.sleep(backoff_time)

                return result

        return wrapper

    return inner_function
|
||||
|
|
@ -1,47 +0,0 @@
|
|||
"""Filter functions."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
|
||||
def filter_content_return_one_of_type(
    content: list[str | Any],
    namestartswith: str,
    filterfiltype: str,
    attr: str = "name",
) -> list[str]:
    """Only match 1 of the filter.

    Keeps every entry whose name starts with *namestartswith*; of the
    entries that additionally end with ``.<filterfiltype>`` only the first
    is kept, all later ones are dropped. Plain strings are matched
    directly, any other object is matched on its *attr* attribute.

    (Refactored: the original duplicated identical matching logic across
    a five-level nest for the str and object branches.)
    """
    contents: list[str] = []
    filetypefound = False
    for entry in content:
        # Unify the two branches: resolve the name to match against once.
        name = entry if isinstance(entry, str) else getattr(entry, attr)
        if not name.startswith(namestartswith):
            continue
        if name.endswith(f".{filterfiltype}"):
            # Only the first entry of the filtered file type survives.
            if not filetypefound:
                contents.append(entry)
                filetypefound = True
            continue
        contents.append(entry)
    return contents
|
||||
|
||||
|
||||
def get_first_directory_in_directory(content: list[str | Any], dirname: str) -> str | None:
    """Return the first directory in dirname or None."""
    return next(
        (
            entry.filename
            for entry in content
            if entry.full_path.startswith(dirname)
            and entry.full_path != dirname
            and entry.is_directory
        ),
        None,
    )
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
"""JSON utils."""
|
||||
|
||||
from homeassistant.util.json import json_loads
|
||||
|
||||
__all__ = ["json_loads"]
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
"""Custom logger for HACS."""
|
||||
|
||||
import logging
|
||||
|
||||
from ..const import PACKAGE_NAME
|
||||
|
||||
LOGGER: logging.Logger = logging.getLogger(PACKAGE_NAME)
|
||||
|
|
@ -1,41 +0,0 @@
|
|||
"""Path utils"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import lru_cache
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..base import HacsBase
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def _get_safe_paths(
|
||||
config_path: str,
|
||||
appdaemon_path: str,
|
||||
plugin_path: str,
|
||||
python_script_path: str,
|
||||
theme_path: str,
|
||||
) -> set[str]:
|
||||
"""Get safe paths."""
|
||||
return {
|
||||
Path(f"{config_path}/{appdaemon_path}").as_posix(),
|
||||
Path(f"{config_path}/{plugin_path}").as_posix(),
|
||||
Path(f"{config_path}/{python_script_path}").as_posix(),
|
||||
Path(f"{config_path}/{theme_path}").as_posix(),
|
||||
Path(f"{config_path}/custom_components/").as_posix(),
|
||||
Path(f"{config_path}/custom_templates/").as_posix(),
|
||||
}
|
||||
|
||||
|
||||
def is_safe(hacs: HacsBase, path: str | Path) -> bool:
    """Helper to check if path is safe to remove.

    A path is unsafe when it is one of the HACS-managed root directories.
    """
    protected = _get_safe_paths(
        hacs.core.config_path,
        hacs.configuration.appdaemon_path,
        hacs.configuration.plugin_path,
        hacs.configuration.python_script_path,
        hacs.configuration.theme_path,
    )
    return Path(path).as_posix() not in protected
|
||||