mirror of
https://github.com/sudoxnym/connectd.git
synced 2026-04-14 11:37:42 +00:00
add REST API and HACS integration for home assistant
- api.py exposes /api/stats, /api/health, /api/state on port 8099 - daemon updates shared state for API - HACS custom component with sensors for all stats - config flow for easy setup - example automations in docs 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
parent
bd1efa2ae7
commit
1c525097f4
10 changed files with 589 additions and 0 deletions
125
api.py
Normal file
125
api.py
Normal file
|
|
@ -0,0 +1,125 @@
|
|||
#!/usr/bin/env python3
|
||||
"""
|
||||
connectd/api.py - REST API for stats and control
|
||||
|
||||
exposes daemon stats for home assistant integration.
|
||||
runs on port 8099 by default.
|
||||
"""
|
||||
|
||||
import os
|
||||
import json
|
||||
import threading
|
||||
from http.server import HTTPServer, BaseHTTPRequestHandler
|
||||
from datetime import datetime
|
||||
|
||||
from db import Database
|
||||
|
||||
API_PORT = int(os.environ.get('CONNECTD_API_PORT', 8099))
|
||||
|
||||
# shared state (updated by daemon)
|
||||
# shared state (updated by daemon, read by the API handler thread)
_daemon_state = {
    'running': False,
    'dry_run': False,
    'last_scout': None,
    'last_match': None,
    'last_intro': None,
    'last_lost': None,
    'intros_today': 0,
    'lost_intros_today': 0,
    'started_at': None,
}

# the daemon thread writes this dict while the API server thread
# (started by start_api_thread) reads it, so all access is serialized
# through a lock.
_state_lock = threading.Lock()


def update_daemon_state(state_dict):
    """update shared daemon state (called by daemon).

    state_dict: partial mapping of keys merged into the shared state.
    """
    # no `global` needed: the dict is mutated in place, never rebound
    with _state_lock:
        _daemon_state.update(state_dict)


def get_daemon_state():
    """return a snapshot copy of the current daemon state.

    the copy lets callers mutate the result without affecting the
    shared state.
    """
    with _state_lock:
        return _daemon_state.copy()
|
||||
|
||||
|
||||
class APIHandler(BaseHTTPRequestHandler):
    """REST API handler exposing daemon stats and state as JSON.

    routes:
      GET /api/stats  - database statistics
      GET /api/health - daemon status + uptime
      GET /api/state  - full shared daemon state
    """

    def log_message(self, format, *args):
        """suppress default per-request logging"""
        pass

    def _send_json(self, data, status=200):
        """serialize data as JSON and send it with the given HTTP status"""
        self.send_response(status)
        self.send_header('Content-Type', 'application/json')
        # allow browser dashboards on other origins to read the API
        self.send_header('Access-Control-Allow-Origin', '*')
        self.end_headers()
        self.wfile.write(json.dumps(data).encode())

    def do_GET(self):
        """route GET requests to the matching handler"""
        if self.path == '/api/stats':
            self._handle_stats()
        elif self.path == '/api/health':
            self._handle_health()
        elif self.path == '/api/state':
            self._handle_state()
        else:
            self._send_json({'error': 'not found'}, 404)

    def _handle_stats(self):
        """return database statistics as JSON (500 with error text on failure)"""
        try:
            db = Database()
            try:
                stats = db.stats()
            finally:
                # close even when stats() raises, so handler errors
                # don't leak database connections
                db.close()
            self._send_json(stats)
        except Exception as e:
            self._send_json({'error': str(e)}, 500)

    def _handle_health(self):
        """return daemon health: status, dry_run flag, and uptime in seconds"""
        state = get_daemon_state()

        health = {
            'status': 'running' if state['running'] else 'stopped',
            'dry_run': state['dry_run'],
            'uptime_seconds': None,
        }

        # started_at is an ISO string set by the daemon; uptime stays
        # None until the daemon has reported in
        if state['started_at']:
            uptime = datetime.now() - datetime.fromisoformat(state['started_at'])
            health['uptime_seconds'] = int(uptime.total_seconds())

        self._send_json(health)

    def _handle_state(self):
        """return full daemon state with datetimes serialized to ISO strings"""
        state = get_daemon_state()

        # convert datetimes to strings (the daemon normally stores ISO
        # strings already; this is a safety net for raw datetime values)
        for key in ['last_scout', 'last_match', 'last_intro', 'last_lost', 'started_at']:
            if state[key] and isinstance(state[key], datetime):
                state[key] = state[key].isoformat()

        self._send_json(state)
|
||||
|
||||
|
||||
def run_api_server():
    """create the HTTP server and serve requests forever (blocking).

    binds all interfaces on API_PORT; intended to run inside the
    background thread created by start_api_thread().
    """
    httpd = HTTPServer(('0.0.0.0', API_PORT), APIHandler)
    print(f"connectd api running on port {API_PORT}")
    httpd.serve_forever()
|
||||
|
||||
|
||||
def start_api_thread():
    """launch run_api_server in a background thread and return the thread.

    daemon=True so the API thread never blocks process shutdown.
    """
    api_thread = threading.Thread(target=run_api_server, daemon=True)
    api_thread.start()
    return api_thread
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # standalone mode: serve the API without the daemon, for testing
    print(f"starting connectd api on port {API_PORT}...")
    run_api_server()
|
||||
29
daemon.py
29
daemon.py
|
|
@ -30,6 +30,7 @@ from introd.lost_intro import draft_lost_intro, get_lost_intro_config
|
|||
from introd.send import send_email
|
||||
from introd.deliver import deliver_intro, determine_best_contact
|
||||
from config import get_lost_config
|
||||
from api import start_api_thread, update_daemon_state
|
||||
|
||||
# daemon config
|
||||
SCOUT_INTERVAL = 3600 * 4 # full scout every 4 hours
|
||||
|
|
@ -59,9 +60,27 @@ class ConnectDaemon:
|
|||
signal.signal(signal.SIGINT, self._shutdown)
|
||||
signal.signal(signal.SIGTERM, self._shutdown)
|
||||
|
||||
# update API state
|
||||
self._update_api_state()
|
||||
|
||||
def _shutdown(self, signum, frame):
    """signal handler (SIGINT/SIGTERM): stop the main loop.

    signum/frame are supplied by the signal module; only the standard
    handler signature is needed here.
    """
    self.running = False
    print("\nconnectd: shutting down...")
    self._update_api_state()
|
||||
|
||||
def _update_api_state(self):
    """push current daemon counters/timestamps to the shared API state.

    records the daemon start time exactly once: previously this sent
    datetime.now() as 'started_at' on every call, which reset the API's
    started_at (and therefore /api/health uptime_seconds) each time the
    state was refreshed.
    """
    if not hasattr(self, '_api_started_at'):
        # captured lazily on the first call (which happens during init)
        self._api_started_at = datetime.now().isoformat()
    update_daemon_state({
        'running': self.running,
        'dry_run': self.dry_run,
        'last_scout': self.last_scout.isoformat() if self.last_scout else None,
        'last_match': self.last_match.isoformat() if self.last_match else None,
        'last_intro': self.last_intro.isoformat() if self.last_intro else None,
        'last_lost': self.last_lost.isoformat() if self.last_lost else None,
        'intros_today': self.intros_today,
        'lost_intros_today': self.lost_intros_today,
        'started_at': self._api_started_at,
    })
|
||||
|
||||
def log(self, msg):
|
||||
"""timestamped log"""
|
||||
|
|
@ -447,6 +466,11 @@ class ConnectDaemon:
|
|||
def run(self):
|
||||
"""main daemon loop"""
|
||||
self.log("connectd daemon starting...")
|
||||
|
||||
# start API server
|
||||
start_api_thread()
|
||||
self.log("api server started on port 8099")
|
||||
|
||||
if self.dry_run:
|
||||
self.log("*** DRY RUN MODE - no intros will be sent ***")
|
||||
self.log(f"scout interval: {SCOUT_INTERVAL}s")
|
||||
|
|
@ -457,6 +481,7 @@ class ConnectDaemon:
|
|||
|
||||
# initial scout
|
||||
self.scout_cycle()
|
||||
self._update_api_state()
|
||||
|
||||
while self.running:
|
||||
now = datetime.now()
|
||||
|
|
@ -464,19 +489,23 @@ class ConnectDaemon:
|
|||
# scout cycle
|
||||
if not self.last_scout or (now - self.last_scout).seconds >= SCOUT_INTERVAL:
|
||||
self.scout_cycle()
|
||||
self._update_api_state()
|
||||
|
||||
# match cycle
|
||||
if not self.last_match or (now - self.last_match).seconds >= MATCH_INTERVAL:
|
||||
self.match_priority_users()
|
||||
self.match_strangers()
|
||||
self._update_api_state()
|
||||
|
||||
# intro cycle
|
||||
if not self.last_intro or (now - self.last_intro).seconds >= INTRO_INTERVAL:
|
||||
self.send_stranger_intros()
|
||||
self._update_api_state()
|
||||
|
||||
# lost builder cycle
|
||||
if not self.last_lost or (now - self.last_lost).seconds >= LOST_INTERVAL:
|
||||
self.send_lost_builder_intros()
|
||||
self._update_api_state()
|
||||
|
||||
# sleep between checks
|
||||
time.sleep(60)
|
||||
|
|
|
|||
|
|
@ -5,6 +5,8 @@ services:
|
|||
restart: unless-stopped
|
||||
env_file:
|
||||
- .env
|
||||
ports:
|
||||
- "8099:8099"
|
||||
volumes:
|
||||
- ./data:/app/data
|
||||
- ./db:/app/db
|
||||
|
|
|
|||
88
hacs/README.md
Normal file
88
hacs/README.md
Normal file
|
|
@ -0,0 +1,88 @@
|
|||
# connectd home assistant integration
|
||||
|
||||
monitor your connectd daemon from home assistant.
|
||||
|
||||
## installation
|
||||
|
||||
### HACS (recommended)
|
||||
|
||||
1. open HACS in home assistant
|
||||
2. click the three dots menu → custom repositories
|
||||
3. add `https://github.com/sudoxnym/connectd` with category "integration"
|
||||
4. search for "connectd" and install
|
||||
5. restart home assistant
|
||||
6. go to settings → devices & services → add integration → connectd
|
||||
|
||||
### manual
|
||||
|
||||
1. copy `custom_components/connectd` to your HA `config/custom_components/` directory
|
||||
2. restart home assistant
|
||||
3. go to settings → devices & services → add integration → connectd
|
||||
|
||||
## configuration
|
||||
|
||||
enter the host and port of your connectd daemon:
|
||||
- **host**: IP or hostname where connectd is running (e.g., `192.168.1.8`)
|
||||
- **port**: API port (default: `8099`)
|
||||
|
||||
## sensors
|
||||
|
||||
the integration creates these sensors:
|
||||
|
||||
### stats
|
||||
- `sensor.connectd_total_humans` - total discovered humans
|
||||
- `sensor.connectd_high_score_humans` - humans with high values alignment
|
||||
- `sensor.connectd_total_matches` - total matches found
|
||||
- `sensor.connectd_total_intros` - total intro drafts
|
||||
- `sensor.connectd_sent_intros` - intros successfully sent
|
||||
- `sensor.connectd_active_builders` - active builder count
|
||||
- `sensor.connectd_lost_builders` - lost builder count
|
||||
- `sensor.connectd_recovering_builders` - recovering builder count
|
||||
- `sensor.connectd_lost_outreach_sent` - lost builder outreach count
|
||||
|
||||
### state
|
||||
- `sensor.connectd_intros_today` - intros sent today
|
||||
- `sensor.connectd_lost_intros_today` - lost builder intros today
|
||||
- `sensor.connectd_status` - daemon status (running/dry_run/stopped)
|
||||
|
||||
### per-platform
|
||||
- `sensor.connectd_github_humans`
|
||||
- `sensor.connectd_mastodon_humans`
|
||||
- `sensor.connectd_reddit_humans`
|
||||
- `sensor.connectd_lemmy_humans`
|
||||
- `sensor.connectd_discord_humans`
|
||||
- `sensor.connectd_lobsters_humans`
|
||||
|
||||
## example dashboard card
|
||||
|
||||
```yaml
|
||||
type: entities
|
||||
title: connectd
|
||||
entities:
|
||||
- entity: sensor.connectd_status
|
||||
- entity: sensor.connectd_total_humans
|
||||
- entity: sensor.connectd_intros_today
|
||||
- entity: sensor.connectd_lost_intros_today
|
||||
- entity: sensor.connectd_active_builders
|
||||
- entity: sensor.connectd_lost_builders
|
||||
```
|
||||
|
||||
## automations
|
||||
|
||||
example: notify when an intro is sent:
|
||||
|
||||
```yaml
|
||||
automation:
|
||||
- alias: "connectd intro notification"
|
||||
trigger:
|
||||
- platform: state
|
||||
entity_id: sensor.connectd_intros_today
|
||||
condition:
|
||||
- condition: template
|
||||
value_template: "{{ trigger.to_state.state | int > trigger.from_state.state | int }}"
|
||||
action:
|
||||
- service: notify.mobile_app
|
||||
data:
|
||||
title: "connectd"
|
||||
message: "sent intro #{{ states('sensor.connectd_intros_today') }} today"
|
||||
```
|
||||
84
hacs/custom_components/connectd/__init__.py
Normal file
84
hacs/custom_components/connectd/__init__.py
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
"""connectd integration for home assistant."""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
|
||||
import aiohttp
|
||||
import async_timeout
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DOMAIN = "connectd"
|
||||
PLATFORMS = [Platform.SENSOR]
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """set up connectd from a config entry."""
    coordinator = ConnectdDataUpdateCoordinator(
        hass, entry.data["host"], entry.data["port"]
    )
    # fail setup early if the daemon API is unreachable
    await coordinator.async_config_entry_first_refresh()

    domain_data = hass.data.setdefault(DOMAIN, {})
    domain_data[entry.entry_id] = coordinator

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """unload a config entry and drop its coordinator from hass.data."""
    if await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
        hass.data[DOMAIN].pop(entry.entry_id)
        return True
    return False
|
||||
|
||||
|
||||
class ConnectdDataUpdateCoordinator(DataUpdateCoordinator):
    """coordinator that polls the connectd REST API for stats and state."""

    def __init__(self, hass: HomeAssistant, host: str, port: int) -> None:
        """initialize with the daemon's host and API port."""
        self.host = host
        self.port = port
        self.base_url = f"http://{host}:{port}"

        super().__init__(
            hass,
            _LOGGER,
            name=DOMAIN,
            update_interval=SCAN_INTERVAL,
        )

    async def _async_update_data(self):
        """fetch /api/stats and /api/state; return {'stats': ..., 'state': ...}.

        raises UpdateFailed on transport or HTTP errors so HA marks the
        entities unavailable.
        """
        # NOTE(review): a fresh ClientSession per poll works, but HA
        # convention is to reuse hass's shared session via
        # async_get_clientsession — left as-is to avoid new imports.
        try:
            async with async_timeout.timeout(10):
                async with aiohttp.ClientSession() as session:
                    # get stats
                    async with session.get(f"{self.base_url}/api/stats") as resp:
                        if resp.status != 200:
                            raise UpdateFailed(f"error fetching stats: {resp.status}")
                        stats = await resp.json()

                    # get state
                    async with session.get(f"{self.base_url}/api/state") as resp:
                        if resp.status != 200:
                            raise UpdateFailed(f"error fetching state: {resp.status}")
                        state = await resp.json()

            return {"stats": stats, "state": state}

        except UpdateFailed:
            # HTTP-status failures raised above were previously caught
            # by the generic handler below and re-wrapped as
            # "unexpected error" — pass them through untouched
            raise
        except aiohttp.ClientError as err:
            # chain the cause so the original traceback is preserved
            raise UpdateFailed(f"error communicating with connectd: {err}") from err
        except Exception as err:
            raise UpdateFailed(f"unexpected error: {err}") from err
|
||||
64
hacs/custom_components/connectd/config_flow.py
Normal file
64
hacs/custom_components/connectd/config_flow.py
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
"""config flow for connectd integration."""
|
||||
from __future__ import annotations
|
||||
|
||||
import aiohttp
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT
|
||||
from homeassistant.data_entry_flow import FlowResult
|
||||
|
||||
from . import DOMAIN
|
||||
|
||||
DEFAULT_PORT = 8099
|
||||
|
||||
|
||||
class ConnectdConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
    """handle a config flow for connectd."""

    VERSION = 1

    async def async_step_user(
        self, user_input: dict | None = None
    ) -> FlowResult:
        """handle the initial step: validate host/port, then create the entry."""
        errors = {}

        if user_input is not None:
            host = user_input[CONF_HOST]
            port = user_input.get(CONF_PORT, DEFAULT_PORT)

            # probe the daemon's health endpoint to validate the input
            reachable = False
            try:
                async with aiohttp.ClientSession() as session:
                    url = f"http://{host}:{port}/api/health"
                    # aiohttp deprecated plain-number timeouts; use an
                    # explicit ClientTimeout
                    async with session.get(
                        url, timeout=aiohttp.ClientTimeout(total=5)
                    ) as resp:
                        reachable = resp.status == 200
            except (TimeoutError, aiohttp.ClientError):
                # timeouts previously fell through to the generic
                # handler and showed "unknown"; report them as
                # connection failures. (asyncio.TimeoutError is the
                # builtin TimeoutError on python 3.11+ — TODO confirm
                # minimum supported python for older versions.)
                errors["base"] = "cannot_connect"
            except Exception:
                errors["base"] = "unknown"

            if reachable:
                # one entry per host:port pair; abort handling now runs
                # OUTSIDE the try block — previously the AbortFlow
                # raised by _abort_if_unique_id_configured was swallowed
                # by `except Exception` and surfaced as "unknown"
                await self.async_set_unique_id(f"{host}:{port}")
                self._abort_if_unique_id_configured()

                return self.async_create_entry(
                    title=f"connectd ({host})",
                    data={
                        "host": host,
                        "port": port,
                    },
                )
            if not errors:
                errors["base"] = "cannot_connect"

        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_HOST, default="192.168.1.8"): str,
                    vol.Optional(CONF_PORT, default=DEFAULT_PORT): int,
                }
            ),
            errors=errors,
        )
|
||||
11
hacs/custom_components/connectd/manifest.json
Normal file
11
hacs/custom_components/connectd/manifest.json
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
{
|
||||
"domain": "connectd",
|
||||
"name": "connectd",
|
||||
"codeowners": ["@sudoxnym"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://github.com/sudoxnym/connectd",
|
||||
"iot_class": "local_polling",
|
||||
"issue_tracker": "https://github.com/sudoxnym/connectd/issues",
|
||||
"requirements": [],
|
||||
"version": "1.0.0"
|
||||
}
|
||||
162
hacs/custom_components/connectd/sensor.py
Normal file
162
hacs/custom_components/connectd/sensor.py
Normal file
|
|
@ -0,0 +1,162 @@
|
|||
"""sensor platform for connectd."""
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorEntity,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from . import DOMAIN, ConnectdDataUpdateCoordinator
|
||||
|
||||
SENSORS = [
|
||||
# stats sensors
|
||||
("total_humans", "total humans", "mdi:account-group", "stats"),
|
||||
("high_score_humans", "high score humans", "mdi:account-star", "stats"),
|
||||
("total_matches", "total matches", "mdi:handshake", "stats"),
|
||||
("total_intros", "total intros", "mdi:email-outline", "stats"),
|
||||
("sent_intros", "sent intros", "mdi:email-check", "stats"),
|
||||
("active_builders", "active builders", "mdi:hammer-wrench", "stats"),
|
||||
("lost_builders", "lost builders", "mdi:account-question", "stats"),
|
||||
("recovering_builders", "recovering builders", "mdi:account-heart", "stats"),
|
||||
("lost_outreach_sent", "lost outreach sent", "mdi:heart-pulse", "stats"),
|
||||
|
||||
# state sensors
|
||||
("intros_today", "intros today", "mdi:email-fast", "state"),
|
||||
("lost_intros_today", "lost intros today", "mdi:heart-outline", "state"),
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """set up connectd sensors."""
    coordinator = hass.data[DOMAIN][entry.entry_id]

    # one generic sensor per (key, name, icon, source) row
    entities = [
        ConnectdSensor(coordinator, sensor_key, name, icon, data_source)
        for sensor_key, name, icon, data_source in SENSORS
    ]

    # daemon status sensor
    entities.append(ConnectdStatusSensor(coordinator))

    # per-platform human counts (read from the stats by_platform dict)
    for platform_name in ("github", "mastodon", "reddit", "lemmy", "discord", "lobsters"):
        entities.append(ConnectdPlatformSensor(coordinator, platform_name))

    async_add_entities(entities)
|
||||
|
||||
|
||||
class ConnectdSensor(CoordinatorEntity, SensorEntity):
    """generic connectd sensor backed by one key of the coordinator data."""

    def __init__(
        self,
        coordinator: ConnectdDataUpdateCoordinator,
        sensor_key: str,
        name: str,
        icon: str,
        data_source: str,
    ) -> None:
        """initialize; data_source selects the 'stats' or 'state' dict."""
        super().__init__(coordinator)
        self._sensor_key = sensor_key
        self._data_source = data_source
        self._attr_name = f"connectd {name}"
        self._attr_unique_id = f"connectd_{sensor_key}"
        self._attr_icon = icon
        self._attr_state_class = SensorStateClass.MEASUREMENT

    @property
    def native_value(self):
        """value of the tracked key; 0 if missing, None before first fetch."""
        payload = self.coordinator.data
        if not payload:
            return None
        return payload.get(self._data_source, {}).get(self._sensor_key, 0)
|
||||
|
||||
|
||||
class ConnectdStatusSensor(CoordinatorEntity, SensorEntity):
    """daemon status sensor: running / dry_run / stopped / unavailable."""

    def __init__(self, coordinator: ConnectdDataUpdateCoordinator) -> None:
        """initialize."""
        super().__init__(coordinator)
        self._attr_name = "connectd status"
        self._attr_unique_id = "connectd_status"
        self._attr_icon = "mdi:connection"

    @property
    def native_value(self):
        """derive a status string from the daemon's reported state."""
        payload = self.coordinator.data
        if not payload:
            return "unavailable"
        daemon = payload.get("state", {})
        if not daemon.get("running"):
            return "stopped"
        return "dry_run" if daemon.get("dry_run") else "running"

    @property
    def extra_state_attributes(self):
        """expose the daemon's last-activity timestamps as attributes."""
        payload = self.coordinator.data
        if not payload:
            return {}
        daemon = payload.get("state", {})
        keys = ("last_scout", "last_match", "last_intro", "last_lost", "started_at")
        return {key: daemon.get(key) for key in keys}
|
||||
|
||||
|
||||
class ConnectdPlatformSensor(CoordinatorEntity, SensorEntity):
    """per-platform discovered-humans count sensor."""

    # icon per platform name; mdi:web is the fallback
    _PLATFORM_ICONS = {
        "github": "mdi:github",
        "mastodon": "mdi:mastodon",
        "reddit": "mdi:reddit",
        "lemmy": "mdi:alpha-l-circle",
        "discord": "mdi:discord",
        "lobsters": "mdi:web",
        "bluesky": "mdi:cloud",
        "matrix": "mdi:matrix",
    }

    def __init__(
        self,
        coordinator: ConnectdDataUpdateCoordinator,
        platform: str,
    ) -> None:
        """initialize for one platform name."""
        super().__init__(coordinator)
        self._platform = platform
        self._attr_name = f"connectd {platform} humans"
        self._attr_unique_id = f"connectd_platform_{platform}"
        self._attr_icon = self._get_platform_icon(platform)
        self._attr_state_class = SensorStateClass.MEASUREMENT

    def _get_platform_icon(self, platform: str) -> str:
        """get icon for platform."""
        return self._PLATFORM_ICONS.get(platform, "mdi:web")

    @property
    def native_value(self):
        """humans discovered on this platform (0 when unknown)."""
        payload = self.coordinator.data
        if not payload:
            return 0
        counts = payload.get("stats", {}).get("by_platform", {})
        return counts.get(self._platform, 0)
|
||||
18
hacs/custom_components/connectd/strings.json
Normal file
18
hacs/custom_components/connectd/strings.json
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"title": "connectd daemon",
|
||||
"description": "connect to your connectd daemon for monitoring.",
|
||||
"data": {
|
||||
"host": "host",
|
||||
"port": "port"
|
||||
}
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "failed to connect to connectd api",
|
||||
"unknown": "unexpected error"
|
||||
}
|
||||
}
|
||||
}
|
||||
6
hacs/hacs.json
Normal file
6
hacs/hacs.json
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
{
|
||||
"name": "connectd",
|
||||
"render_readme": true,
|
||||
"domains": ["sensor"],
|
||||
"homeassistant": "2023.1.0"
|
||||
}
|
||||
Loading…
Reference in a new issue