diff --git a/.devcontainer.json b/.devcontainer.json
new file mode 100644
index 0000000..47dd79b
--- /dev/null
+++ b/.devcontainer.json
@@ -0,0 +1,55 @@
+{
+ "name": "Sinapsi Alfa Modbus integration development",
+ "image": "mcr.microsoft.com/devcontainers/python:3.11-bullseye",
+ "postCreateCommand": "scripts/setup",
+ "forwardPorts": [
+ 8123
+ ],
+ "portsAttributes": {
+ "8123": {
+ "label": "Home Assistant",
+ "onAutoForward": "notify"
+ }
+ },
+ "customizations": {
+ "vscode": {
+ "extensions": [
+ "ms-python.python",
+ "github.vscode-pull-request-github",
+ "ryanluker.vscode-coverage-gutters",
+ "ms-python.vscode-pylance",
+ "charliermarsh.ruff",
+ "ms-python.black-formatter",
+ "esbenp.prettier-vscode"
+ ],
+ "settings": {
+ "files.eol": "\n",
+ "files.trimTrailingWhitespace": true,
+ "editor.tabSize": 4,
+ "editor.formatOnPaste": false,
+ "editor.formatOnSave": true,
+ "editor.formatOnType": true,
+ "editor.defaultFormatter": "esbenp.prettier-vscode",
+ // Ruff config
+ "notebook.formatOnSave.enabled": false,
+ "notebook.codeActionsOnSave": {
+ "notebook.source.fixAll": false,
+ "notebook.source.organizeImports": false
+ },
+ "[python]": {
+ "editor.formatOnSave": true,
+ "editor.codeActionsOnSave": {
+ "source.fixAll": true,
+ "source.organizeImports": true
+ },
+ "editor.defaultFormatter": "charliermarsh.ruff"
+ }
+ // End Ruff config
+ }
+ }
+ },
+ "remoteUser": "vscode",
+ "features": {
+ "ghcr.io/devcontainers/features/rust:1": {}
+ }
+}
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..94f480d
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1 @@
+* text=auto eol=lf
\ No newline at end of file
diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
new file mode 100644
index 0000000..7aab465
--- /dev/null
+++ b/.github/FUNDING.yml
@@ -0,0 +1,2 @@
+# Want to support the project? Donate here!
+custom: https://www.buymeacoffee.com/alexdelprete
diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml
new file mode 100644
index 0000000..92fe7a5
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug.yml
@@ -0,0 +1,55 @@
+---
+name: "Bug report"
+description: "Report a bug with the integration"
+labels: "Bug"
+body:
+- type: markdown
+ attributes:
+ value: Before you open a new issue, search through the existing issues to see if others have had the same problem.
+- type: textarea
+ attributes:
+ label: "System Health details"
+ description: "Paste the data from the System Health card in Home Assistant (https://www.home-assistant.io/more-info/system-health#github-issues)"
+ validations:
+ required: true
+- type: checkboxes
+ attributes:
+ label: Checklist
+ options:
+ - label: I have enabled debug logging for my installation.
+ required: true
+ - label: I have filled out the issue template to the best of my ability.
+ required: true
+ - label: This issue only contains 1 issue (if you have multiple issues, open one issue for each issue).
+ required: true
+ - label: This issue is not a duplicate issue of any [previous issues](https://github.com/ludeeus/integration_blueprint/issues?q=is%3Aissue+label%3A%22Bug%22+).
+ required: true
+- type: textarea
+ attributes:
+ label: "Describe the issue"
+ description: "A clear and concise description of what the issue is."
+ validations:
+ required: true
+- type: textarea
+ attributes:
+ label: Reproduction steps
+ description: "Without steps to reproduce, it will be hard to fix. It is very important that you fill out this part. Issues without it will be closed."
+ value: |
+ 1.
+ 2.
+ 3.
+ ...
+ validations:
+ required: true
+- type: textarea
+ attributes:
+ label: "Debug logs"
+ description: "To enable debug logs check this https://www.home-assistant.io/integrations/logger/, this **needs** to include _everything_ from startup of Home Assistant to the point where you encounter the issue."
+ render: text
+ validations:
+ required: true
+
+- type: textarea
+ attributes:
+ label: "Diagnostics dump"
+ description: "Drag the diagnostics dump file here. (see https://www.home-assistant.io/integrations/diagnostics/ for info)"
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 0000000..b0b5ee8
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,11 @@
+blank_issues_enabled: false
+contact_links:
+ - name: I need support
+ url: https://github.com/alexdelprete/ha-abb-powerone-pvi-sunspec/discussions/new?category=support
+ about: Questions? Problems? Get help here
+ - name: Feature request
+ url: https://github.com/alexdelprete/ha-abb-powerone-pvi-sunspec/discussions/new?category=ideas
+ about: Share ideas for new features
+ - name: Anything else
+ url: https://github.com/alexdelprete/ha-abb-powerone-pvi-sunspec/discussions/new?category=general
+ about: If it's something else or you're not sure, open a discussion
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
new file mode 100644
index 0000000..433467b
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -0,0 +1,47 @@
+---
+name: "Feature request"
+description: "Suggest an idea for this project"
+labels: "Feature+Request"
+body:
+- type: markdown
+ attributes:
+ value: Before you open a new feature request, search through the existing feature requests to see if others have had the same idea.
+- type: checkboxes
+ attributes:
+ label: Checklist
+ options:
+ - label: I have filled out the template to the best of my ability.
+ required: true
+ - label: This only contains 1 feature request (if you have multiple feature requests, open one feature request for each feature request).
+ required: true
+ - label: This issue is not a duplicate feature request of [previous feature requests](https://github.com/ludeeus/integration_blueprint/issues?q=is%3Aissue+label%3A%22Feature+Request%22+).
+ required: true
+
+- type: textarea
+ attributes:
+ label: "Is your feature request related to a problem? Please describe."
+ description: "A clear and concise description of what the problem is."
+ placeholder: "I'm always frustrated when [...]"
+ validations:
+ required: true
+
+- type: textarea
+ attributes:
+ label: "Describe the solution you'd like"
+ description: "A clear and concise description of what you want to happen."
+ validations:
+ required: true
+
+- type: textarea
+ attributes:
+ label: "Describe alternatives you've considered"
+ description: "A clear and concise description of any alternative solutions or features you've considered."
+ validations:
+ required: true
+
+- type: textarea
+ attributes:
+ label: "Additional context"
+ description: "Add any other context or screenshots about the feature request here."
+ validations:
+ required: true
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..28adb41
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,15 @@
+# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
+version: 2
+updates:
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: "daily"
+
+ - package-ecosystem: "pip"
+ directory: "/"
+ schedule:
+ interval: "daily"
+ ignore:
+ # Dependabot should not update Home Assistant as that should match the homeassistant key in hacs.json
+ - dependency-name: "homeassistant"
\ No newline at end of file
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
new file mode 100644
index 0000000..4799b87
--- /dev/null
+++ b/.github/workflows/lint.yml
@@ -0,0 +1,45 @@
+name: "Lint"
+
+on:
+ push:
+ branches:
+ - master
+ pull_request:
+
+jobs:
+ ruff:
+ name: "Ruff"
+ runs-on: "ubuntu-latest"
+
+ # see: https://github.com/stefanzweifel/git-auto-commit-action#usage
+ permissions:
+ # Give the default GITHUB_TOKEN write permission to commit and push the
+ # added or changed files to the repository.
+ contents: write
+
+ steps:
+ - name: "Checkout the repository"
+ uses: "actions/checkout@v4.1.1"
+ with:
+ token: ${{ secrets.WORKFLOW_PAT || github.token }}
+ ref: ${{ github.head_ref }}
+
+ - name: "Set up Python"
+ uses: actions/setup-python@v5.0.0
+ with:
+ python-version: "3.11"
+ cache: "pip"
+
+ - name: "Install requirements"
+ run: python3 -m pip install -r requirements.txt
+
+ - name: "Format"
+ run: python3 -m ruff format .
+
+ - name: "Check"
+ run: python3 -m ruff check .
+
+ - name: "Auto Commit"
+ uses: stefanzweifel/git-auto-commit-action@v5.0.0
+ with:
+ commit_message: 'Style fixes by ruff'
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 0000000..1267ccc
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,37 @@
+name: "Release"
+
+on:
+ release:
+ types:
+ - "published"
+
+permissions: {}
+
+jobs:
+ release:
+ name: "Release"
+ runs-on: "ubuntu-latest"
+ permissions:
+ contents: write
+ steps:
+ - name: "Checkout the repository"
+ uses: "actions/checkout@v4.1.1"
+ with:
+ token: ${{ secrets.WORKFLOW_PAT || github.token }}
+
+ - name: "Adjust version number"
+ shell: "bash"
+ run: |
+ yq -i -o json '.version="${{ github.event.release.tag_name }}"' \
+ "${{ github.workspace }}/custom_components/sinapsi_alfa/manifest.json"
+
+ - name: "ZIP the integration directory"
+ shell: "bash"
+ run: |
+ cd "${{ github.workspace }}/custom_components/sinapsi_alfa"
+ zip sinapsi_alfa.zip -r ./
+
+ - name: "Upload the ZIP file to the release"
+ uses: softprops/action-gh-release@v0.1.15
+ with:
+ files: ${{ github.workspace }}/custom_components/sinapsi_alfa/sinapsi_alfa.zip
diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml
new file mode 100644
index 0000000..42f6aa4
--- /dev/null
+++ b/.github/workflows/validate.yml
@@ -0,0 +1,39 @@
+name: "Validate"
+
+on:
+ workflow_dispatch:
+ push:
+ pull_request:
+ schedule:
+ - cron: "0 0 * * *"
+
+jobs:
+ hassfest: # https://developers.home-assistant.io/blog/2020/04/16/hassfest
+ name: "Hassfest Validation"
+ runs-on: "ubuntu-latest"
+ steps:
+ - name: "Checkout the repository"
+ uses: "actions/checkout@v4.1.1"
+ with:
+ token: ${{ secrets.WORKFLOW_PAT || github.token }}
+ ref: ${{ github.head_ref }}
+
+ - name: "Run hassfest validation"
+ uses: "home-assistant/actions/hassfest@master"
+
+ hacs: # https://github.com/hacs/action
+ name: "HACS Validation"
+ runs-on: "ubuntu-latest"
+ steps:
+ - name: "Checkout the repository"
+ uses: "actions/checkout@v4.1.1"
+ with:
+ token: ${{ secrets.WORKFLOW_PAT || github.token }}
+ ref: ${{ github.head_ref }}
+
+ - name: "Run HACS validation"
+ uses: "hacs/action@main"
+ with:
+ category: "integration"
+ # Remove this 'ignore' key when you have added brand images for your integration to https://github.com/home-assistant/brands
+ #ignore: "brands"
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..94fa16c
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,17 @@
+# artifacts
+__pycache__
+.pytest*
+*.egg-info
+*/build/*
+*/dist/*
+
+
+# misc
+.coverage
+.vscode
+coverage.xml
+
+
+# Home Assistant configuration
+config/*
+!config/configuration.yaml
\ No newline at end of file
diff --git a/.ruff.toml b/.ruff.toml
new file mode 100644
index 0000000..52bd904
--- /dev/null
+++ b/.ruff.toml
@@ -0,0 +1,67 @@
+# The contents of this file is based on https://github.com/home-assistant/core/blob/dev/pyproject.toml
+
+target-version = "py311"
+
+select = [
+ "B007", # Loop control variable {name} not used within loop body
+ "B014", # Exception handler with duplicate exception
+ "C", # complexity
+ "D", # docstrings
+ "E", # pycodestyle
+ "F", # pyflakes/autoflake
+ "ICN001", # import concentions; {name} should be imported as {asname}
+ "PGH004", # Use specific rule codes when using noqa
+ "PLC0414", # Useless import alias. Import alias does not rename original package.
+ "SIM105", # Use contextlib.suppress({exception}) instead of try-except-pass
+ "SIM117", # Merge with-statements that use the same scope
+ "SIM118", # Use {key} in {dict} instead of {key} in {dict}.keys()
+ "SIM201", # Use {left} != {right} instead of not {left} == {right}
+ "SIM212", # Use {a} if {a} else {b} instead of {b} if not {a} else {a}
+ "SIM300", # Yoda conditions. Use 'age == 42' instead of '42 == age'.
+ "SIM401", # Use get from dict with default instead of an if block
+ "T20", # flake8-print
+ "TRY004", # Prefer TypeError exception for invalid type
+ "RUF006", # Store a reference to the return value of asyncio.create_task
+ "UP", # pyupgrade
+ "W", # pycodestyle
+]
+
+ignore = [
+ "D202", # No blank lines allowed after function docstring
+ "D203", # 1 blank line required before class docstring
+ "D213", # Multi-line docstring summary should start at the second line
+ "D404", # First word of the docstring should not be This
+ "D406", # Section name should end with a newline
+ "D407", # Section name underlining
+ "D411", # Missing blank line before section
+ "E501", # line too long
+ "E731", # do not assign a lambda expression, use a def
+
+ # May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
+ "W191",
+ "E111",
+ "E114",
+ "E117",
+ "D206",
+ "D300",
+ "Q000",
+ "Q001",
+ "Q002",
+ "Q003",
+ "COM812",
+ "COM819",
+ "ISC001",
+ "ISC002",
+
+ # Disabled because ruff does not understand type of __all__ generated by a function
+ "PLE0605",
+]
+
+[flake8-pytest-style]
+fixture-parentheses = false
+
+[pyupgrade]
+keep-runtime-typing = true
+
+[mccabe]
+max-complexity = 25
\ No newline at end of file
diff --git a/.vscode/tasks.json b/.vscode/tasks.json
new file mode 100644
index 0000000..3f09cee
--- /dev/null
+++ b/.vscode/tasks.json
@@ -0,0 +1,11 @@
+{
+ "version": "2.0.0",
+ "tasks": [
+ {
+ "label": "Run Home Assistant on port 8123",
+ "type": "shell",
+ "command": "scripts/develop",
+ "problemMatcher": []
+ },
+ ]
+}
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..30ff413
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019 - 2024 Alessandro Del Prete @alexdelprete
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..36ec975
--- /dev/null
+++ b/README.md
@@ -0,0 +1,84 @@
+# HA Custom Component for Sinapsi Alfa energy monitoring device
+
+[![GitHub Release][releases-shield]][releases]
+[![BuyMeCoffee][buymecoffee-shield]][buymecoffee]
+[![Community Forum][forum-shield]][forum]
+
+_This project is not endorsed by, directly affiliated with, maintained, authorized, or sponsored by ABB or FIMER_
+
+# Introduction
+
+HA Custom Component to integrate data from ABB/Power-One/FIMER PV mono-phase and three-phase inverters that support SunSpec Modbus Models M1/M103/M160, natively or through the VSN300/VSN700 wifi logger card. The VSN300/VSN700 cards provide a SunSpec to Aurora protocol adapter so that all modbus commands are translated to the proprietary Aurora protocol.
+
+The component has been originally developed by @binsentsu for SolarEdge inverters, I adapted it, adding some features, rewriting all the registers' mapping, for my Power-One Aurora PVI-10.0-OUTD 3-phase inverter to which I added a VSN300 card. It has also been tested with an ABB TRIO-8.5-TL-OUTD-S through a VSN300 and REACT2-3.6-TL through a VSN700 datalogger.
+
+Register address map has been implemented following the vendor's specification documentation, available in the [doc](https://github.com/alexdelprete/ha-abb-powerone-pvi-sunspec/tree/master/doc) folder.
+
+
+### Features
+
+- Installation/Configuration through Config Flow UI
+- Separate sensor per register
+- Configurable TCP modbus port, also at runtime (no restart needed)
+- Configurable modbus slave address, also at runtime (no restart needed)
+- Configurable register map base address, also at runtime (no restart needed)
+- Configurable polling interval, also at runtime (no restart needed)
+- Supports SunSpec models M1, M103, M160
+
+# Installation through HACS
+
+This integration is available in [HACS][hacs] official repository. Click this button to open HA directly on the integration page so you can easily install it:
+
+[![Quick installation link](https://my.home-assistant.io/badges/hacs_repository.svg)][my-hacs]
+
+1. Either click the button above, or navigate to HACS in Home Assistant and:
+ - 'Explore & Download Repositories'
+ - Search for 'ABB Power-One PVI SunSpec'
+ - Download
+2. Restart Home Assistant
+3. Go to Settings > Devices and Services > Add Integration
+4. Search for and select 'ABB Power-One PVI SunSpec' (if the integration is not found, do a hard-refresh (ctrl+F5) in the browser)
+5. Proceed with the configuration
+
+# Manual Installation
+
+Download the source code archive from the release page. Unpack the archive and copy the contents of custom_components folder to your home-assistant config/custom_components folder. Restart Home Assistant, and then the integration can be added and configured through the native integration setup UI. If you don't see it in the native integrations list, press ctrl-F5 to refresh the browser while you're on that page and retry.
+
+# Enabling Modbus TCP on the inverter
+
+Enable Modbus TCP client on the VSN300, take note of the Unit ID (aka Slave ID) of the inverter (depends on the model, default on some models is 2 on others is 247) and during the configuration of the component, use the appropriate Slave address. Another important parameter is the registers map base address, default is 40000 but it may vary. All these parameters can be reconfigured after installation, clicking CONFIGURE on the integration.
+
+# Configuration
+
+Configuration is done via config flow right after adding the integration. After the first configuration you can change parameters (except custom name and ip/hostname) at runtime through the integration page configuration, without the need to restart HA (this works since v2.5.0).
+
+![](https://user-images.githubusercontent.com/7027842/214734702-bf899013-5e28-47b5-87a7-827e49ca465b.gif)
+
+- **custom name**: custom name for the inverter, that will be used as prefix for sensors created by the component
+- **ip/hostname**: IP/hostname of the inverter - this is used as unique_id, if you change it and reinstall you will lose historical data, that's why I advise to use hostname, so you can change IP without losing historical data
+- **tcp port**: TCP port of the datalogger
+- **slave id**: the unit id of the inverter in the chain: default is 254, if using VSN300/VSN700 it's usually 2
+- **register map base address**: the base address from where the register map starts, usually it's 40000, but for ABB VSN300/VSN700 dataloggers it's 0
+- **polling period**: frequency, in seconds, to read the registers and update the sensors
+
+
+
+# Sensor screenshot
+
+
+# Coffee
+
+_If you like this integration, I'll gladly accept some quality coffee, but please don't feel obliged._ :)
+
+[![BuyMeCoffee][buymecoffee-shield]][buymecoffee]
+
+---
+
+[buymecoffee]: https://www.buymeacoffee.com/alexdelprete
+[buymecoffee-shield]: https://img.shields.io/badge/buy%20me%20a%20coffee-donate-white?style=for-the-badge
+[hacs]: https://hacs.xyz
+[my-hacs]: https://my.home-assistant.io/redirect/hacs_repository/?owner=alexdelprete&repository=ha-abb-powerone-pvi-sunspec&category=integration
+[forum-shield]: https://img.shields.io/badge/community-forum-darkred?style=for-the-badge
+[forum]: https://community.home-assistant.io/t/custom-component-abb-power-one-fimer-pv-inverters-sunspec-modbus-tcp/316363?u=alexdelprete
+[releases-shield]: https://img.shields.io/github/v/release/alexdelprete/ha-abb-powerone-pvi-sunspec?style=for-the-badge&color=darkgreen
+[releases]: https://github.com/alexdelprete/ha-abb-powerone-pvi-sunspec/releases
diff --git a/config/configuration.yaml b/config/configuration.yaml
new file mode 100644
index 0000000..f27fc5e
--- /dev/null
+++ b/config/configuration.yaml
@@ -0,0 +1,10 @@
+# https://www.home-assistant.io/integrations/default_config/
+default_config:
+
+# https://www.home-assistant.io/integrations/logger/
+logger:
+ default: info
+ logs:
+ custom_components.sinapsi_alfa: debug
+# The debugpy integration below is enabled; comment it out if you don't need debugging (doc: https://www.home-assistant.io/integrations/debugpy/)
+debugpy:
diff --git a/custom_components/sinapsi_alfa/__init__.py b/custom_components/sinapsi_alfa/__init__.py
new file mode 100644
index 0000000..9c4dff3
--- /dev/null
+++ b/custom_components/sinapsi_alfa/__init__.py
@@ -0,0 +1,161 @@
+"""ABB Power-One PVI SunSpec Integration.
+
+https://github.com/alexdelprete/ha-abb-powerone-pvi-sunspec
+"""
+
+import asyncio
+import logging
+
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import ConfigEntryNotReady
+from homeassistant.helpers import device_registry as dr
+
+from .const import (
+ CONF_HOST,
+ CONF_NAME,
+ DATA,
+ DOMAIN,
+ PLATFORMS,
+ STARTUP_MESSAGE,
+ UPDATE_LISTENER,
+)
+from .coordinator import ABBPowerOneFimerCoordinator
+
+_LOGGER = logging.getLogger(__name__)
+
+
+def get_instance_count(hass: HomeAssistant) -> int:
+ """Return number of instances."""
+ entries = [
+ entry
+ for entry in hass.config_entries.async_entries(DOMAIN)
+ if not entry.disabled_by
+ ]
+ return len(entries)
+
+
+async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry):
+ """Set up this integration using UI."""
+ if hass.data.get(DOMAIN) is None:
+ hass.data.setdefault(DOMAIN, {})
+ _LOGGER.info(STARTUP_MESSAGE)
+
+ _LOGGER.debug(f"Setup config_entry for {DOMAIN}")
+ coordinator = ABBPowerOneFimerCoordinator(hass, config_entry)
+ # If the refresh fails, async_config_entry_first_refresh() will
+ # raise ConfigEntryNotReady and setup will try again later
+ # ref.: https://developers.home-assistant.io/docs/integration_setup_failures
+ await coordinator.async_config_entry_first_refresh()
+ if not coordinator.api.data["comm_sernum"]:
+ raise ConfigEntryNotReady(
+ f"Timeout connecting to {config_entry.data.get(CONF_NAME)}"
+ )
+
+ # Update listener for config option changes
+ update_listener = config_entry.add_update_listener(_async_update_listener)
+
+ # Add coordinator and update_listener to config_entry
+ hass.data[DOMAIN][config_entry.entry_id] = {
+ DATA: coordinator,
+ UPDATE_LISTENER: update_listener,
+ }
+
+ # Setup platforms
+ for platform in PLATFORMS:
+ hass.async_add_job(
+ hass.config_entries.async_forward_entry_setup(config_entry, platform)
+ )
+
+ # Register device
+ await async_update_device_registry(hass, config_entry)
+
+ return True
+
+
+async def async_update_device_registry(hass: HomeAssistant, config_entry):
+ """Manual device registration."""
+ coordinator = hass.data[DOMAIN][config_entry.entry_id][DATA]
+ device_registry = dr.async_get(hass)
+ device_registry.async_get_or_create(
+ config_entry_id=config_entry.entry_id,
+ hw_version=None,
+ configuration_url=f"http://{config_entry.data.get(CONF_HOST)}",
+ identifiers={(DOMAIN, coordinator.api.data["comm_sernum"])},
+ manufacturer=coordinator.api.data["comm_manufact"],
+ model=coordinator.api.data["comm_model"],
+ name=config_entry.data.get(CONF_NAME),
+ serial_number=coordinator.api.data["comm_sernum"],
+ sw_version=coordinator.api.data["comm_version"],
+ via_device=None,
+ )
+
+
+async def _async_update_listener(hass: HomeAssistant, config_entry):
+ """Handle options update."""
+ await hass.config_entries.async_reload(config_entry.entry_id)
+
+
+async def async_remove_config_entry_device(
+ hass: HomeAssistant, config_entry, device_entry
+) -> bool:
+ """Delete device if not entities."""
+ if DOMAIN in device_entry.identifiers:
+ _LOGGER.error(
+ "You cannot delete the device using device delete. Remove the integration instead."
+ )
+ return False
+ return True
+
+
+async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
+ """Handle removal of config_entry."""
+ _LOGGER.debug("Unload config_entry")
+ # Check if there are other instances
+ if get_instance_count(hass) == 0:
+ _LOGGER.debug("Unload config_entry: no more entries found")
+
+ _LOGGER.debug("Unload integration platforms")
+ # Unload a config entry
+ unloaded = all(
+ await asyncio.gather(
+ *[
+ hass.config_entries.async_forward_entry_unload(config_entry, platform)
+ for platform in PLATFORMS
+ ]
+ )
+ )
+
+ _LOGGER.debug("Detach config update listener")
+ hass.data[DOMAIN][config_entry.entry_id][UPDATE_LISTENER]()
+
+ if unloaded:
+ _LOGGER.debug("Unload integration")
+ hass.data[DOMAIN].pop(config_entry.entry_id)
+ return True # unloaded
+ else:
+ _LOGGER.debug("Unload config_entry failed: integration not unloaded")
+ return False # unload failed
+
+
+# Sample migration code in case it's needed
+# async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry):
+# """Migrate an old config_entry."""
+# version = config_entry.version
+
+# # 1-> 2: Migration format
+# if version == 1:
+# # Get handler to coordinator from config
+# coordinator = hass.data[DOMAIN][config_entry.entry_id][DATA]
+# _LOGGER.debug("Migrating from version %s", version)
+# old_uid = config_entry.unique_id
+# new_uid = coordinator.api.data["comm_sernum"]
+# if old_uid != new_uid:
+# hass.config_entries.async_update_entry(
+# config_entry, unique_id=new_uid
+# )
+# _LOGGER.debug("Migration to version %s complete: OLD_UID: %s - NEW_UID: %s", config_entry.version, old_uid, new_uid)
+# if config_entry.unique_id == new_uid:
+# config_entry.version = 2
+# _LOGGER.debug("Migration to version %s complete: NEW_UID: %s", config_entry.version, config_entry.unique_id)
+# return True
diff --git a/custom_components/sinapsi_alfa/api.py b/custom_components/sinapsi_alfa/api.py
new file mode 100644
index 0000000..89690cc
--- /dev/null
+++ b/custom_components/sinapsi_alfa/api.py
@@ -0,0 +1,647 @@
+"""API Platform for ABB Power-One PVI SunSpec.
+
+https://github.com/alexdelprete/ha-abb-powerone-pvi-sunspec
+"""
+
+import logging
+import socket
+import threading
+
+from pymodbus.client import ModbusTcpClient
+from pymodbus.constants import Endian
+from pymodbus.exceptions import ConnectionException, ModbusException
+from pymodbus.payload import BinaryPayloadDecoder
+
+from .const import DEVICE_GLOBAL_STATUS, DEVICE_MODEL, DEVICE_STATUS, INVERTER_TYPE
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class ConnectionError(Exception):
+ """Empty Error Class."""
+
+ pass
+
+
+class ModbusError(Exception):
+ """Empty Error Class."""
+
+ pass
+
+
+class ABBPowerOneFimerAPI:
+ """Thread safe wrapper class for pymodbus."""
+
+ def __init__(
+ self,
+ hass,
+ name,
+ host,
+ port,
+ slave_id,
+ base_addr,
+ scan_interval,
+ ):
+ """Initialize the Modbus API Client."""
+ self._hass = hass
+ self._name = name
+ self._host = host
+ self._port = port
+ self._slave_id = slave_id
+ self._base_addr = base_addr
+ self._update_interval = scan_interval
+ # Ensure ModBus Timeout is 1s less than scan_interval
+ # https://github.com/binsentsu/home-assistant-solaredge-modbus/pull/183
+ self._timeout = self._update_interval - 1
+ self._client = ModbusTcpClient(
+ host=self._host, port=self._port, timeout=self._timeout
+ )
+ self._lock = threading.Lock()
+ self._sensors = []
+ self.data = {}
+ # Initialize ModBus data structure before first read
+ self.data["accurrent"] = 1
+ self.data["accurrenta"] = 1
+ self.data["accurrentb"] = 1
+ self.data["accurrentc"] = 1
+ self.data["acvoltageab"] = 1
+ self.data["acvoltagebc"] = 1
+ self.data["acvoltageca"] = 1
+ self.data["acvoltagean"] = 1
+ self.data["acvoltagebn"] = 1
+ self.data["acvoltagecn"] = 1
+ self.data["acpower"] = 1
+ self.data["acfreq"] = 1
+ self.data["comm_options"] = 1
+ self.data["comm_manufact"] = ""
+ self.data["comm_model"] = ""
+ self.data["comm_version"] = ""
+ self.data["comm_sernum"] = ""
+ self.data["mppt_nr"] = 1
+ self.data["dccurr"] = 1
+ self.data["dcvolt"] = 1
+ self.data["dcpower"] = 1
+ self.data["dc1curr"] = 1
+ self.data["dc1volt"] = 1
+ self.data["dc1power"] = 1
+ self.data["dc2curr"] = 1
+ self.data["dc2volt"] = 1
+ self.data["dc2power"] = 1
+ self.data["invtype"] = ""
+ self.data["status"] = ""
+ self.data["statusvendor"] = ""
+ self.data["totalenergy"] = 1
+ self.data["tempcab"] = 1
+ self.data["tempoth"] = 1
+
+ @property
+ def name(self):
+ """Return the device name."""
+ return self._name
+
+ @property
+ def host(self):
+ """Return the device name."""
+ return self._host
+
+ def check_port(self) -> bool:
+ """Check if port is available."""
+ with self._lock:
+ sock_timeout = float(3)
+ _LOGGER.debug(
+ f"Check_Port: opening socket on {self._host}:{self._port} with a {sock_timeout}s timeout."
+ )
+ socket.setdefaulttimeout(sock_timeout)
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock_res = sock.connect_ex((self._host, self._port))
+ is_open = sock_res == 0 # True if open, False if not
+ if is_open:
+ sock.shutdown(socket.SHUT_RDWR)
+ _LOGGER.debug(
+ f"Check_Port (SUCCESS): port open on {self._host}:{self._port}"
+ )
+ else:
+ _LOGGER.debug(
+ f"Check_Port (ERROR): port not available on {self._host}:{self._port} - error: {sock_res}"
+ )
+ sock.close()
+ return is_open
+
+ def close(self):
+ """Disconnect client."""
+ try:
+ if self._client.is_socket_open():
+ _LOGGER.debug("Closing Modbus TCP connection")
+ with self._lock:
+ self._client.close()
+ return True
+ else:
+ _LOGGER.debug("Modbus TCP connection already closed")
+ except ConnectionException as connect_error:
+ _LOGGER.debug(f"Close Connection connect_error: {connect_error}")
+ raise ConnectionError() from connect_error
+
+ def connect(self):
+ """Connect client."""
+ _LOGGER.debug(
+ f"API Client connect to IP: {self._host} port: {self._port} slave id: {self._slave_id} timeout: {self._timeout}"
+ )
+ if self.check_port():
+ _LOGGER.debug("Inverter ready for Modbus TCP connection")
+ try:
+ with self._lock:
+ self._client.connect()
+ if not self._client.connected:
+ raise ConnectionError(
+ f"Failed to connect to {self._host}:{self._port} slave id {self._slave_id} timeout: {self._timeout}"
+ )
+ else:
+ _LOGGER.debug("Modbus TCP Client connected")
+ return True
+ except ModbusException:
+ raise ConnectionError(
+ f"Failed to connect to {self._host}:{self._port} slave id {self._slave_id} timeout: {self._timeout}"
+ )
+ else:
+ _LOGGER.debug("Inverter not ready for Modbus TCP connection")
+ raise ConnectionError(f"Inverter not active on {self._host}:{self._port}")
+
+ def read_holding_registers(self, slave, address, count):
+ """Read holding registers."""
+ kwargs = {"slave": slave} if slave else {}
+ try:
+ with self._lock:
+ return self._client.read_holding_registers(address, count, **kwargs)
+ except ConnectionException as connect_error:
+ _LOGGER.debug(f"Read Holding Registers connect_error: {connect_error}")
+ raise ConnectionError() from connect_error
+ except ModbusException as modbus_error:
+ _LOGGER.debug(f"Read Holding Registers modbus_error: {modbus_error}")
+ raise ModbusError() from modbus_error
+
+ def calculate_value(self, value, scalefactor):
+ """Apply Scale Factor."""
+ return value * 10**scalefactor
+
+ async def async_get_data(self):
+ """Read Data Function."""
+
+ try:
+ if self.connect():
+ _LOGGER.debug(
+ "Start Get data (Slave ID: %s - Base Address: %s)",
+ self._slave_id,
+ self._base_addr,
+ )
+ # HA way to call a sync function from async function
+ # https://developers.home-assistant.io/docs/asyncio_working_with_async?#calling-sync-functions-from-async
+ result = await self._hass.async_add_executor_job(
+ self.read_sunspec_modbus
+ )
+ self.close()
+ _LOGGER.debug("End Get data")
+ if result:
+ _LOGGER.debug("Get Data Result: valid")
+ return True
+ else:
+ _LOGGER.debug("Get Data Result: invalid")
+ return False
+ else:
+ _LOGGER.debug("Get Data failed: client not connected")
+ return False
+ except ConnectionException as connect_error:
+ _LOGGER.debug(f"Async Get Data connect_error: {connect_error}")
+ raise ConnectionError() from connect_error
+ except ModbusException as modbus_error:
+ _LOGGER.debug(f"Async Get Data modbus_error: {modbus_error}")
+ raise ModbusError() from modbus_error
+
+ def read_sunspec_modbus(self):
+ """Read Modbus Data Function."""
+ try:
+ self.read_sunspec_modbus_model_1()
+ self.read_sunspec_modbus_model_101_103()
+ self.read_sunspec_modbus_model_160()
+ result = True
+ _LOGGER.debug(f"read_sunspec_modbus: success {result}")
+ except Exception as modbus_error:
+ _LOGGER.debug(f"read_sunspec_modbus: failed with error: {modbus_error}")
+ result = False
+ raise ModbusError() from modbus_error
+ return result
+
+ def read_sunspec_modbus_model_1(self):
+ """Read SunSpec Model 1 Data."""
+ # A single register is 2 bytes. Max number of registers in one read for Modbus/TCP is 123
+ # https://control.com/forums/threads/maximum-amount-of-holding-registers-per-request.9904/post-86251
+ #
+ # So we have to do 2 read-cycles, one for M1 and the other for M103+M160
+ #
+ # Start address 4 read 64 registers to read M1 (Common Inverter Info) in 1-pass
+ # Start address 72 read 92 registers to read (M101 or M103)+M160 (Realtime Power/Energy Data) in 1-pass
+ try:
+ read_model_1_data = self.read_holding_registers(
+ slave=self._slave_id, address=(self._base_addr + 4), count=64
+ )
+ _LOGGER.debug("(read_rt_1) Slave ID: %s", self._slave_id)
+ _LOGGER.debug("(read_rt_1) Base Address: %s", self._base_addr)
+ except ModbusException as modbus_error:
+ _LOGGER.debug(f"ReadM1 modbus_error: {modbus_error}")
+ raise ModbusError() from modbus_error
+
+ # No connection errors, we can start scraping registers
+ decoder = BinaryPayloadDecoder.fromRegisters(
+ read_model_1_data.registers, byteorder=Endian.BIG
+ )
+
+ # registers 4 to 43
+ comm_manufact = str.strip(decoder.decode_string(size=32).decode("ascii"))
+ comm_model = str.strip(decoder.decode_string(size=32).decode("ascii"))
+ comm_options = str.strip(decoder.decode_string(size=16).decode("ascii"))
+ self.data["comm_manufact"] = comm_manufact.rstrip(" \t\r\n\0\u0000")
+ self.data["comm_model"] = comm_model.rstrip(" \t\r\n\0\u0000")
+ self.data["comm_options"] = comm_options.rstrip(" \t\r\n\0\u0000")
+ _LOGGER.debug("(read_rt_1) Manufacturer: %s", self.data["comm_manufact"])
+ _LOGGER.debug("(read_rt_1) Model: %s", self.data["comm_model"])
+ _LOGGER.debug("(read_rt_1) Options: %s", self.data["comm_options"])
+
+ # Model based on options register, if unknown, raise an error to report it
+ # First char is the model: if non-printable char, hex string of the char is provided
+ # So we need to check if it's a char or an hex value string and convert both to a number
+ # Then we lookup in the model table, if it's there, good, otherwise we provide the given model
+ # test also with opt_model = '0x0DED/0xFFFF'
+ opt_model = self.data["comm_options"]
+ if opt_model.startswith("0x"):
+ opt_model_int = int(opt_model[0:4], 16)
+ _LOGGER.debug(
+ "(opt_notprintable) opt_model: %s - opt_model_int: %s",
+ opt_model,
+ opt_model_int,
+ )
+ else:
+ opt_model_int = ord(opt_model[0])
+ _LOGGER.debug(
+ "(opt_printable) opt_model: %s - opt_model_int: %s",
+ opt_model,
+ opt_model_int,
+ )
+ if opt_model_int in DEVICE_MODEL:
+ self.data["comm_model"] = DEVICE_MODEL[opt_model_int]
+ _LOGGER.debug("(opt_comm_model) comm_model: %s", self.data["comm_model"])
+ else:
+ _LOGGER.error(
+ "(opt_comm_model) Model unknown, report to @alexdelprete on the forum the following data: Manuf.: %s - Model: %s - Options: %s - OptModel: %s - OptModelInt: %s",
+ self.data["comm_manufact"],
+ self.data["comm_model"],
+ self.data["comm_options"],
+ opt_model,
+ opt_model_int,
+ )
+
+ # registers 44 to 67
+ comm_version = str.strip(decoder.decode_string(size=16).decode("ascii"))
+ comm_sernum = str.strip(decoder.decode_string(size=32).decode("ascii"))
+ self.data["comm_version"] = comm_version.rstrip(" \t\r\n\0\u0000")
+ self.data["comm_sernum"] = comm_sernum.rstrip(" \t\r\n\0\u0000")
+ _LOGGER.debug("(read_rt_1) Version: %s", self.data["comm_version"])
+ _LOGGER.debug("(read_rt_1) Sernum: %s", self.data["comm_sernum"])
+
+ return True
+
+ def read_sunspec_modbus_model_101_103(self):
+ """Read SunSpec Model 101/103 Data."""
+ # Max number of registers in one read for Modbus/TCP is 123
+ # (ref.: https://control.com/forums/threads/maximum-amount-of-holding-registers-per-request.9904/post-86251)
+ #
+ # So we could do 2 sweeps, one for M1 and the other for M103+M160. Since some old inverters have problems
+ # with large sweeps, we'll split it in 3 sweeps:
+ # - Sweep 1 (M1): Start address 4 read 64 registers to read M1 (Common Inverter Info)
+ # - Sweep 2 (M103): Start address 70 read 40 registers to read M103+M160 (Realtime Power/Energy Data)
+ # - Sweep 3 (M160): Start address 124 read 40 registers to read M1 (Common Inverter Info)
+ try:
+ read_model_101_103_data = self.read_holding_registers(
+ slave=self._slave_id, address=(self._base_addr + 70), count=40
+ )
+ _LOGGER.debug("(read_rt_101_103) Slave ID: %s", self._slave_id)
+ _LOGGER.debug("(read_rt_101_103) Base Address: %s", self._base_addr)
+ except ModbusException as modbus_error:
+ _LOGGER.debug(f"Read M101/M103 modbus_error: {modbus_error}")
+ raise ModbusError() from modbus_error
+
+ # No connection errors, we can start scraping registers
+ decoder = BinaryPayloadDecoder.fromRegisters(
+ read_model_101_103_data.registers, byteorder=Endian.BIG
+ )
+
+ # register 70
+ invtype = decoder.decode_16bit_uint()
+ _LOGGER.debug("(read_rt_101_103) Inverter Type (int): %s", invtype)
+ _LOGGER.debug(
+ "(read_rt_101_103) Inverter Type (str): %s", INVERTER_TYPE[invtype]
+ )
+ # make sure the value is in the known status list
+ if invtype not in INVERTER_TYPE:
+ invtype = 999
+ _LOGGER.debug("(read_rt_101_103) Inverter Type Unknown (int): %s", invtype)
+ _LOGGER.debug(
+ "(read_rt_101_103) Inverter Type Unknown (str): %s",
+ INVERTER_TYPE[invtype],
+ )
+ self.data["invtype"] = INVERTER_TYPE[invtype]
+
+ # skip register 71
+ decoder.skip_bytes(2)
+
+ # registers 72 to 76
+ accurrent = decoder.decode_16bit_uint()
+
+ if invtype == 103:
+ accurrenta = decoder.decode_16bit_uint()
+ accurrentb = decoder.decode_16bit_uint()
+ accurrentc = decoder.decode_16bit_uint()
+ else:
+ decoder.skip_bytes(6)
+
+ accurrentsf = decoder.decode_16bit_int()
+ accurrent = self.calculate_value(accurrent, accurrentsf)
+ self.data["accurrent"] = round(accurrent, abs(accurrentsf))
+
+ if invtype == 103:
+ accurrenta = self.calculate_value(accurrenta, accurrentsf)
+ accurrentb = self.calculate_value(accurrentb, accurrentsf)
+ accurrentc = self.calculate_value(accurrentc, accurrentsf)
+ self.data["accurrenta"] = round(accurrenta, abs(accurrentsf))
+ self.data["accurrentb"] = round(accurrentb, abs(accurrentsf))
+ self.data["accurrentc"] = round(accurrentc, abs(accurrentsf))
+
+ # registers 77 to 83
+ if invtype == 103:
+ acvoltageab = decoder.decode_16bit_uint()
+ acvoltagebc = decoder.decode_16bit_uint()
+ acvoltageca = decoder.decode_16bit_uint()
+ else:
+ decoder.skip_bytes(6)
+
+ acvoltagean = decoder.decode_16bit_uint()
+
+ if invtype == 103:
+ acvoltagebn = decoder.decode_16bit_uint()
+ acvoltagecn = decoder.decode_16bit_uint()
+ else:
+ decoder.skip_bytes(4)
+
+ acvoltagesf = decoder.decode_16bit_int()
+
+ acvoltagean = self.calculate_value(acvoltagean, acvoltagesf)
+ self.data["acvoltagean"] = round(acvoltagean, abs(acvoltagesf))
+
+ if invtype == 103:
+ acvoltageab = self.calculate_value(acvoltageab, acvoltagesf)
+ acvoltagebc = self.calculate_value(acvoltagebc, acvoltagesf)
+ acvoltageca = self.calculate_value(acvoltageca, acvoltagesf)
+ acvoltagebn = self.calculate_value(acvoltagebn, acvoltagesf)
+ acvoltagecn = self.calculate_value(acvoltagecn, acvoltagesf)
+ self.data["acvoltageab"] = round(acvoltageab, abs(acvoltagesf))
+ self.data["acvoltagebc"] = round(acvoltagebc, abs(acvoltagesf))
+ self.data["acvoltageca"] = round(acvoltageca, abs(acvoltagesf))
+ self.data["acvoltagebn"] = round(acvoltagebn, abs(acvoltagesf))
+ self.data["acvoltagecn"] = round(acvoltagecn, abs(acvoltagesf))
+
+ # registers 84 to 85
+ acpower = decoder.decode_16bit_int()
+ acpowersf = decoder.decode_16bit_int()
+ acpower = self.calculate_value(acpower, acpowersf)
+ self.data["acpower"] = round(acpower, abs(acpowersf))
+
+ # registers 86 to 87
+ acfreq = decoder.decode_16bit_uint()
+ acfreqsf = decoder.decode_16bit_int()
+ acfreq = self.calculate_value(acfreq, acfreqsf)
+ self.data["acfreq"] = round(acfreq, abs(acfreqsf))
+
+ # skip register 88-93
+ decoder.skip_bytes(12)
+
+ # registers 94 to 96
+ totalenergy = decoder.decode_32bit_uint()
+ totalenergysf = decoder.decode_16bit_uint()
+ totalenergy = self.calculate_value(totalenergy, totalenergysf)
+ # ensure that totalenergy is always an increasing value (total_increasing)
+ _LOGGER.debug("(read_rt_101_103) Total Energy Value Read: %s", totalenergy)
+ _LOGGER.debug(
+ "(read_rt_101_103) Total Energy Previous Value: %s",
+ self.data["totalenergy"],
+ )
+ if totalenergy < self.data["totalenergy"]:
+ _LOGGER.error(
+ "(read_rt_101_103) Total Energy less than previous value! Value Read: %s - Previous Value: %s",
+ totalenergy,
+ self.data["totalenergy"],
+ )
+ else:
+ self.data["totalenergy"] = totalenergy
+
+ # registers 97 to 100 (for monophase inverters)
+ if invtype == 101:
+ dccurr = decoder.decode_16bit_int()
+ dccurrsf = decoder.decode_16bit_int()
+ dcvolt = decoder.decode_16bit_int()
+ dcvoltsf = decoder.decode_16bit_int()
+ dccurr = self.calculate_value(dccurr, dccurrsf)
+ dcvolt = self.calculate_value(dcvolt, dcvoltsf)
+ self.data["dccurr"] = round(dccurr, abs(dccurrsf))
+ self.data["dcvolt"] = round(dcvolt, abs(dcvoltsf))
+ _LOGGER.debug(
+ "(read_rt_101_103) DC Current Value read: %s", self.data["dccurr"]
+ )
+ _LOGGER.debug(
+ "(read_rt_101_103) DC Voltage Value read: %s", self.data["dcvolt"]
+ )
+ else:
+ decoder.skip_bytes(8)
+
+ # registers 101 to 102
+ dcpower = decoder.decode_16bit_int()
+ dcpowersf = decoder.decode_16bit_int()
+ dcpower = self.calculate_value(dcpower, dcpowersf)
+ self.data["dcpower"] = round(dcpower, abs(dcpowersf))
+ _LOGGER.debug("(read_rt_101_103) DC Power Value read: %s", self.data["dcpower"])
+ # register 103
+ tempcab = decoder.decode_16bit_int()
+ # skip registers 104-105
+ decoder.skip_bytes(4)
+ # register 106 to 107
+ tempoth = decoder.decode_16bit_int()
+ tempsf = decoder.decode_16bit_int()
+ # Fix for tempcab: in some inverters SF must be -2 not -1 as per specs
+ tempcab_fix = tempcab
+ tempcab = self.calculate_value(tempcab, tempsf)
+ if tempcab > 50:
+ tempcab = self.calculate_value(tempcab_fix, -2)
+ tempoth = self.calculate_value(tempoth, tempsf)
+ self.data["tempoth"] = round(tempoth, abs(tempsf))
+ self.data["tempcab"] = round(tempcab, abs(tempsf))
+ _LOGGER.debug("(read_rt_101_103) Temp Oth Value read: %s", self.data["tempoth"])
+ _LOGGER.debug("(read_rt_101_103) Temp Cab Value read: %s", self.data["tempcab"])
+ # register 108
+ status = decoder.decode_16bit_int()
+ # make sure the value is in the known status list
+ if status not in DEVICE_STATUS:
+ _LOGGER.debug("Unknown Operating State: %s", status)
+ status = 999
+ self.data["status"] = DEVICE_STATUS[status]
+ _LOGGER.debug(
+ "(read_rt_101_103) Device Status Value read: %s", self.data["status"]
+ )
+
+ # register 109
+ statusvendor = decoder.decode_16bit_int()
+ # make sure the value is in the known status list
+ if statusvendor not in DEVICE_GLOBAL_STATUS:
+ _LOGGER.debug(
+ "(read_rt_101_103) Unknown Vendor Operating State: %s", statusvendor
+ )
+ statusvendor = 999
+ self.data["statusvendor"] = DEVICE_GLOBAL_STATUS[statusvendor]
+ _LOGGER.debug(
+ "(read_rt_101_103) Status Vendor Value read: %s", self.data["statusvendor"]
+ )
+ _LOGGER.debug("(read_rt_101_103) Completed")
+ return True
+
+ def read_sunspec_modbus_model_160(self):
+ """Read SunSpec Model 160 Data."""
+ # Max number of registers in one read for Modbus/TCP is 123
+ # https://control.com/forums/threads/maximum-amount-of-holding-registers-per-request.9904/post-86251
+ #
+ # So we have to do 2 read-cycles, one for M1 and the other for M103+M160
+ #
+ # Start address 4 read 64 registers to read M1 (Common Inverter Info) in 1-pass
+ # Start address 70 read 94 registers to read M103+M160 (Realtime Power/Energy Data) in 1-pass
+ try:
+ read_model_160_data = self.read_holding_registers(
+ slave=self._slave_id, address=(self._base_addr + 122), count=42
+ )
+ _LOGGER.debug("(read_rt_160) Slave ID: %s", self._slave_id)
+ _LOGGER.debug("(read_rt_160) Base Address: %s", self._base_addr)
+ except ModbusException as modbus_error:
+ _LOGGER.debug(f"Read M160 modbus_error: {modbus_error}")
+ raise ModbusError() from modbus_error
+
+ # No connection errors, we can start scraping registers
+ decoder = BinaryPayloadDecoder.fromRegisters(
+ read_model_160_data.registers, byteorder=Endian.BIG
+ )
+
+ # register 122
+ multi_mppt_id = decoder.decode_16bit_int()
+
+ # Model 160 has different offset for UNO-DM-PLUS and REACT2 inverters
+ # need to check and try the specific offset address (start address is 41104)
+ if multi_mppt_id != 160:
+ _LOGGER.debug(
+ "(read_rt_160) Model not 160 try another offset - multi_mppt_id: %s",
+ multi_mppt_id,
+ )
+ try:
+ # try address 41104 for UNO-DM-PLUS and REACT2
+ read_model_160_data = self.read_holding_registers(
+ slave=self._slave_id, address=(self._base_addr + 1104), count=42
+ )
+ _LOGGER.debug("(read_rt_160) Slave ID: %s", self._slave_id)
+ _LOGGER.debug("(read_rt_160) Base Address: %s", self._base_addr)
+ except ModbusException as modbus_error:
+ _LOGGER.debug(f"Read M160 modbus_error: {modbus_error}")
+ raise ModbusError() from modbus_error
+ # No connection errors, we can start scraping registers
+ decoder = BinaryPayloadDecoder.fromRegisters(
+ read_model_160_data.registers, byteorder=Endian.BIG
+ )
+
+ # register 122
+ multi_mppt_id = decoder.decode_16bit_int()
+
+ if multi_mppt_id != 160:
+ _LOGGER.debug(
+ "(read_rt_160) Model not 160 (UNO-DM/REACT2) - multi_mppt_id: %s",
+ multi_mppt_id,
+ )
+ return False
+ else:
+ _LOGGER.debug(
+ "(read_rt_160) Model is 160 (UNO-DM/REACT2) - multi_mppt_id: %s",
+ multi_mppt_id,
+ )
+ else:
+ _LOGGER.debug(
+ "(read_rt_160) Model is 160 - multi_mppt_id: %s", multi_mppt_id
+ )
+
+ # skip register 123
+ decoder.skip_bytes(2)
+
+ # registers 124 to 126
+ dcasf = decoder.decode_16bit_int()
+ dcvsf = decoder.decode_16bit_int()
+ dcwsf = decoder.decode_16bit_int()
+
+ # skip register 127 to 129
+ decoder.skip_bytes(6)
+
+ # register 130 (# of DC modules)
+ multi_mppt_nr = decoder.decode_16bit_int()
+ self.data["mppt_nr"] = multi_mppt_nr
+ _LOGGER.debug("(read_rt_160) mppt_nr %s", multi_mppt_nr)
+
+ # if we have at least one DC module
+ if multi_mppt_nr >= 1:
+ # skip register 131 to 140
+ decoder.skip_bytes(20)
+
+ # registers 141 to 143
+ dc1curr = decoder.decode_16bit_uint()
+ dc1volt = decoder.decode_16bit_uint()
+ dc1power = decoder.decode_16bit_uint()
+ dc1curr = self.calculate_value(dc1curr, dcasf)
+ self.data["dc1curr"] = round(dc1curr, abs(dcasf))
+ dc1volt = self.calculate_value(dc1volt, dcvsf)
+ self.data["dc1volt"] = round(dc1volt, abs(dcvsf))
+ # this fixes dcvolt -0.0 for UNO-DM/REACT2 models
+ self.data["dcvolt"] = round(dc1volt, abs(dcvsf))
+ dc1power = self.calculate_value(dc1power, dcwsf)
+ self.data["dc1power"] = round(dc1power, abs(dcwsf))
+ _LOGGER.debug(
+ "(read_rt_160) dc1curr: %s Round: %s SF: %s",
+ dc1curr,
+ self.data["dc1curr"],
+ dcasf,
+ )
+ _LOGGER.debug("(read_rt_160) dc1volt %s", self.data["dc1volt"])
+ _LOGGER.debug("(read_rt_160) dc1power %s", self.data["dc1power"])
+
+ # if we have more than one DC module
+ if multi_mppt_nr > 1:
+ # skip register 144 to 160
+ decoder.skip_bytes(34)
+
+ # registers 161 to 163
+ dc2curr = decoder.decode_16bit_uint()
+ dc2volt = decoder.decode_16bit_uint()
+ dc2power = decoder.decode_16bit_uint()
+ dc2curr = self.calculate_value(dc2curr, dcasf)
+ self.data["dc2curr"] = round(dc2curr, abs(dcasf))
+ dc2volt = self.calculate_value(dc2volt, dcvsf)
+ self.data["dc2volt"] = round(dc2volt, abs(dcvsf))
+ dc2power = self.calculate_value(dc2power, dcwsf)
+ self.data["dc2power"] = round(dc2power, abs(dcwsf))
+ _LOGGER.debug(
+ "(read_rt_160) dc2curr: %s Round: %s SF: %s",
+ dc2curr,
+ self.data["dc2curr"],
+ dcasf,
+ )
+ _LOGGER.debug("(read_rt_160) dc2volt %s", self.data["dc2volt"])
+ _LOGGER.debug("(read_rt_160) dc2power %s", self.data["dc2power"])
+
+ _LOGGER.debug("(read_rt_160) Completed")
+ return True
diff --git a/custom_components/sinapsi_alfa/config_flow.py b/custom_components/sinapsi_alfa/config_flow.py
new file mode 100644
index 0000000..23ae319
--- /dev/null
+++ b/custom_components/sinapsi_alfa/config_flow.py
@@ -0,0 +1,228 @@
+"""Config Flow for ABB Power-One PVI SunSpec.
+
+https://github.com/alexdelprete/ha-abb-powerone-pvi-sunspec
+"""
+
+import ipaddress
+import logging
+import re
+
+import voluptuous as vol
+from homeassistant import config_entries
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers import config_validation as cv
+from homeassistant.helpers.selector import selector
+from pymodbus.exceptions import ConnectionException
+
+from .api import ABBPowerOneFimerAPI
+from .const import (
+ CONF_BASE_ADDR,
+ CONF_HOST,
+ CONF_NAME,
+ CONF_PORT,
+ CONF_SCAN_INTERVAL,
+ CONF_SLAVE_ID,
+ DEFAULT_BASE_ADDR,
+ DEFAULT_NAME,
+ DEFAULT_PORT,
+ DEFAULT_SCAN_INTERVAL,
+ DEFAULT_SLAVE_ID,
+ DOMAIN,
+)
+
+_LOGGER = logging.getLogger(__name__)
+
+
+def host_valid(host):
+ """Return True if hostname or IP address is valid."""
+ try:
+ if ipaddress.ip_address(host).version == (4 or 6):
+ return True
+ except ValueError:
+ disallowed = re.compile(r"[^a-zA-Z\d\-]")
+ return all(x and not disallowed.search(x) for x in host.split("."))
+
+
+@callback
+def get_host_from_config(hass: HomeAssistant):
+ """Return the hosts already configured."""
+ return {
+ config_entry.data.get(CONF_HOST)
+ for config_entry in hass.config_entries.async_entries(DOMAIN)
+ }
+
+
+class ABBPowerOneFimerConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
+ """ABB Power-One PVI SunSpec config flow."""
+
+ VERSION = 1
+ CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
+
+ @staticmethod
+ @callback
+ def async_get_options_flow(config_entry: ConfigEntry):
+ """Initiate Options Flow Instance."""
+ return ABBPowerOneFimerOptionsFlow(config_entry)
+
+ def _host_in_configuration_exists(self, host) -> bool:
+ """Return True if host exists in configuration."""
+ if host in get_host_from_config(self.hass):
+ return True
+ return False
+
+ async def test_connection(
+ self, name, host, port, slave_id, base_addr, scan_interval
+ ):
+ """Return true if credentials is valid."""
+ _LOGGER.debug(f"Test connection to {host}:{port} slave id {slave_id}")
+ try:
+ _LOGGER.debug("Creating API Client")
+ self.api = ABBPowerOneFimerAPI(
+ self.hass, name, host, port, slave_id, base_addr, scan_interval
+ )
+ _LOGGER.debug("API Client created: calling get data")
+ self.api_data = await self.api.async_get_data()
+ _LOGGER.debug("API Client: get data")
+ _LOGGER.debug(f"API Client Data: {self.api_data}")
+ return self.api.data["comm_sernum"]
+ except ConnectionException as connerr:
+ _LOGGER.error(
+ f"Failed to connect to host: {host}:{port} - slave id: {slave_id} - Exception: {connerr}"
+ )
+ return False
+
+ async def async_step_user(self, user_input=None):
+ """Handle the initial step."""
+ errors = {}
+
+ if user_input is not None:
+ name = user_input[CONF_NAME]
+ host = user_input[CONF_HOST]
+ port = user_input[CONF_PORT]
+ slave_id = user_input[CONF_SLAVE_ID]
+ base_addr = user_input[CONF_BASE_ADDR]
+ scan_interval = user_input[CONF_SCAN_INTERVAL]
+
+ if self._host_in_configuration_exists(host):
+ errors[CONF_HOST] = "Device Already Configured"
+ elif not host_valid(user_input[CONF_HOST]):
+ errors[CONF_HOST] = "invalid Host IP"
+ else:
+ uid = await self.test_connection(
+ name, host, port, slave_id, base_addr, scan_interval
+ )
+ if uid is not False:
+ _LOGGER.debug(f"Device unique id: {uid}")
+ await self.async_set_unique_id(uid)
+ self._abort_if_unique_id_configured()
+ return self.async_create_entry(
+ title=user_input[CONF_NAME], data=user_input
+ )
+ else:
+ errors[
+ CONF_HOST
+ ] = "Connection to device failed (S/N not retreived)"
+
+ return self.async_show_form(
+ step_id="user",
+ data_schema=vol.Schema(
+ {
+ vol.Required(
+ CONF_NAME,
+ default=DEFAULT_NAME,
+ ): cv.string,
+ vol.Required(
+ CONF_HOST,
+ ): cv.string,
+ vol.Required(
+ CONF_PORT,
+ default=DEFAULT_PORT,
+ ): vol.Coerce(int),
+ vol.Required(
+ CONF_SLAVE_ID,
+ default=DEFAULT_SLAVE_ID,
+ ): vol.Coerce(int),
+ vol.Required(
+ CONF_BASE_ADDR,
+ default=DEFAULT_BASE_ADDR,
+ ): vol.Coerce(int),
+ vol.Required(
+ CONF_SCAN_INTERVAL,
+ default=DEFAULT_SCAN_INTERVAL,
+ ): selector(
+ {
+ "number": {
+ "min": 30,
+ "max": 600,
+ "step": 10,
+ "unit_of_measurement": "s",
+ "mode": "slider",
+ }
+ }
+ ),
+ },
+ ),
+ errors=errors,
+ )
+
+
+class ABBPowerOneFimerOptionsFlow(config_entries.OptionsFlow):
+ """Config flow options handler."""
+
+ VERSION = 1
+
+ def __init__(self, config_entry: ConfigEntry) -> None:
+ """Initialize option flow instance."""
+ self.config_entry = config_entry
+ self.data_schema = vol.Schema(
+ {
+ vol.Required(
+ CONF_PORT,
+ default=self.config_entry.data.get(CONF_PORT),
+ ): vol.Coerce(int),
+ vol.Required(
+ CONF_SLAVE_ID,
+ default=self.config_entry.data.get(CONF_SLAVE_ID),
+ ): vol.Coerce(int),
+ vol.Required(
+ CONF_BASE_ADDR,
+ default=self.config_entry.data.get(CONF_BASE_ADDR),
+ ): vol.Coerce(int),
+ vol.Required(
+ CONF_SCAN_INTERVAL,
+ default=self.config_entry.data.get(CONF_SCAN_INTERVAL),
+ ): selector(
+ {
+ "number": {
+ "min": 30,
+ "max": 600,
+ "step": 10,
+ "unit_of_measurement": "s",
+ "mode": "slider",
+ }
+ }
+ ),
+ }
+ )
+
+ async def async_step_init(self, user_input=None):
+ """Manage the options."""
+
+ if user_input is not None:
+ # complete non-edited entries before update (ht @PeteRage)
+ if CONF_NAME in self.config_entry.data:
+ user_input[CONF_NAME] = self.config_entry.data.get(CONF_NAME)
+ if CONF_HOST in self.config_entry.data:
+ user_input[CONF_HOST] = self.config_entry.data.get(CONF_HOST)
+
+ # write updated config entries (ht @PeteRage / @fuatakgun)
+ self.hass.config_entries.async_update_entry(
+ self.config_entry, data=user_input, options=self.config_entry.options
+ )
+ self.async_abort(reason="configuration updated")
+
+ # write empty options entries (ht @PeteRage / @fuatakgun)
+ return self.async_create_entry(title="", data={})
+
+ return self.async_show_form(step_id="init", data_schema=self.data_schema)
diff --git a/custom_components/sinapsi_alfa/const.py b/custom_components/sinapsi_alfa/const.py
new file mode 100644
index 0000000..735fc28
--- /dev/null
+++ b/custom_components/sinapsi_alfa/const.py
@@ -0,0 +1,480 @@
+"""Constants for ABB Power-One PVI SunSpec.
+
+https://github.com/alexdelprete/ha-abb-powerone-pvi-sunspec
+"""
+
+from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass
+from homeassistant.const import (
+ UnitOfElectricCurrent,
+ UnitOfElectricPotential,
+ UnitOfEnergy,
+ UnitOfFrequency,
+ UnitOfPower,
+ UnitOfTemperature,
+)
+
+# Base component constants
+NAME = "ABB/Power-One/FIMER PVI SunSpec ModBus TCP"
+DOMAIN = "abb_powerone_pvi_sunspec"
+DOMAIN_DATA = f"{DOMAIN}_data"
+VERSION = "3.0.0"
+ATTRIBUTION = "by @alexdelprete"
+ISSUE_URL = "https://github.com/alexdelprete/ha-abb-powerone-pvi-sunspec/issues"
+
+# Icons
+ICON = "mdi:format-quote-close"
+
+# Device classes
+BINARY_SENSOR_DEVICE_CLASS = "connectivity"
+
+# Platforms
+SENSOR = "sensor"
+PLATFORMS = [
+ "sensor",
+]
+UPDATE_LISTENER = "update_listener"
+DATA = "data"
+
+# Configuration and options
+CONF_NAME = "name"
+CONF_HOST = "host"
+CONF_PORT = "port"
+CONF_SLAVE_ID = "slave_id"
+CONF_BASE_ADDR = "base_addr"
+CONF_SCAN_INTERVAL = "scan_interval"
+DEFAULT_NAME = "ABB Inverter"
+DEFAULT_PORT = 502
+DEFAULT_SLAVE_ID = 2
+DEFAULT_BASE_ADDR = 0
+DEFAULT_SCAN_INTERVAL = 60
+MIN_SCAN_INTERVAL = 30
+STARTUP_MESSAGE = f"""
+-------------------------------------------------------------------
+{NAME}
+Version: {VERSION}
+{ATTRIBUTION}
+This is a custom integration!
+If you have any issues with this you need to open an issue here:
+{ISSUE_URL}
+-------------------------------------------------------------------
+"""
+
+# Sensors for all inverters
+# Each entry maps a sensor id to a 6-item list consumed by sensor.py:
+#   [0] friendly name   [1] api.data key   [2] native unit (or None)
+#   [3] mdi icon        [4] SensorDeviceClass (or None)
+#   [5] SensorStateClass (or None; None marks a diagnostic entity)
+SENSOR_TYPES_COMMON = {
+    "Manufacturer": [
+        "Manufacturer",
+        "comm_manufact",
+        None,
+        "mdi:information-outline",
+        None,
+        None,
+    ],
+    "Model": ["Model", "comm_model", None, "mdi:information-outline", None, None],
+    "Options": ["Options", "comm_options", None, "mdi:information-outline", None, None],
+    "Version": [
+        "Firmware Version",
+        "comm_version",
+        None,
+        "mdi:information-outline",
+        None,
+        None,
+    ],
+    "Serial": ["Serial", "comm_sernum", None, "mdi:information-outline", None, None],
+    "Inverter_Type": [
+        "Inverter Type",
+        "invtype",
+        None,
+        "mdi:information-outline",
+        None,
+        None,
+    ],
+    "AC_Current": [
+        "AC Current",
+        "accurrent",
+        UnitOfElectricCurrent.AMPERE,
+        "mdi:current-ac",
+        SensorDeviceClass.CURRENT,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "AC_VoltageAN": [
+        "AC Voltage AN",
+        "acvoltagean",
+        UnitOfElectricPotential.VOLT,
+        "mdi:lightning-bolt",
+        SensorDeviceClass.VOLTAGE,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "AC_Power": [
+        "AC Power",
+        "acpower",
+        UnitOfPower.WATT,
+        "mdi:solar-power",
+        SensorDeviceClass.POWER,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "AC_Frequency": [
+        "AC Frequency",
+        "acfreq",
+        UnitOfFrequency.HERTZ,
+        "mdi:sine-wave",
+        SensorDeviceClass.FREQUENCY,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "DC_Power": [
+        "DC Power",
+        "dcpower",
+        UnitOfPower.WATT,
+        "mdi:solar-power",
+        SensorDeviceClass.POWER,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "Total_Energy": [
+        "Total Energy",
+        "totalenergy",
+        UnitOfEnergy.WATT_HOUR,
+        "mdi:solar-power",
+        SensorDeviceClass.ENERGY,
+        SensorStateClass.TOTAL_INCREASING,
+    ],
+    "Status": [
+        "Operating State",
+        "status",
+        None,
+        "mdi:information-outline",
+        None,
+        None,
+    ],
+    "Status_Vendor": [
+        "Vendor Operating State",
+        "statusvendor",
+        None,
+        "mdi:information-outline",
+        None,
+        None,
+    ],
+    "Temp_Cab": [
+        "Ambient Temperature",
+        "tempcab",
+        UnitOfTemperature.CELSIUS,
+        "mdi:temperature-celsius",
+        SensorDeviceClass.TEMPERATURE,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "Temp_Oth": [
+        "Inverter Temperature",
+        "tempoth",
+        UnitOfTemperature.CELSIUS,
+        "mdi:temperature-celsius",
+        SensorDeviceClass.TEMPERATURE,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "MPPT_Count": [
+        "MPPT Count",
+        "mppt_nr",
+        None,
+        "mdi:information-outline",
+        None,
+        None,
+    ],
+}
+
+# Sensors for single phase inverters, apparently does not have any specific sensors
+SENSOR_TYPES_SINGLE_PHASE = {}
+
+# Sensors for three phase inverters
+# Layout per entry: [name, api.data key, unit, icon, device_class, state_class]
+SENSOR_TYPES_THREE_PHASE = {
+    "AC_CurrentA": [
+        "AC Current A",
+        "accurrenta",
+        UnitOfElectricCurrent.AMPERE,
+        "mdi:current-ac",
+        SensorDeviceClass.CURRENT,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "AC_CurrentB": [
+        "AC Current B",
+        "accurrentb",
+        UnitOfElectricCurrent.AMPERE,
+        "mdi:current-ac",
+        SensorDeviceClass.CURRENT,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "AC_CurrentC": [
+        "AC Current C",
+        "accurrentc",
+        UnitOfElectricCurrent.AMPERE,
+        "mdi:current-ac",
+        SensorDeviceClass.CURRENT,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "AC_VoltageAB": [
+        "AC Voltage AB",
+        "acvoltageab",
+        UnitOfElectricPotential.VOLT,
+        "mdi:lightning-bolt",
+        SensorDeviceClass.VOLTAGE,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "AC_VoltageBC": [
+        "AC Voltage BC",
+        "acvoltagebc",
+        UnitOfElectricPotential.VOLT,
+        "mdi:lightning-bolt",
+        SensorDeviceClass.VOLTAGE,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "AC_VoltageCA": [
+        "AC Voltage CA",
+        "acvoltageca",
+        UnitOfElectricPotential.VOLT,
+        "mdi:lightning-bolt",
+        SensorDeviceClass.VOLTAGE,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "AC_VoltageBN": [
+        "AC Voltage BN",
+        "acvoltagebn",
+        UnitOfElectricPotential.VOLT,
+        "mdi:lightning-bolt",
+        SensorDeviceClass.VOLTAGE,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "AC_VoltageCN": [
+        "AC Voltage CN",
+        "acvoltagecn",
+        UnitOfElectricPotential.VOLT,
+        "mdi:lightning-bolt",
+        SensorDeviceClass.VOLTAGE,
+        SensorStateClass.MEASUREMENT,
+    ],
+}
+
+# Sensors for single mppt inverters
+# Layout per entry: [name, api.data key, unit, icon, device_class, state_class]
+SENSOR_TYPES_SINGLE_MPPT = {
+    "DC_Curr": [
+        "DC Current",
+        "dccurr",
+        UnitOfElectricCurrent.AMPERE,
+        "mdi:current-ac",
+        SensorDeviceClass.CURRENT,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "DC_Volt": [
+        "DC Voltage",
+        "dcvolt",
+        UnitOfElectricPotential.VOLT,
+        "mdi:lightning-bolt",
+        SensorDeviceClass.VOLTAGE,
+        SensorStateClass.MEASUREMENT,
+    ],
+}
+
+# Sensors for dual MPPT inverters (comment fixed: was "single dual inverters")
+SENSOR_TYPES_DUAL_MPPT = {
+    "DC1_Curr": [
+        "DC1 Current",
+        "dc1curr",
+        UnitOfElectricCurrent.AMPERE,
+        "mdi:current-ac",
+        SensorDeviceClass.CURRENT,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "DC1_Volt": [
+        "DC1 Voltage",
+        "dc1volt",
+        UnitOfElectricPotential.VOLT,
+        "mdi:lightning-bolt",
+        SensorDeviceClass.VOLTAGE,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "DC1_Power": [
+        "DC1 Power",
+        "dc1power",
+        UnitOfPower.WATT,
+        "mdi:solar-power",
+        SensorDeviceClass.POWER,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "DC2_Curr": [
+        "DC2 Current",
+        "dc2curr",
+        UnitOfElectricCurrent.AMPERE,
+        "mdi:current-ac",
+        SensorDeviceClass.CURRENT,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "DC2_Volt": [
+        "DC2 Voltage",
+        "dc2volt",
+        UnitOfElectricPotential.VOLT,
+        "mdi:lightning-bolt",
+        SensorDeviceClass.VOLTAGE,
+        SensorStateClass.MEASUREMENT,
+    ],
+    "DC2_Power": [
+        "DC2 Power",
+        "dc2power",
+        UnitOfPower.WATT,
+        "mdi:solar-power",
+        SensorDeviceClass.POWER,
+        SensorStateClass.MEASUREMENT,
+    ],
+}
+
+INVERTER_TYPE = {101: "Single Phase", 103: "Three Phase", 999: "Unknown"}
+
+DEVICE_GLOBAL_STATUS = {
+ 0: "Sending Parameters",
+ 1: "Wait Sun/Grid",
+ 2: "Checking Grid",
+ 3: "Measuring Riso",
+ 4: "DcDc Start",
+ 5: "Inverter Start",
+ 6: "Run",
+ 7: "Recovery",
+ 8: "Pause",
+ 9: "Ground Fault",
+ 10: "OTH Fault",
+ 11: "Address Setting",
+ 12: "Self Test",
+ 13: "Self Test Fail",
+ 14: "Sensor Test + Measure Riso",
+ 15: "Leak Fault",
+ 16: "Waiting for manual reset",
+ 17: "Internal Error E026",
+ 18: "Internal Error E027",
+ 19: "Internal Error E028",
+ 20: "Internal Error E029",
+ 21: "Internal Error E030",
+ 22: "Sending Wind Table",
+ 23: "Failed Sending table",
+ 24: "UTH Fault",
+ 25: "Remote OFF",
+ 26: "Interlock Fail",
+ 27: "Executing Autotest",
+ 30: "Waiting Sun",
+ 31: "Temperature Fault",
+ 32: "Fan Staucked",
+ 33: "Int. Com. Fault",
+ 34: "Slave Insertion",
+ 35: "DC Switch Open",
+ 36: "TRAS Switch Open",
+ 37: "MASTER Exclusion",
+ 38: "Auto Exclusion",
+ 98: "Erasing Internal EEprom",
+ 99: "Erasing External EEprom",
+ 100: "Counting EEprom",
+ 101: "Freeze",
+ 116: "Standby",
+ 200: "Dsp Programming",
+ 999: "Unknown",
+}
+
+DEVICE_STATUS = {
+ 0: "Stand By",
+ 1: "Checking Grid",
+ 2: "Run",
+ 3: "Bulk OV",
+ 4: "Out OC",
+ 5: "IGBT Sat",
+ 6: "Bulk UV",
+ 7: "Degauss Error",
+ 8: "No Parameters",
+ 9: "Bulk Low",
+ 10: "Grid OV",
+ 11: "Communication Error",
+ 12: "Degaussing",
+ 13: "Starting",
+ 14: "Bulk Cap Fail",
+ 15: "Leak Fail",
+ 16: "DcDc Fail",
+ 17: "Ileak Sensor Fail",
+ 18: "SelfTest: relay inverter",
+ 19: "SelfTest: wait for sensor test",
+ 20: "SelfTest: test relay DcDc + sensor",
+ 21: "SelfTest: relay inverter fail",
+ 22: "SelfTest timeout fail",
+ 23: "SelfTest: relay DcDc fail",
+ 24: "Self Test 1",
+ 25: "Waiting self test start",
+ 26: "Dc Injection",
+ 27: "Self Test 2",
+ 28: "Self Test 3",
+ 29: "Self Test 4",
+ 30: "Internal Error",
+ 31: "Internal Error",
+ 40: "Forbidden State",
+ 41: "Input UC",
+ 42: "Zero Power",
+ 43: "Grid Not Present",
+ 44: "Waiting Start",
+ 45: "MPPT",
+ 46: "Grid Fail",
+ 47: "Input OC",
+ 255: "Inverter Dsp not programmed",
+ 999: "Unkown",
+}
+
+# Product-id register value -> model name.
+# NOTE(review): several ids map to the same model string (e.g. 65/66/76 ->
+# "PVI-CENTRAL-350") — presumably mirroring the vendor id table; confirm
+# against the vendor documentation before de-duplicating.
+DEVICE_MODEL = {
+    0: "UNO-DM-3.3-TL-PLUS",
+    1: "UNO-DM-4.0-TL-PLUS",
+    3: "UNO-DM-4.6-TL-PLUS",
+    4: "UNO-DM-5.0-TL-PLUS",
+    5: "UNO-DM-6.0-TL-PLUS",
+    11: "UNO-DM-2.0-TL-PLUS",
+    12: "UNO-DM-3.0-TL-PLUS",
+    13: "REACT2-UNO-5.0-TL",
+    14: "REACT2-UNO-3.6-TL",
+    15: "UNO-DM-5.0-TL-PLUS",
+    16: "UNO-DM-6.0-TL-PLUS",
+    19: "REACT2-5.0-TL",
+    49: "PVI-3.0-OUTD",
+    50: "PVI-3.3-OUTD",
+    51: "PVI-3.6-OUTD",
+    52: "PVI-4.2-OUTD",
+    53: "PVI-5000-OUTD",
+    54: "PVI-6000-OUTD",
+    65: "PVI-CENTRAL-350",
+    66: "PVI-CENTRAL-350",
+    67: "PVI-CENTRAL-50",
+    68: "PVI-12.5-OUTD",
+    69: "PVI-CENTRAL-67",
+    70: "TRIO-27.6-TL-OUTD",
+    71: "UNO-2.5-OUTD",
+    72: "PVI-4.6-OUTD-I",
+    74: "PVI-1700-OUTD",
+    76: "PVI-CENTRAL-350",
+    77: "PVI-CENTRAL-250",
+    78: "PVI-12.5-OUTD",
+    79: "PVI-3600-OUTD",
+    80: "3-phase interface (3G74)",
+    81: "PVI-8.0-OUTD-PLUS",
+    82: "TRIO-8.5-TL-OUTD-S",
+    83: "PVS-12.5-TL",
+    84: "PVI-12.5-OUTD-I",
+    85: "PVI-12.5-OUTD-I",
+    86: "PVI-12.5-OUTD-I",
+    88: "PVI-10.0-OUTD",
+    89: "TRIO-27.6-TL-OUTD",
+    90: "PVI-12.5-OUTD-I",
+    99: "CDD",
+    102: "TRIO-20-TL-OUTD",
+    103: "UNO-2.0-OUTD",
+    104: "PVI-3.8-OUTD-I",
+    105: "PVI-2000-IND",
+    106: "PVI-1700-IND",
+    107: "TRIO-7.5-OUTD",
+    108: "PVI-3600-IND",
+    110: "PVI-10.0-OUTD",
+    111: "PVI-2000-OUTD",
+    113: "PVI-8.0-OUTD",
+    114: "TRIO-5.8-OUTD",
+    116: "PVI-10.0-OUTD-I",
+    117: "PVI-10.0-OUTD-I",
+    118: "PVI-10.0-OUTD-I",
+    119: "PVI-10.0-I-OUTD",
+    121: "TRIO-20-TL-OUTD",
+    122: "PVI-10.0-OUTD-I",
+    224: "UNO-2.0-TL-OUTD",
+    242: "UNO-3.0-TL-OUTD",
+}
diff --git a/custom_components/sinapsi_alfa/coordinator.py b/custom_components/sinapsi_alfa/coordinator.py
new file mode 100644
index 0000000..f5e5d8b
--- /dev/null
+++ b/custom_components/sinapsi_alfa/coordinator.py
@@ -0,0 +1,94 @@
+"""Data Update Coordinator for ABB Power-One PVI SunSpec.
+
+https://github.com/alexdelprete/ha-abb-powerone-pvi-sunspec
+"""
+
+import logging
+from datetime import datetime, timedelta
+
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
+
+from .api import ABBPowerOneFimerAPI
+from .const import (
+ CONF_BASE_ADDR,
+ CONF_HOST,
+ CONF_NAME,
+ CONF_PORT,
+ CONF_SCAN_INTERVAL,
+ CONF_SLAVE_ID,
+ DEFAULT_SCAN_INTERVAL,
+ DOMAIN,
+ MIN_SCAN_INTERVAL,
+)
+
+_LOGGER = logging.getLogger(__name__)
+
+
+class ABBPowerOneFimerCoordinator(DataUpdateCoordinator):
+    """Coordinator that polls the device API on a fixed interval.
+
+    Wraps ABBPowerOneFimerAPI and distributes its data to all platform
+    entities through the standard DataUpdateCoordinator mechanics.
+    """
+
+    config_entry: ConfigEntry
+
+    def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None:
+        """Initialize data update coordinator.
+
+        Args:
+            hass: Home Assistant instance.
+            config_entry: entry holding connection data (name, host, port,
+                slave id, base address, scan interval).
+        """
+
+        # get scan_interval from user config
+        self.scan_interval = config_entry.data.get(
+            CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL
+        )
+        # enforce scan_interval lower bound
+        if self.scan_interval < MIN_SCAN_INTERVAL:
+            self.scan_interval = MIN_SCAN_INTERVAL
+        # set coordinator update interval
+        self.update_interval = timedelta(seconds=self.scan_interval)
+        _LOGGER.debug(
+            f"Scan Interval: scan_interval={self.scan_interval} update_interval={self.update_interval}"
+        )
+
+        # set update method and interval for coordinator
+        super().__init__(
+            hass,
+            _LOGGER,
+            name=f"{DOMAIN} ({config_entry.unique_id})",
+            update_method=self.async_update_data,
+            update_interval=self.update_interval,
+        )
+
+        # Wall-clock timestamp of the last attempted refresh.
+        self.last_update_time = datetime.now()
+        # NOTE(review): DataUpdateCoordinator manages last_update_success
+        # itself; this only pre-seeds it before the first refresh.
+        self.last_update_success = True
+
+        self.api = ABBPowerOneFimerAPI(
+            hass,
+            config_entry.data.get(CONF_NAME),
+            config_entry.data.get(CONF_HOST),
+            config_entry.data.get(CONF_PORT),
+            config_entry.data.get(CONF_SLAVE_ID),
+            config_entry.data.get(CONF_BASE_ADDR),
+            self.scan_interval,
+        )
+
+        _LOGGER.debug("Coordinator Config Data: %s", config_entry.data)
+        _LOGGER.debug(
+            "Coordinator API init: Host: %s Port: %s ID: %s ScanInterval: %s",
+            config_entry.data.get(CONF_HOST),
+            config_entry.data.get(CONF_PORT),
+            config_entry.data.get(CONF_SLAVE_ID),
+            self.scan_interval,
+        )
+
+    async def async_update_data(self):
+        """Fetch fresh data via the API; invoked by the coordinator timer.
+
+        Returns:
+            The value returned by api.async_get_data(), used here as a
+            success flag and cached in self.last_update_status.
+
+        Raises:
+            UpdateFailed: on any API error, so HA marks entities unavailable.
+        """
+        _LOGGER.debug(f"Data Coordinator: Update started at {datetime.now()}")
+        try:
+            self.last_update_status = await self.api.async_get_data()
+            self.last_update_time = datetime.now()
+            _LOGGER.debug(
+                f"Data Coordinator: Update completed at {self.last_update_time}"
+            )
+            return self.last_update_status
+        except Exception as ex:
+            self.last_update_status = False
+            # last_update_time still holds the previous successful refresh time.
+            _LOGGER.debug(f"Coordinator Update Error: {ex} at {self.last_update_time}")
+            raise UpdateFailed() from ex
diff --git a/custom_components/sinapsi_alfa/manifest.json b/custom_components/sinapsi_alfa/manifest.json
new file mode 100644
index 0000000..d465788
--- /dev/null
+++ b/custom_components/sinapsi_alfa/manifest.json
@@ -0,0 +1,13 @@
+{
+ "domain": "sinapsi_alfa",
+ "name": "Alfa by Sinapsi",
+ "codeowners": ["@alexdelprete"],
+ "config_flow": true,
+ "documentation": "https://github.com/alexdelprete/ha-sinapsi-alfa",
+ "integration_type": "hub",
+ "iot_class": "local_polling",
+ "issue_tracker": "https://github.com/alexdelprete/ha-sinapsi-alfa/issues",
+ "loggers": ["custom_components.sinapsi_alfa"],
+ "requirements": ["pymodbus>=3.5.4"],
+ "version": "0.1.0"
+}
\ No newline at end of file
diff --git a/custom_components/sinapsi_alfa/sensor.py b/custom_components/sinapsi_alfa/sensor.py
new file mode 100644
index 0000000..d8553a5
--- /dev/null
+++ b/custom_components/sinapsi_alfa/sensor.py
@@ -0,0 +1,194 @@
+"""Sensor Platform Device for ABB Power-One PVI SunSpec.
+
+https://github.com/alexdelprete/ha-abb-powerone-pvi-sunspec
+"""
+
+import logging
+from typing import Any
+
+from homeassistant.components.sensor import SensorEntity
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers.entity import EntityCategory
+from homeassistant.helpers.update_coordinator import CoordinatorEntity
+
+from .const import (
+ CONF_NAME,
+ DATA,
+ DOMAIN,
+ INVERTER_TYPE,
+ SENSOR_TYPES_COMMON,
+ SENSOR_TYPES_DUAL_MPPT,
+ SENSOR_TYPES_SINGLE_MPPT,
+ SENSOR_TYPES_SINGLE_PHASE,
+ SENSOR_TYPES_THREE_PHASE,
+)
+
+_LOGGER = logging.getLogger(__name__)
+
+
+def add_sensor_defs(coordinator, config_entry, sensor_list, sensor_definitions):
+ """Class Initializitation."""
+
+ for sensor_info in sensor_definitions.values():
+ sensor_data = {
+ "name": sensor_info[0],
+ "key": sensor_info[1],
+ "unit": sensor_info[2],
+ "icon": sensor_info[3],
+ "device_class": sensor_info[4],
+ "state_class": sensor_info[5],
+ }
+ sensor_list.append(
+ ABBPowerOneFimerSensor(coordinator, config_entry, sensor_data)
+ )
+
+
+async def async_setup_entry(hass: HomeAssistant, config_entry, async_add_entities):
+ """Sensor Platform setup."""
+
+ # Get handler to coordinator from config
+ coordinator = hass.data[DOMAIN][config_entry.entry_id][DATA]
+
+ _LOGGER.debug("(sensor) Name: %s", config_entry.data.get(CONF_NAME))
+ _LOGGER.debug("(sensor) Manufacturer: %s", coordinator.api.data["comm_manufact"])
+ _LOGGER.debug("(sensor) Model: %s", coordinator.api.data["comm_model"])
+ _LOGGER.debug("(sensor) SW Version: %s", coordinator.api.data["comm_version"])
+ _LOGGER.debug("(sensor) Inverter Type (str): %s", coordinator.api.data["invtype"])
+ _LOGGER.debug("(sensor) MPPT #: %s", coordinator.api.data["mppt_nr"])
+ _LOGGER.debug("(sensor) Serial#: %s", coordinator.api.data["comm_sernum"])
+
+ sensor_list = []
+ add_sensor_defs(coordinator, config_entry, sensor_list, SENSOR_TYPES_COMMON)
+
+ if coordinator.api.data["invtype"] == INVERTER_TYPE[101]:
+ add_sensor_defs(
+ coordinator, config_entry, sensor_list, SENSOR_TYPES_SINGLE_PHASE
+ )
+ elif coordinator.api.data["invtype"] == INVERTER_TYPE[103]:
+ add_sensor_defs(
+ coordinator, config_entry, sensor_list, SENSOR_TYPES_THREE_PHASE
+ )
+
+ _LOGGER.debug(
+ "(sensor) DC Voltages : single=%s dc1=%s dc2=%s",
+ coordinator.api.data["dcvolt"],
+ coordinator.api.data["dc1volt"],
+ coordinator.api.data["dc2volt"],
+ )
+ if coordinator.api.data["mppt_nr"] == 1:
+ add_sensor_defs(
+ coordinator, config_entry, sensor_list, SENSOR_TYPES_SINGLE_MPPT
+ )
+ else:
+ add_sensor_defs(coordinator, config_entry, sensor_list, SENSOR_TYPES_DUAL_MPPT)
+
+ async_add_entities(sensor_list)
+
+ return True
+
+
+class ABBPowerOneFimerSensor(CoordinatorEntity, SensorEntity):
+    """Representation of an ABB SunSpec Modbus sensor.
+
+    One instance per entry of the SENSOR_TYPES_* tables; all state is
+    read live from coordinator.api.data, keyed by sensor_data["key"].
+    """
+
+    def __init__(self, coordinator, config_entry, sensor_data):
+        """Store the sensor metadata and snapshot the device identity.
+
+        Args:
+            coordinator: ABBPowerOneFimerCoordinator owning the API data.
+            config_entry: the config entry this sensor belongs to.
+            sensor_data: dict with name/key/unit/icon/device_class/state_class.
+        """
+        super().__init__(coordinator)
+        self.coordinator = coordinator
+        self._name = sensor_data["name"]
+        self._key = sensor_data["key"]
+        self._unit_of_measurement = sensor_data["unit"]
+        self._icon = sensor_data["icon"]
+        self._device_class = sensor_data["device_class"]
+        self._state_class = sensor_data["state_class"]
+        # Device identity fields, read once from the API at creation time.
+        self._device_name = self.coordinator.api.name
+        self._device_host = self.coordinator.api.host
+        self._device_model = self.coordinator.api.data["comm_model"]
+        self._device_manufact = self.coordinator.api.data["comm_manufact"]
+        self._device_sn = self.coordinator.api.data["comm_sernum"]
+        self._device_swver = self.coordinator.api.data["comm_version"]
+        self._device_hwver = self.coordinator.api.data["comm_options"]
+
+    @callback
+    def _handle_coordinator_update(self) -> None:
+        """Fetch new state data for the sensor."""
+        # NOTE(review): _state is written here but never read — native_value
+        # reads coordinator.api.data directly; consider removing it.
+        self._state = self.coordinator.api.data[self._key]
+        self.async_write_ha_state()
+        # write debug log only on first sensor to avoid spamming the log
+        if self.name == "Manufacturer":
+            _LOGGER.debug(
+                "_handle_coordinator_update: sensors state written to state machine"
+            )
+
+    @property
+    def has_entity_name(self):
+        """Return True so HA prefixes the device name to the entity name."""
+        return True
+
+    @property
+    def name(self):
+        """Return the name."""
+        return f"{self._name}"
+
+    @property
+    def native_unit_of_measurement(self):
+        """Return the unit of measurement."""
+        return self._unit_of_measurement
+
+    @property
+    def icon(self):
+        """Return the sensor icon."""
+        return self._icon
+
+    @property
+    def device_class(self):
+        """Return the sensor device_class."""
+        return self._device_class
+
+    @property
+    def state_class(self):
+        """Return the sensor state_class."""
+        return self._state_class
+
+    @property
+    def entity_category(self):
+        """Mark sensors without a state_class (info strings) as diagnostic."""
+        if self._state_class is None:
+            return EntityCategory.DIAGNOSTIC
+        else:
+            return None
+
+    @property
+    def native_value(self):
+        """Return the current value from the API data (None if key missing)."""
+        if self._key in self.coordinator.api.data:
+            return self.coordinator.api.data[self._key]
+
+    @property
+    def state_attributes(self) -> dict[str, Any] | None:
+        """Return the attributes."""
+        return None
+
+    @property
+    def should_poll(self) -> bool:
+        """No need to poll. Coordinator notifies entity of updates."""
+        return False
+
+    @property
+    def unique_id(self):
+        """Return a unique ID to use for this entity."""
+        return f"{self._device_sn}_{self._key}"
+
+    @property
+    def device_info(self):
+        """Return device specific attributes."""
+        return {
+            "configuration_url": f"http://{self._device_host}",
+            "hw_version": None,
+            "identifiers": {(DOMAIN, self._device_sn)},
+            "manufacturer": self._device_manufact,
+            "model": self._device_model,
+            "name": self._device_name,
+            "serial_number": self._device_sn,
+            "sw_version": self._device_swver,
+            "via_device": None,
+        }
diff --git a/custom_components/sinapsi_alfa/translations/en.json b/custom_components/sinapsi_alfa/translations/en.json
new file mode 100644
index 0000000..93becaa
--- /dev/null
+++ b/custom_components/sinapsi_alfa/translations/en.json
@@ -0,0 +1,38 @@
+{
+ "config": {
+ "step": {
+ "user": {
+ "title": "Alfa Connection Configuration",
+ "description": "If you need help with the configuration go to: https://github.com/alexdelprete/ha-sinapsi-alfa",
+ "data": {
+ "name": "Custom Name of the device (used for sensors' prefix)",
+ "host": "IP or hostname",
+ "port": "TCP port",
+ "slave_id": "Modbus Slave address of the device",
+ "base_addr": "Modbus Register Map Base Address",
+ "scan_interval": "Polling Period (min: 30s max: 600s)"
+ }
+ }
+ },
+ "error": {
+ "already_configured": "Device is already configured"
+ },
+ "abort": {
+ "already_configured": "Device is already configured"
+ }
+ },
+ "options": {
+ "step": {
+ "init": {
+ "title": "Alfa Connection Options",
+ "description": "Set Connection Options",
+ "data": {
+ "port": "TCP port",
+        "slave_id": "Modbus Slave address of the device",
+ "base_addr": "Modbus Register Map Base Address",
+ "scan_interval": "Polling Period (min: 30s max: 600s)"
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/doc/alfa-ha-modbus-configuration.yaml b/doc/alfa-ha-modbus-configuration.yaml
new file mode 100644
index 0000000..206c31b
--- /dev/null
+++ b/doc/alfa-ha-modbus-configuration.yaml
@@ -0,0 +1,135 @@
+modbus:
+ - name: Alfa
+ type: tcp
+ host:
+ port: 502
+ sensors:
+ - name: Potenza Attiva Prelevata Istantanea
+ device_class: power
+ unit_of_measurement: W
+ state_class: measurement
+ data_type: uint16
+ address: 2
+ - name: Potenza Attiva Immessa Istantanea
+ device_class: power
+ unit_of_measurement: W
+ state_class: measurement
+ data_type: uint16
+ address: 12
+ - name: Potenza Attiva Prodotta Istantanea
+ device_class: power
+ unit_of_measurement: W
+ state_class: measurement
+ data_type: uint16
+ address: 921
+ - name: Energia Attiva Prelevata Totale
+ device_class: energy
+ unit_of_measurement: Wh
+ state_class: total_increasing
+ data_type: uint32
+ address: 5
+ - name: Energia Attiva Immessa Totale
+ device_class: energy
+ unit_of_measurement: Wh
+ state_class: total_increasing
+ data_type: uint32
+ address: 15
+ - name: Energia Attiva Prodotta Totale
+ device_class: energy
+ unit_of_measurement: Wh
+ state_class: total_increasing
+ data_type: uint32
+ address: 924
+ - name: Pot Att Prel Quart Media
+ device_class: power
+ unit_of_measurement: W
+ state_class: measurement
+ data_type: uint16
+ address: 9
+ - name: Pot Att Imm Quart Media
+ device_class: power
+ unit_of_measurement: W
+ state_class: measurement
+ data_type: uint16
+ address: 19
+ - name: Tot Energ Att Prel Giorno-1 F1
+ device_class: energy
+ unit_of_measurement: Wh
+ state_class: total_increasing
+ data_type: uint32
+ address: 30
+ - name: Tot Energ Att Prel Giorno-1 F2
+ device_class: energy
+ unit_of_measurement: Wh
+ state_class: total_increasing
+ data_type: uint32
+ address: 32
+ - name: Tot Energ Att Prel Giorno-1 F3
+ device_class: energy
+ unit_of_measurement: Wh
+ state_class: total_increasing
+ data_type: uint32
+ address: 34
+ - name: Tot Energ Att Prel Giorno-1 F4
+ device_class: energy
+ unit_of_measurement: Wh
+ state_class: total_increasing
+ data_type: uint32
+ address: 36
+ - name: Tot Energ Att Prel Giorno-1 F5
+ device_class: energy
+ unit_of_measurement: Wh
+ state_class: total_increasing
+ data_type: uint32
+ address: 38
+ - name: Tot Energ Att Prel Giorno-1 F6
+ device_class: energy
+ unit_of_measurement: Wh
+ state_class: total_increasing
+ data_type: uint32
+ address: 40
+ - name: Tot Energ Att Imm Giorno-1 F1
+ device_class: energy
+ unit_of_measurement: Wh
+ state_class: total_increasing
+ data_type: uint32
+ address: 54
+ - name: Tot Energ Att Imm Giorno-1 F2
+ device_class: energy
+ unit_of_measurement: Wh
+ state_class: total_increasing
+ data_type: uint32
+ address: 56
+ - name: Tot Energ Att Imm Giorno-1 F3
+ device_class: energy
+ unit_of_measurement: Wh
+ state_class: total_increasing
+ data_type: uint32
+ address: 58
+ - name: Tot Energ Att Imm Giorno-1 F4
+ device_class: energy
+ unit_of_measurement: Wh
+ state_class: total_increasing
+ data_type: uint32
+ address: 60
+ - name: Tot Energ Att Imm Giorno-1 F5
+ device_class: energy
+ unit_of_measurement: Wh
+ state_class: total_increasing
+ data_type: uint32
+ address: 62
+ - name: Tot Energ Att Imm Giorno-1 F6
+ device_class: energy
+ unit_of_measurement: Wh
+ state_class: total_increasing
+ data_type: uint32
+ address: 64
+ - name: Fascia oraria corrente
+ data_type: uint16
+ address: 203
+ - name: Data evento
+ data_type: uint32
+ address: 780
+ - name: Tempo residuo distacco
+ data_type: uint16
+ address: 782
\ No newline at end of file
diff --git a/doc/distacco.yaml b/doc/distacco.yaml
new file mode 100644
index 0000000..19b07ee
--- /dev/null
+++ b/doc/distacco.yaml
@@ -0,0 +1,9 @@
+sensor:
+ - platform: template
+ sensors:
+ distacco:
+ friendly_name: "Avviso distacco"
+ value_template: >
+ {% if (states('sensor.data_evento') | int ) > 4294967294 %} Nessun avviso
+ {% else %} {{ (states('sensor.data_evento') | int + (states('sensor.tempo_residuo_distacco') | int)) | timestamp_local }}
+ {% endif %}
\ No newline at end of file
diff --git a/gfxfiles/alfa-400-200x35.png b/gfxfiles/alfa-400-200x35.png
new file mode 100644
index 0000000..37e8f88
Binary files /dev/null and b/gfxfiles/alfa-400-200x35.png differ
diff --git a/gfxfiles/alfa-400.png b/gfxfiles/alfa-400.png
new file mode 100644
index 0000000..d819536
Binary files /dev/null and b/gfxfiles/alfa-400.png differ
diff --git a/gfxfiles/config.png b/gfxfiles/config.png
new file mode 100644
index 0000000..aa2bf88
Binary files /dev/null and b/gfxfiles/config.png differ
diff --git a/gfxfiles/demo.png b/gfxfiles/demo.png
new file mode 100644
index 0000000..86842c2
Binary files /dev/null and b/gfxfiles/demo.png differ
diff --git a/gfxfiles/icon.png b/gfxfiles/icon.png
new file mode 100644
index 0000000..d2b80e3
Binary files /dev/null and b/gfxfiles/icon.png differ
diff --git a/gfxfiles/icon@2x.png b/gfxfiles/icon@2x.png
new file mode 100644
index 0000000..1c02844
Binary files /dev/null and b/gfxfiles/icon@2x.png differ
diff --git a/gfxfiles/logo.png b/gfxfiles/logo.png
new file mode 100644
index 0000000..d9cf8f6
Binary files /dev/null and b/gfxfiles/logo.png differ
diff --git a/gfxfiles/logo@2x.png b/gfxfiles/logo@2x.png
new file mode 100644
index 0000000..f63b5ca
Binary files /dev/null and b/gfxfiles/logo@2x.png differ
diff --git a/hacs.json b/hacs.json
new file mode 100644
index 0000000..f60b804
--- /dev/null
+++ b/hacs.json
@@ -0,0 +1,8 @@
+{
+ "name": "Alfa by Sinapsi",
+ "homeassistant": "2023.11.0",
+ "content_in_root": false,
+ "render_readme": true,
+ "zip_release": true,
+ "filename": "sinapsi_alfa.zip"
+}
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..5425e75
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,5 @@
+colorlog==6.8.2
+homeassistant==2024.1.2
+pip>=21.0,<24.1
+ruff==0.2.1
+pymodbus>=3.5
\ No newline at end of file
diff --git a/scripts/develop b/scripts/develop
new file mode 100644
index 0000000..20366e8
--- /dev/null
+++ b/scripts/develop
@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+
+# Run a local Home Assistant instance with this repo's custom component
+# on the Python path, for development/debugging.
+set -e
+
+cd "$(dirname "$0")/.."
+
+# Create config dir if not present
+if [[ ! -d "${PWD}/config" ]]; then
+    mkdir -p "${PWD}/config"
+    hass --config "${PWD}/config" --script ensure_config
+fi
+
+# Set the path to custom_components
+## This lets us have the structure we want /custom_components/integration_blueprint
+## while at the same time have Home Assistant configuration inside /config
+## without resorting to symlinks.
+export PYTHONPATH="${PYTHONPATH}:${PWD}/custom_components"
+
+# Start Home Assistant
+hass --config "${PWD}/config" --debug
\ No newline at end of file
diff --git a/scripts/lint b/scripts/lint
new file mode 100644
index 0000000..752d23a
--- /dev/null
+++ b/scripts/lint
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+
+# Lint the whole repository with Ruff, applying safe autofixes in place.
+set -e
+
+cd "$(dirname "$0")/.."
+
+ruff check . --fix
\ No newline at end of file
diff --git a/scripts/setup b/scripts/setup
new file mode 100644
index 0000000..abe537a
--- /dev/null
+++ b/scripts/setup
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+
+# Install the development requirements (Home Assistant, Ruff, pymodbus, ...).
+set -e
+
+cd "$(dirname "$0")/.."
+
+python3 -m pip install --requirement requirements.txt
\ No newline at end of file