Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .cspell.json
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@
"ignorePaths": [
"**/*.json",
"**/*.yaml",
"**/*_pb2.py",
"**/*.proto",
".gitignore",
],
"import": [
Expand Down
3 changes: 3 additions & 0 deletions .cspell/custom-dictionary-workspace.txt
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ afci
AIO
AIO's
aiohttp
aiomqtt
Alertfeed
allclose
Anson
Expand Down Expand Up @@ -169,6 +170,7 @@ ivtime
jedlix
jsyaml
kaiming
keepalive
killall
kopt
Kostal
Expand Down Expand Up @@ -249,6 +251,7 @@ onmouseover
openweathermap
overfitting
ownerapi
pbgw
pdata
pdetails
perc
Expand Down
14 changes: 14 additions & 0 deletions apps/predbat/components.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
from ha import HAInterface, HAHistory
from db_manager import DatabaseManager
from fox import FoxAPI
from gateway import GatewayMQTT
from web_mcp import PredbatMCPServer
from load_ml_component import LoadMLComponent
from datetime import datetime, timezone, timedelta
Expand Down Expand Up @@ -307,6 +308,19 @@
"phase": 1,
"can_restart": True,
},
# Component-registry entry for the MQTT gateway integration.
# Mirrors the shape of the sibling component entries: class to instantiate,
# display name, event prefix, config-driven constructor args, startup phase.
"gateway": {
    "class": GatewayMQTT,
    "name": "PredBat Gateway",
    # Events whose name starts with this prefix are routed to this component
    # (assumed from the key name and sibling entries - confirm in dispatcher)
    "event_filter": "predbat_gateway_",
    # Each arg maps a constructor parameter to an apps.yaml config key;
    # "required" controls whether startup fails when the key is absent.
    "args": {
        "gateway_device_id": {"required": True, "config": "gateway_device_id"},
        "mqtt_host": {"required": True, "config": "gateway_mqtt_host"},
        # 8883 is the standard MQTT-over-TLS port
        "mqtt_port": {"required": False, "config": "gateway_mqtt_port", "default": 8883},
        "mqtt_token": {"required": True, "config": "gateway_mqtt_token"},
    },
    "phase": 1,
    "can_restart": True,
},
}


Expand Down
22 changes: 19 additions & 3 deletions apps/predbat/fetch.py
Original file line number Diff line number Diff line change
Expand Up @@ -436,6 +436,20 @@ def previous_days_modal_filter(self, data):
num_gaps += gap_minutes
gap_list.append((gap_start_minute_previous, gap_minutes))

# Filter false-positive gaps where sensor was actively reporting.
# NOTE(review): presumably a flat cumulative-load reading looks like a "gap"
# even when the sensor is alive; if raw datapoints landed inside the window,
# keep the real (flat) data instead of filling it with averages - confirm
# against how gap_list is consumed downstream.
if hasattr(self, "load_data_point_minutes") and self.load_data_point_minutes:
    filtered_gaps = []
    for gap_start, gap_minutes_len in gap_list:
        # Count raw datapoints (minute offsets) that fell inside this gap window
        gap_data_count = sum(1 for m in self.load_data_point_minutes if gap_start <= m < gap_start + gap_minutes_len)
        # Need at least 1 data point per hour (min 2) to consider sensor "active"
        min_data_points = max(gap_minutes_len // 60, 2)
        if gap_data_count >= min_data_points:
            # Sensor was reporting through this window: drop it from the gap list
            self.log("Info: Skipping gap at minute {} ({} min) - sensor active ({} of {} points)".format(gap_start, gap_minutes_len, gap_data_count, min_data_points))
        else:
            filtered_gaps.append((gap_start, gap_minutes_len))
    gap_list = filtered_gaps
    # num_gaps holds the TOTAL gap minutes (not the number of gaps) -
    # recompute it so it stays consistent with the filtered list
    num_gaps = sum(g[1] for g in gap_list)

# Work out total number of gap_minutes
if num_gaps > 0:
self.log("Warn: Found {} gaps in load_today totalling {} minutes to fill using average data".format(len(gap_list), num_gaps))
Expand Down Expand Up @@ -588,7 +602,7 @@ def minute_data_import_export(self, max_days_previous, now_utc, key, scale=1.0,

return import_today

def minute_data_load(self, now_utc, entity_name, max_days_previous, load_scaling=1.0, required_unit=None, interpolate=False, pad=True):
def minute_data_load(self, now_utc, entity_name, max_days_previous, load_scaling=1.0, required_unit=None, interpolate=False, pad=True, data_point_minutes=None):
"""
Download one or more entities for load data
"""
Expand Down Expand Up @@ -639,6 +653,7 @@ def minute_data_load(self, now_utc, entity_name, max_days_previous, load_scaling
accumulate=load_minutes,
required_unit=required_unit,
interpolate=interpolate,
data_point_minutes=data_point_minutes,
)
else:
if history is None:
Expand Down Expand Up @@ -680,6 +695,7 @@ def fetch_sensor_data(self, save=True):
self.pv_today = {}
self.load_minutes = {}
self.load_minutes_age = 0
self.load_data_point_minutes = set()
self.load_forecast = {}
self.load_forecast_array = []
self.pv_forecast_minute = {}
Expand Down Expand Up @@ -733,7 +749,7 @@ def fetch_sensor_data(self, save=True):
else:
# Load data
if "load_today" in self.args:
self.load_minutes, self.load_minutes_age = self.minute_data_load(self.now_utc, "load_today", self.max_days_previous, required_unit="kWh", load_scaling=1.0, interpolate=True)
self.load_minutes, self.load_minutes_age = self.minute_data_load(self.now_utc, "load_today", self.max_days_previous, required_unit="kWh", load_scaling=1.0, interpolate=True, data_point_minutes=self.load_data_point_minutes)
self.log("Found {} load_today datapoints going back {} days".format(len(self.load_minutes), self.load_minutes_age))
self.load_minutes_now = get_now_from_cumulative(self.load_minutes, self.minutes_now, backwards=True)
self.load_last_period = (self.load_minutes.get(0, 0) - self.load_minutes.get(PREDICT_STEP, 0)) * 60 / PREDICT_STEP
Expand Down Expand Up @@ -1283,7 +1299,7 @@ def download_ge_data(self, now_utc):

age = now_utc - oldest_data_time
self.load_minutes_age = age.days
self.load_minutes, _ = minute_data(mdata, self.max_days_previous, now_utc, "consumption", "last_updated", backwards=True, smoothing=True, scale=1.0, clean_increment=True, interpolate=True)
self.load_minutes, _ = minute_data(mdata, self.max_days_previous, now_utc, "consumption", "last_updated", backwards=True, smoothing=True, scale=1.0, clean_increment=True, interpolate=True, data_point_minutes=self.load_data_point_minutes)
self.import_today, _ = minute_data(mdata, self.max_days_previous, now_utc, "import", "last_updated", backwards=True, smoothing=True, scale=self.import_export_scaling, clean_increment=True)
self.export_today, _ = minute_data(mdata, self.max_days_previous, now_utc, "export", "last_updated", backwards=True, smoothing=True, scale=self.import_export_scaling, clean_increment=True)
self.pv_today, _ = minute_data(mdata, self.max_days_previous, now_utc, "pv", "last_updated", backwards=True, smoothing=True, scale=self.import_export_scaling, clean_increment=True)
Expand Down
Loading
Loading