Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 14 additions & 0 deletions apps/predbat/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -842,6 +842,18 @@
"type": "switch",
"default": True,
},
{
"name": "rate_retention_days",
"friendly_name": "Rate Retention Days",
"type": "input_number",
"min": 1,
"max": 365,
"step": 1,
"unit": "days",
"icon": "mdi:database-clock",
"enable": "expert_mode",
"default": 7,
},
{
"name": "set_charge_freeze",
"friendly_name": "Set Charge Freeze",
Expand Down Expand Up @@ -1453,6 +1465,7 @@
"days_previous": True,
"days_previous_weight": True,
"battery_scaling": True,
"rate_retention_days": True,
"forecast_hours": True,
"import_export_scaling": True,
"inverter_limit_charge": True,
Expand Down Expand Up @@ -2106,6 +2119,7 @@
"rates_export_override": {"type": "dict_list"},
"days_previous": {"type": "integer_list"},
"days_previous_weight": {"type": "float_list"},
"rate_retention_days": {"type": "integer"},
"forecast_hours": {"type": "integer"},
"notify_devices": {"type": "string_list"},
"battery_scaling": {"type": "sensor_list", "sensor_type": "float", "entries": "num_inverters", "modify": False},
Expand Down
34 changes: 34 additions & 0 deletions apps/predbat/fetch.py
Original file line number Diff line number Diff line change
Expand Up @@ -907,10 +907,30 @@ def fetch_sensor_data(self, save=True):
futurerate = FutureRate(self)
self.future_energy_rates_import, self.future_energy_rates_export = futurerate.futurerate_analysis(self.rate_import, self.rate_export)

# Load stored rates for past minutes (frozen historical rates)
if self.rate_store:
today = datetime.now()
stored_import, stored_export = self.rate_store.load_rates(today)

if stored_import or stored_export:
# Merge frozen past rates into current rate tables
for minute in range(0, self.minutes_now):
if stored_import and minute in stored_import:
self.rate_import[minute] = stored_import[minute]
if stored_export and minute in stored_export:
self.rate_export[minute] = stored_export[minute]

self.log(
"Loaded {} frozen import rates and {} frozen export rates from storage".format(
len([m for m in stored_import.keys() if m < self.minutes_now]) if stored_import else 0, len([m for m in stored_export.keys() if m < self.minutes_now]) if stored_export else 0
)
)

# Replicate and scan import rates
if self.rate_import:
self.rate_scan(self.rate_import, print=False)
self.rate_import, self.rate_import_replicated = self.rate_replicate(self.rate_import, self.io_adjusted, is_import=True)

self.rate_import_no_io = self.rate_import.copy()
for car_n in range(self.num_cars):
self.rate_import = self.rate_add_io_slots(car_n, self.rate_import, self.octopus_slots[car_n])
Expand All @@ -929,6 +949,7 @@ def fetch_sensor_data(self, save=True):
if self.rate_export:
self.rate_scan_export(self.rate_export, print=False)
self.rate_export, self.rate_export_replicated = self.rate_replicate(self.rate_export, is_import=False)

# For export tariff only load the saving session if enabled
if self.rate_export_max > 0:
self.load_saving_slot(self.octopus_saving_slots, export=True, rate_replicate=self.rate_export_replicated)
Expand All @@ -944,6 +965,11 @@ def fetch_sensor_data(self, save=True):
if self.rate_import or self.rate_export:
self.set_rate_thresholds()

# Save final computed rate tables to persistent storage (with frozen past slots)
if self.rate_store:
today = datetime.now()
self.rate_store.save_rates(today, self.rate_import, self.rate_export, self.minutes_now)

# Find discharging windows
if self.rate_export:
self.high_export_rates, lowest, highest = self.rate_scan_window(self.rate_export, 5, self.rate_export_cost_threshold, True, alt_rates=self.rate_import)
Expand Down Expand Up @@ -1497,6 +1523,14 @@ def apply_manual_rates(self, rates, manual_items, is_import=True, rate_replicate
rates[minute] = rate
rate_replicate[minute] = "manual"

# Track manual override in rate store
if self.rate_store:
today = datetime.now()
if is_import:
self.rate_store.update_manual_override(today, minute, rate, None)
else:
self.rate_store.update_manual_override(today, minute, None, rate)

return rates

def basic_rates(self, info, rtype, prev=None, rate_replicate=None):
Expand Down
189 changes: 189 additions & 0 deletions apps/predbat/persistent_store.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,189 @@
# -----------------------------------------------------------------------------
# Predbat Home Battery System
# Copyright Trefor Southwell 2026 - All Rights Reserved
# This application may be used for personal use only and not for commercial use
# -----------------------------------------------------------------------------
# fmt: off
# pylint: disable=consider-using-f-string
# pylint: disable=line-too-long
# pylint: disable=attribute-defined-outside-init

"""
Base class for persistent JSON file storage with backup and cleanup.
Provides common functionality for components needing to store state across restarts.
"""

import json
import os
import shutil
from datetime import datetime, timedelta
from pathlib import Path


class PersistentStore:
    """
    Abstract base class for persistent JSON file storage.

    Handles load/save with a single rolling ``.bak`` backup, cleanup of old
    files, and automatic timestamping. Subclasses supply the file paths and
    data layout; this class supplies the durable-file mechanics. All failures
    are logged rather than raised, so storage problems never abort the caller.
    """

    def __init__(self, base):
        """
        Initialize with a reference to the base PredBat instance.

        Args:
            base: The owning PredBat object; only its log() callable is used here.
        """
        self.base = base
        self.log = base.log

    def load(self, filepath):
        """
        Load data from a JSON file, falling back to the .bak backup on corruption.

        Args:
            filepath: Path to the JSON file to load.

        Returns:
            The loaded data dict, or None if the file doesn't exist or both
            the file and its backup are unreadable/corrupted.
        """
        try:
            if not os.path.exists(filepath):
                return None

            with open(filepath, "r") as handle:
                return json.load(handle)

        except (json.JSONDecodeError, IOError) as e:
            self.log(f"Warn: Failed to load {filepath}: {e}")

        # Primary file was corrupt or unreadable - try the backup copy
        backup_path = filepath + ".bak"
        if os.path.exists(backup_path):
            try:
                self.log(f"Warn: Attempting to restore from backup: {backup_path}")
                with open(backup_path, "r") as handle:
                    data = json.load(handle)
                self.log("Warn: Successfully restored from backup")
                return data
            except (json.JSONDecodeError, IOError) as e2:
                self.log(f"Error: Backup restoration failed: {e2}")

        return None

    def save(self, filepath, data, backup=True):
        """
        Save data to a JSON file with automatic backup and timestamp.

        Note: mutates *data* by adding/refreshing a 'last_updated' key
        (ISO 8601, local timezone) before writing.

        Args:
            filepath: Path to the JSON file to save.
            data: Dict to save (a 'last_updated' timestamp is added).
            backup: Whether to back up an existing file before overwriting it.

        Returns:
            True if successful, False otherwise.
        """
        try:
            # Stamp the payload so readers can tell how fresh it is
            data["last_updated"] = datetime.now().astimezone().isoformat()

            # Create the parent directory if needed; a bare filename has an
            # empty dirname and os.makedirs("") raises, so guard against it
            parent = os.path.dirname(filepath)
            if parent:
                os.makedirs(parent, exist_ok=True)

            # Preserve the previous version before overwriting
            if backup and os.path.exists(filepath):
                self.backup_file(filepath)

            with open(filepath, "w") as handle:
                json.dump(data, handle, indent=2)

            # Drop backups that have aged out
            self.cleanup_backups(filepath)

            return True

        except (IOError, OSError) as e:
            self.log(f"Error: Failed to save {filepath}: {e}")
            return False

    def backup_file(self, filepath):
        """
        Create a .bak backup copy of the file, preserving metadata (copy2).

        Failures are logged as warnings but never raised; losing a backup
        must not abort a save.

        Args:
            filepath: Path to the file to back up.
        """
        try:
            if os.path.exists(filepath):
                shutil.copy2(filepath, filepath + ".bak")
        except (IOError, OSError) as e:
            self.log(f"Warn: Failed to backup {filepath}: {e}")

    def cleanup_backups(self, filepath):
        """
        Remove the .bak backup file if it is older than 1 day.

        Args:
            filepath: Path to the main file (its .bak sibling is checked).
        """
        try:
            backup_path = filepath + ".bak"
            if os.path.exists(backup_path):
                # Age is judged by the backup's modification time
                file_time = datetime.fromtimestamp(os.path.getmtime(backup_path))
                age = datetime.now() - file_time

                if age > timedelta(days=1):
                    os.remove(backup_path)
                    self.log(f"Info: Cleaned up old backup: {backup_path}")

        except (IOError, OSError) as e:
            self.log(f"Warn: Failed to cleanup backup for {filepath}: {e}")

    def cleanup(self, directory, pattern, retention_days):
        """
        Remove files matching a glob pattern that are older than the retention period.

        Args:
            directory: Directory to search.
            pattern: Glob pattern for files to clean up (e.g. "rates_*.json").
            retention_days: Number of days to retain files.

        Returns:
            Number of files removed (0 if the directory doesn't exist or on error).
        """
        try:
            if not os.path.exists(directory):
                return 0

            path = Path(directory)
            cutoff_time = datetime.now() - timedelta(days=retention_days)
            removed_count = 0

            for file_path in path.glob(pattern):
                try:
                    file_time = datetime.fromtimestamp(file_path.stat().st_mtime)
                    if file_time < cutoff_time:
                        file_path.unlink()
                        removed_count += 1
                        self.log(f"Info: Cleaned up old file: {file_path}")
                except (IOError, OSError) as e:
                    # Keep going - one stubborn file shouldn't stop the sweep
                    self.log(f"Warn: Failed to remove {file_path}: {e}")

            return removed_count

        except Exception as e:
            # Broad catch at this boundary: cleanup is best-effort housekeeping
            self.log(f"Error: Cleanup failed for {directory}/{pattern}: {e}")
            return 0

    def get_last_updated(self, filepath):
        """
        Read the 'last_updated' timestamp from a JSON file.

        Args:
            filepath: Path to the JSON file.

        Returns:
            ISO 8601 timestamp string, or None if the file is missing,
            unreadable, or has no timestamp.
        """
        data = self.load(filepath)
        if data and "last_updated" in data:
            return data["last_updated"]
        return None
6 changes: 5 additions & 1 deletion apps/predbat/predbat.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@
THIS_VERSION = "v8.34.6"

# fmt: off
PREDBAT_FILES = ["predbat.py", "const.py", "hass.py", "config.py", "prediction.py", "gecloud.py", "utils.py", "inverter.py", "ha.py", "download.py", "web.py", "web_helper.py", "predheat.py", "futurerate.py", "octopus.py", "solcast.py", "execute.py", "plan.py", "fetch.py", "output.py", "userinterface.py", "energydataservice.py", "alertfeed.py", "compare.py", "db_manager.py", "db_engine.py", "plugin_system.py", "ohme.py", "components.py", "fox.py", "carbon.py", "temperature.py", "web_mcp.py", "component_base.py", "axle.py", "solax.py", "solis.py", "unit_test.py", "load_ml_component.py", "load_predictor.py", "oauth_mixin.py", "predbat_metrics.py", "web_metrics_dashboard.py"]
PREDBAT_FILES = ["predbat.py", "const.py", "hass.py", "config.py", "prediction.py", "gecloud.py", "utils.py", "inverter.py", "ha.py", "download.py", "web.py", "web_helper.py", "predheat.py", "futurerate.py", "octopus.py", "solcast.py", "execute.py", "plan.py", "fetch.py", "output.py", "userinterface.py", "energydataservice.py", "alertfeed.py", "compare.py", "db_manager.py", "db_engine.py", "plugin_system.py", "ohme.py", "components.py", "fox.py", "carbon.py", "temperature.py", "web_mcp.py", "component_base.py", "axle.py", "solax.py", "solis.py", "unit_test.py", "load_ml_component.py", "load_predictor.py", "oauth_mixin.py", "predbat_metrics.py", "web_metrics_dashboard.py", "persistent_store.py", "rate_store.py"]
# fmt: on

from download import predbat_update_move, predbat_update_download, check_install
Expand Down Expand Up @@ -86,6 +86,7 @@
from userinterface import UserInterface
from compare import Compare
from plugin_system import PluginSystem
from rate_store import RateStore


class PredBat(hass.Hass, Octopus, Energidataservice, Fetch, Plan, Execute, Output, UserInterface):
Expand Down Expand Up @@ -488,6 +489,7 @@ def reset(self):
self.rate_import_no_io = {}
self.rate_export = {}
self.rate_gas = {}
self.rate_store = None
self.rate_slots = []
self.low_rates = []
self.high_export_rates = []
Expand Down Expand Up @@ -1559,6 +1561,8 @@ def initialize(self):
self.validate_config()
self.comparison = Compare(self)

self.rate_store = RateStore(self)

self.components.initialize(phase=1)
if not self.components.start(phase=1):
self.log("Error: Some components failed to start (phase1)")
Expand Down
Loading
Loading