
Commit afff705

Merge pull request #31 from BottlecapDave/develop
New release
2 parents 7e98312 + 99d0bb6 commit afff705

5 files changed: +84 -75 lines changed


.github/workflows/main.yml

Lines changed: 0 additions & 2 deletions

@@ -1,8 +1,6 @@
 name: Main
 on:
   workflow_dispatch:
-  schedule:
-    - cron: '0 1 * * *'
   push:
     branches:
       - develop

custom_components/target_timeframes/binary_sensor.py

Lines changed: 5 additions & 6 deletions

@@ -35,16 +35,12 @@ async def async_setup_entry(hass, entry, async_add_entities):
   for sub_entry_id, sub_entry in entry.subentries.items():
     config = dict(sub_entry.data)
     entities = []
+    platform = entity_platform.async_get_current_platform()

     if config[CONFIG_KIND] == CONFIG_KIND_TARGET_RATE or config[CONFIG_KIND] == CONFIG_KIND_ROLLING_TARGET_RATE:
       if config[CONFIG_KIND] == CONFIG_KIND_TARGET_RATE:
         entities.append(TargetTimeframesTargetRate(hass, data_source_id, entry, sub_entry, config, data_dict))
-      else:
-        entities.append(TargetTimeframesRollingTargetRate(hass, data_source_id, entry, sub_entry, config, data_dict))

-      platform = entity_platform.async_get_current_platform()
-
-      if config[CONFIG_KIND] == CONFIG_KIND_TARGET_RATE:
         platform.async_register_entity_service(
           "update_target_timeframe_config",
           vol.All(
@@ -68,6 +64,8 @@ async def async_setup_entry(hass, entry, async_add_entities):
           "async_update_target_timeframe_config",
         )
       else:
+        entities.append(TargetTimeframesRollingTargetRate(hass, data_source_id, entry, sub_entry, config, data_dict))
+
         platform.async_register_entity_service(
           "update_rolling_target_timeframe_config",
           vol.All(
@@ -89,6 +87,7 @@ async def async_setup_entry(hass, entry, async_add_entities):
           ),
           "async_update_rolling_target_timeframe_config",
         )
-
+
     async_add_entities(entities, config_subentry_id=sub_entry_id)
+
   return True
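
The net effect of this hunk: the entity platform handle is now fetched once per sub-entry, and each config kind creates its entity and registers its matching service inside the same branch, instead of appending the entity first and re-checking the kind afterwards. A minimal, standalone sketch of that control flow follows; the PlatformStub class, setup_sub_entry helper and string placeholders are illustrative stand-ins, not the integration's code.

# Illustrative stand-ins only -- not the integration's actual constants or classes.
CONFIG_KIND = "kind"
CONFIG_KIND_TARGET_RATE = "target_rate"
CONFIG_KIND_ROLLING_TARGET_RATE = "rolling_target_rate"

class PlatformStub:
  """Records service registrations instead of talking to Home Assistant."""
  def __init__(self):
    self.services = []

  def async_register_entity_service(self, name, schema, handler):
    self.services.append((name, handler))

def setup_sub_entry(config: dict, platform: PlatformStub) -> list:
  entities = []
  if config[CONFIG_KIND] in (CONFIG_KIND_TARGET_RATE, CONFIG_KIND_ROLLING_TARGET_RATE):
    if config[CONFIG_KIND] == CONFIG_KIND_TARGET_RATE:
      # Entity creation and service registration now live in the same branch.
      entities.append("TargetTimeframesTargetRate")
      platform.async_register_entity_service(
        "update_target_timeframe_config", None, "async_update_target_timeframe_config")
    else:
      entities.append("TargetTimeframesRollingTargetRate")
      platform.async_register_entity_service(
        "update_rolling_target_timeframe_config", None, "async_update_rolling_target_timeframe_config")
  return entities

platform = PlatformStub()
print(setup_sub_entry({CONFIG_KIND: CONFIG_KIND_ROLLING_TARGET_RATE}, platform))
print(platform.services)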

custom_components/target_timeframes/entities/__init__.py

Lines changed: 35 additions & 34 deletions

@@ -33,7 +33,7 @@ def apply_offset(date_time: datetime, offset: str, inverse = False):

   return date_time + timedelta(hours=hours, minutes=minutes, seconds=seconds)

-def is_target_timeframe_complete_in_period(current_date: datetime, start_time: datetime, end_time: datetime, target_timeframes: list | None):
+def is_target_timeframe_complete_in_period(current_date: datetime, start_time: datetime, end_time: datetime, target_timeframes: list | None, context: str = None):
   if target_timeframes is None or len(target_timeframes) < 1:
     return False

@@ -43,7 +43,7 @@ def is_target_timeframe_complete_in_period(current_date: datetime, start_time: d
     target_timeframes[-1]["end"] <= current_date
   )

-def get_start_and_end_times(current_date: datetime, target_start_time: str, target_end_time: str, start_time_not_in_past = True):
+def get_start_and_end_times(current_date: datetime, target_start_time: str, target_end_time: str, start_time_not_in_past = True, context: str = None):
   if (target_start_time is not None):
     target_start = parse_datetime(current_date.strftime(f"%Y-%m-%dT{target_start_time}:00%z"))
   else:
@@ -58,15 +58,15 @@ def get_start_and_end_times(current_date: datetime, target_start_time: str, targ
   target_end = as_utc(target_end)

   if (target_start >= target_end):
-    _LOGGER.debug(f'{target_start} is after {target_end}, so setting target end to tomorrow')
+    _LOGGER.debug(f'{context} - {target_start} is after {target_end}, so setting target end to tomorrow')
     if target_start > current_date:
       target_start = target_start - timedelta(days=1)
     else:
       target_end = target_end + timedelta(days=1)

   # If our start date has passed, reset it to current_date to avoid picking a slot in the past
   if (start_time_not_in_past == True and target_start < current_date and current_date < target_end):
-    _LOGGER.debug(f'Rolling target and {target_start} is in the past. Setting start to {current_date}')
+    _LOGGER.debug(f'{context} - Rolling target and {target_start} is in the past. Setting start to {current_date}')
     target_start = current_date

   # If our start and end are both in the past, then look to the next day
@@ -76,8 +76,8 @@ def get_start_and_end_times(current_date: datetime, target_start_time: str, targ

   return (target_start, target_end)

-def get_fixed_applicable_time_periods(target_start: datetime, target_end: datetime, time_period_values: list):
-  _LOGGER.debug(f'Finding rates between {target_start} and {target_end}')
+def get_fixed_applicable_time_periods(target_start: datetime, target_end: datetime, time_period_values: list, context: str = None):
+  _LOGGER.debug(f'{context} - Finding rates between {target_start} and {target_end}')

   # Retrieve the rates that are applicable for our target rate
   applicable_rates = []
@@ -93,12 +93,12 @@ def get_fixed_applicable_time_periods(target_start: datetime, target_end: dateti
     hours = (date_diff.days * 24) + (date_diff.seconds // 3600)
     periods = hours * 2
     if len(applicable_rates) < periods:
-      _LOGGER.debug(f'Incorrect number of periods discovered. Require {periods}, but only have {len(applicable_rates)}')
+      _LOGGER.debug(f'{context} - Incorrect number of periods discovered. Require {periods}, but only have {len(applicable_rates)}')
       return None

   return applicable_rates

-def get_rolling_applicable_time_periods(current_date: datetime, time_period_values: list, target_hours: float):
+def get_rolling_applicable_time_periods(current_date: datetime, time_period_values: list, target_hours: float, context: str = None):
   # Retrieve the rates that are applicable for our target rate
   applicable_time_periods = []
   periods = target_hours * 2
@@ -114,12 +114,12 @@ def get_rolling_applicable_time_periods(current_date: datetime, time_period_valu

   # Make sure that we have enough rates that meet our target period
   if len(applicable_time_periods) < periods:
-    _LOGGER.debug(f'Incorrect number of periods discovered. Require {periods}, but only have {len(applicable_time_periods)}')
+    _LOGGER.debug(f'{context} - Incorrect number of periods discovered. Require {periods}, but only have {len(applicable_time_periods)}')
     return None

   return applicable_time_periods

-def __get_valid_to(rate):
+def __get_end(rate):
   return (rate["end"].timestamp(), rate["end"].fold)

 def calculate_continuous_times(
@@ -130,7 +130,8 @@ def calculate_continuous_times(
   min_value = None,
   max_value = None,
   weighting: list = None,
-  hours_mode = CONFIG_TARGET_HOURS_MODE_EXACT
+  hours_mode = CONFIG_TARGET_HOURS_MODE_EXACT,
+  context: str = None
 ):
   if (applicable_time_periods is None or target_hours <= 0):
     return []
@@ -139,12 +140,12 @@
   total_required_time_periods = math.ceil(target_hours * 2)

   if weighting is not None and len(weighting) != total_required_time_periods:
-    raise ValueError("Weighting does not match target hours")
+    raise ValueError(f"{context} - Weighting does not match target hours")

   best_continuous_time_periods = None
   best_continuous_time_periods_total = None

-  _LOGGER.debug(f'{applicable_time_periods_count} applicable time periods found')
+  _LOGGER.debug(f'{context} - {applicable_time_periods_count} applicable time periods found')

   # Loop through our rates and try and find the block of time that meets our desired
   # hours and has the lowest combined rates
@@ -196,15 +197,14 @@
     if ((best_continuous_time_periods is None or is_best_continuous_rates) and has_required_hours):
       best_continuous_time_periods = continuous_time_periods
       best_continuous_time_periods_total = continuous_rates_total
-      _LOGGER.debug(f'New best block discovered {continuous_rates_total} ({continuous_time_periods[0]["start"] if len(continuous_time_periods) > 0 else None} - {continuous_time_periods[-1]["end"] if len(continuous_time_periods) > 0 else None})')
+      _LOGGER.debug(f'{context} - New best block discovered {continuous_rates_total} ({continuous_time_periods[0]["start"] if len(continuous_time_periods) > 0 else None} - {continuous_time_periods[-1]["end"] if len(continuous_time_periods) > 0 else None})')
     else:
-      _LOGGER.debug(f'Total rates for current block {continuous_rates_total} ({continuous_time_periods[0]["start"] if len(continuous_time_periods) > 0 else None} - {continuous_time_periods[-1]["end"] if len(continuous_time_periods) > 0 else None}). Total rates for best block {best_continuous_time_periods_total}')
+      _LOGGER.debug(f'{context} - Total rates for current block {continuous_rates_total} ({continuous_time_periods[0]["start"] if len(continuous_time_periods) > 0 else None} - {continuous_time_periods[-1]["end"] if len(continuous_time_periods) > 0 else None}). Total rates for best block {best_continuous_time_periods_total}')

   if best_continuous_time_periods is not None:
     # Make sure our rates are in ascending order before returning
-    best_continuous_time_periods.sort(key=__get_valid_to)
+    best_continuous_time_periods.sort(key=__get_end)
     return best_continuous_time_periods
-
   return []

 def highest_last_time_period(time_period):
@@ -230,7 +230,8 @@ def calculate_intermittent_times(
   find_latest_time_periods = False,
   min_value = None,
   max_value = None,
-  hours_mode = CONFIG_TARGET_HOURS_MODE_EXACT
+  hours_mode = CONFIG_TARGET_HOURS_MODE_EXACT,
+  context: str = None
 ):
   if (applicable_time_periods is None):
     return []
@@ -250,29 +251,29 @@

   applicable_time_periods = list(filter(lambda rate: (min_value is None or rate["value"] >= min_value) and (max_value is None or rate["value"] <= max_value), applicable_time_periods))

-  _LOGGER.debug(f'{len(applicable_time_periods)} applicable time periods found')
+  _LOGGER.debug(f'{context} - {len(applicable_time_periods)} applicable time periods found')

   if ((hours_mode == CONFIG_TARGET_HOURS_MODE_EXACT and len(applicable_time_periods) >= total_required_time_periods) or hours_mode == CONFIG_TARGET_HOURS_MODE_MAXIMUM):
     applicable_time_periods = applicable_time_periods[:total_required_time_periods]

     # Make sure our rates are in ascending order before returning
-    applicable_time_periods.sort(key=__get_valid_to)
+    applicable_time_periods.sort(key=__get_end)

     return applicable_time_periods
   elif len(applicable_time_periods) >= total_required_time_periods:
     # Make sure our rates are in ascending order before returning
-    applicable_time_periods.sort(key=__get_valid_to)
+    applicable_time_periods.sort(key=__get_end)

     return applicable_time_periods

   return []

-def get_target_time_period_info(current_date: datetime, applicable_rates, offset: str = None):
+def get_target_time_period_info(current_date: datetime, applicable_time_periods, offset: str = None, context: str = None):
   is_active = False
   next_time = None
   current_duration_in_hours = 0
   next_duration_in_hours = 0
-  total_applicable_rates = len(applicable_rates) if applicable_rates is not None else 0
+  total_applicable_time_periods = len(applicable_time_periods) if applicable_time_periods is not None else 0

   overall_total_value = 0
   overall_min_value = None
@@ -286,30 +287,30 @@ def get_target_time_period_info(current_date: datetime, applicable_rates, offset
   next_min_value = None
   next_max_value = None

-  if (total_applicable_rates > 0):
+  if (total_applicable_time_periods > 0):

     # Find the applicable rates that when combine become a continuous block. This is more for
     # intermittent rates.
-    applicable_rates.sort(key=__get_valid_to)
+    applicable_time_periods.sort(key=__get_end)
     applicable_rate_blocks = list()
-    block_valid_from = applicable_rates[0]["start"]
+    block_valid_from = applicable_time_periods[0]["start"]

     total_value = 0
     min_value = None
     max_value = None

-    for index, rate in enumerate(applicable_rates):
-      if (index > 0 and applicable_rates[index - 1]["end"] != rate["start"]):
-        diff = applicable_rates[index - 1]["end"] - block_valid_from
+    for index, rate in enumerate(applicable_time_periods):
+      if (index > 0 and applicable_time_periods[index - 1]["end"] != rate["start"]):
+        diff = applicable_time_periods[index - 1]["end"] - block_valid_from
         minutes = diff.total_seconds() / 60
         periods = minutes / 30
         if periods < 1:
-          _LOGGER.error(f"Less than 1 period discovered. Defaulting to 1 period. Rate start: {rate["start"]}; Applicable rates: {applicable_rates}")
+          _LOGGER.error(f"{context} - Less than 1 period discovered. Defaulting to 1 period. Rate start: {rate["start"]}; Applicable rates: {applicable_time_periods}")
           periods = 1

         applicable_rate_blocks.append({
           "start": block_valid_from,
-          "end": applicable_rates[index - 1]["end"],
+          "end": applicable_time_periods[index - 1]["end"],
           "duration_in_hours": minutes / 60,
           "average_value": total_value / periods,
           "min_value": min_value,
@@ -336,11 +337,11 @@
         overall_max_value = rate["value"]

     # Make sure our final block is added
-    diff = applicable_rates[-1]["end"] - block_valid_from
+    diff = applicable_time_periods[-1]["end"] - block_valid_from
     minutes = diff.total_seconds() / 60
     applicable_rate_blocks.append({
       "start": block_valid_from,
-      "end": applicable_rates[-1]["end"],
+      "end": applicable_time_periods[-1]["end"],
       "duration_in_hours": minutes / 60,
       "average_value": total_value / (minutes / 30),
       "min_value": min_value,
@@ -372,7 +373,7 @@

   return {
     "is_active": is_active,
-    "overall_average_value": round(overall_total_value / total_applicable_rates, 5) if total_applicable_rates > 0 else 0,
+    "overall_average_value": round(overall_total_value / total_applicable_time_periods, 5) if total_applicable_time_periods > 0 else 0,
     "overall_min_value": overall_min_value,
     "overall_max_value": overall_max_value,
     "current_duration_in_hours": current_duration_in_hours,

custom_components/target_timeframes/entities/rolling_target_timeframe.py

Lines changed: 24 additions & 20 deletions

@@ -128,32 +128,33 @@ async def async_update(self):

     should_evaluate = should_evaluate_target_timeframes(current_date, self._target_timeframes, self._config[CONFIG_TARGET_TARGET_TIMES_EVALUATION_MODE])
     if should_evaluate:
-      _LOGGER.debug(f'{len(self._data_source_data) if self._data_source_data is not None else None} time periods found')
+      _LOGGER.debug(f'{self._config[CONFIG_TARGET_NAME]} - {len(self._data_source_data) if self._data_source_data is not None else None} time periods found')

       if len(self._data_source_data) > 0:
         # True by default for backwards compatibility
-        find_last_rates = False
+        find_last_time_periods = False
         if CONFIG_TARGET_LATEST_VALUES in self._config:
-          find_last_rates = self._config[CONFIG_TARGET_LATEST_VALUES]
+          find_last_time_periods = self._config[CONFIG_TARGET_LATEST_VALUES]

         target_hours = float(self._config[CONFIG_TARGET_HOURS])

         find_highest_values = False
         if (CONFIG_TARGET_FIND_HIGHEST_VALUES in self._config):
           find_highest_values = self._config[CONFIG_TARGET_FIND_HIGHEST_VALUES]

-        min_rate = None
+        min_value = None
         if CONFIG_TARGET_MIN_VALUE in self._config:
-          min_rate = self._config[CONFIG_TARGET_MIN_VALUE]
+          min_value = self._config[CONFIG_TARGET_MIN_VALUE]

-        max_rate = None
+        max_value = None
         if CONFIG_TARGET_MAX_VALUE in self._config:
-          max_rate = self._config[CONFIG_TARGET_MAX_VALUE]
+          max_value = self._config[CONFIG_TARGET_MAX_VALUE]

         applicable_time_periods = get_rolling_applicable_time_periods(
           current_local_date,
           self._data_source_data,
-          self._config[CONFIG_ROLLING_TARGET_HOURS_LOOK_AHEAD]
+          self._config[CONFIG_ROLLING_TARGET_HOURS_LOOK_AHEAD],
+          self._config[CONFIG_TARGET_NAME]
         )

         if applicable_time_periods is not None:
@@ -165,28 +166,30 @@ async def async_update(self):
               applicable_time_periods,
               target_hours,
               find_highest_values,
-              find_last_rates,
-              min_rate,
-              max_rate,
+              find_last_time_periods,
+              min_value,
+              max_value,
               weighting,
-              hours_mode = self._config[CONFIG_TARGET_HOURS_MODE]
+              self._config[CONFIG_TARGET_HOURS_MODE],
+              self._config[CONFIG_TARGET_NAME]
             )
           elif (self._config[CONFIG_TARGET_TYPE] == CONFIG_TARGET_TYPE_INTERMITTENT):
             self._target_timeframes = calculate_intermittent_times(
               applicable_time_periods,
               target_hours,
               find_highest_values,
-              find_last_rates,
-              min_rate,
-              max_rate,
-              hours_mode = self._config[CONFIG_TARGET_HOURS_MODE]
+              find_last_time_periods,
+              min_value,
+              max_value,
+              self._config[CONFIG_TARGET_HOURS_MODE],
+              self._config[CONFIG_TARGET_NAME]
             )
           else:
-            _LOGGER.error(f"Unexpected target type: {self._config[CONFIG_TARGET_TYPE]}")
+            _LOGGER.error(f"{self._config[CONFIG_TARGET_NAME]} - Unexpected target type: {self._config[CONFIG_TARGET_TYPE]}")

           self._attributes["target_times"] = self._target_timeframes
           self._attributes["target_times_last_evaluated"] = current_date
-          _LOGGER.debug(f"calculated rates: {self._target_timeframes}")
+          _LOGGER.debug(f"{self._config[CONFIG_TARGET_NAME]} - calculated rates: {self._target_timeframes}")

         self._attributes["time_periods_incomplete"] = applicable_time_periods is None

@@ -210,7 +213,7 @@ async def async_update(self):

     self._state = active_result["is_active"]

-    _LOGGER.debug(f"calculated: {self._state}")
+    _LOGGER.debug(f"{self._config[CONFIG_TARGET_NAME]} - calculated: {self._state}")
     self._attributes = dict_to_typed_dict(self._attributes)

   @callback
@@ -241,7 +244,7 @@ async def async_added_to_hass(self):
       self._attributes = self._config.copy()
       self._target_timeframes = None

-    _LOGGER.debug(f'Restored TargetTimeframesTargetRate state: {self._state}')
+    _LOGGER.debug(f'{self._config[CONFIG_TARGET_NAME]} - Restored state: {self._state}')

     self.async_on_remove(
       self._hass.bus.async_listen(EVENT_DATA_SOURCE, self._async_handle_event)
@@ -250,6 +253,7 @@ async def async_added_to_hass(self):
   @callback
   async def async_update_rolling_target_timeframe_config(self, target_hours=None, target_look_ahead_hours=None, target_offset=None, target_minimum_value=None, target_maximum_value=None, target_weighting=None, persist_changes=False):
     """Update sensors config"""
+    _LOGGER.debug(f"{self._config[CONFIG_TARGET_NAME]} - async_update_rolling_target_timeframe_config called: {self._config}")

     config = dict(self._config)
     if target_hours is not None:
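
Beyond the cosmetic renames (find_last_rates to find_last_time_periods, min_rate/max_rate to min_value/max_value), the sensor now passes its configured name as a final context argument and supplies hours_mode positionally rather than as a keyword, which lines up with the updated helper signatures ending in hours_mode followed by context. A toy signature in roughly that shape (not the module's real function, and the leading parameter names are guesses) to show how the positional call binds:

CONFIG_TARGET_HOURS_MODE_EXACT = "exact"  # placeholder value for the sketch

def calculate(applicable_time_periods, target_hours, find_highest_values=False,
              find_last_time_periods=False, min_value=None, max_value=None,
              weighting=None, hours_mode=CONFIG_TARGET_HOURS_MODE_EXACT,
              context=None):
  # Toy stand-in with the same trailing parameter order as the updated helpers;
  # it only reports where the arguments landed.
  print(f"{context} - hours_mode={hours_mode}, target_hours={target_hours}")
  return []

# Shaped like the updated call site: the last two positional arguments bind to
# hours_mode and context respectively.
calculate([], 3.0, False, False, None, None, None, "maximum", "My rolling target")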
