Skip to content

Commit 19cedc8

Browse files
fix: future cancellation resulted in reset, retry endpoints failing (#817)
* fix: future cancellation resulted in reset, retry endpoints failing * fix: update reset func to check if indexed --------- Co-authored-by: Gaisberg <None> Co-authored-by: Spoked <[email protected]>
1 parent 2676fe8 commit 19cedc8

File tree

5 files changed

+53
-64
lines changed

5 files changed

+53
-64
lines changed

src/program/db/db_functions.py

+12-24
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import os
22
import shutil
3+
from threading import Event
34
from typing import TYPE_CHECKING
45

56
import alembic
@@ -171,15 +172,9 @@ def reset_media_item(item: "MediaItem"):
171172
item.reset()
172173
session.commit()
173174

174-
def reset_streams(item: "MediaItem", active_stream_hash: str = None):
175+
def reset_streams(item: "MediaItem"):
175176
"""Reset streams associated with a MediaItem."""
176177
with db.Session() as session:
177-
item.store_state()
178-
item = session.merge(item)
179-
if active_stream_hash:
180-
stream = session.query(Stream).filter(Stream.infohash == active_stream_hash).first()
181-
if stream:
182-
blacklist_stream(item, stream, session)
183178

184179
session.execute(
185180
delete(StreamRelation).where(StreamRelation.parent_id == item._id)
@@ -188,20 +183,11 @@ def reset_streams(item: "MediaItem", active_stream_hash: str = None):
188183
session.execute(
189184
delete(StreamBlacklistRelation).where(StreamBlacklistRelation.media_item_id == item._id)
190185
)
191-
item.active_stream = {}
192186
session.commit()
193187

194188
def clear_streams(item: "MediaItem"):
195189
"""Clear all streams for a media item."""
196-
with db.Session() as session:
197-
item = session.merge(item)
198-
session.execute(
199-
delete(StreamRelation).where(StreamRelation.parent_id == item._id)
200-
)
201-
session.execute(
202-
delete(StreamBlacklistRelation).where(StreamBlacklistRelation.media_item_id == item._id)
203-
)
204-
session.commit()
190+
reset_streams(item)
205191

206192
def clear_streams_by_id(media_item_id: int):
207193
"""Clear all streams for a media item by the MediaItem _id."""
@@ -358,7 +344,7 @@ def store_item(item: "MediaItem"):
358344
finally:
359345
session.close()
360346

361-
def run_thread_with_db_item(fn, service, program, input_id: int = None):
347+
def run_thread_with_db_item(fn, service, program, input_id, cancellation_event: Event):
362348
from program.media.item import MediaItem
363349
if input_id:
364350
with db.Session() as session:
@@ -378,11 +364,12 @@ def run_thread_with_db_item(fn, service, program, input_id: int = None):
378364
logger.log("PROGRAM", f"Service {service.__name__} emitted {item} from input item {input_item} of type {type(item).__name__}, backing off.")
379365
program.em.remove_id_from_queues(input_item._id)
380366

381-
input_item.store_state()
382-
session.commit()
367+
if not cancellation_event.is_set():
368+
input_item.store_state()
369+
session.commit()
383370

384371
session.expunge_all()
385-
yield res
372+
return res
386373
else:
387374
# Indexing returns a copy of the item, was too lazy to create a copy attr func so this will do for now
388375
indexed_item = next(fn(input_item), None)
@@ -393,9 +380,10 @@ def run_thread_with_db_item(fn, service, program, input_id: int = None):
393380
indexed_item.store_state()
394381
session.delete(input_item)
395382
indexed_item = session.merge(indexed_item)
396-
session.commit()
397-
logger.debug(f"{input_item._id} is now {indexed_item._id} after indexing...")
398-
yield indexed_item._id
383+
if not cancellation_event.is_set():
384+
session.commit()
385+
logger.debug(f"{input_item._id} is now {indexed_item._id} after indexing...")
386+
return indexed_item._id
399387
return
400388
else:
401389
# Content services

src/program/media/item.py

+25-26
Original file line numberDiff line numberDiff line change
@@ -132,8 +132,8 @@ def __init__(self, item: dict | None) -> None:
132132
#Post processing
133133
self.subtitles = item.get("subtitles", [])
134134

135-
def store_state(self) -> None:
136-
new_state = self._determine_state()
135+
def store_state(self, given_state=None) -> None:
136+
new_state = given_state if given_state else self._determine_state()
137137
if self.last_state and self.last_state != new_state:
138138
sse_manager.publish_event("item_update", {"last_state": self.last_state, "new_state": new_state, "item_id": self._id})
139139
self.last_state = new_state
@@ -145,6 +145,10 @@ def is_stream_blacklisted(self, stream: Stream):
145145
session.refresh(self, attribute_names=['blacklisted_streams'])
146146
return stream in self.blacklisted_streams
147147

148+
def blacklist_active_stream(self):
149+
stream = next(stream for stream in self.streams if stream.infohash == self.active_stream["infohash"])
150+
self.blacklist_stream(stream)
151+
148152
def blacklist_stream(self, stream: Stream):
149153
value = blacklist_stream(self, stream)
150154
if value:
@@ -321,20 +325,23 @@ def get_aliases(self) -> dict:
321325
def __hash__(self):
322326
return hash(self._id)
323327

324-
def reset(self, soft_reset: bool = False):
328+
def reset(self):
325329
"""Reset item attributes."""
326330
if self.type == "show":
327331
for season in self.seasons:
328332
for episode in season.episodes:
329-
episode._reset(soft_reset)
330-
season._reset(soft_reset)
333+
episode._reset()
334+
season._reset()
331335
elif self.type == "season":
332336
for episode in self.episodes:
333-
episode._reset(soft_reset)
334-
self._reset(soft_reset)
335-
self.store_state()
337+
episode._reset()
338+
self._reset()
339+
if self.title:
340+
self.store_state(States.Indexed)
341+
else:
342+
self.store_state(States.Requested)
336343

337-
def _reset(self, soft_reset):
344+
def _reset(self):
338345
"""Reset item attributes for rescraping."""
339346
if self.symlink_path:
340347
if Path(self.symlink_path).exists():
@@ -351,16 +358,8 @@ def _reset(self, soft_reset):
351358
self.set("folder", None)
352359
self.set("alternative_folder", None)
353360

354-
if not self.active_stream:
355-
self.active_stream = {}
356-
if not soft_reset:
357-
if self.active_stream.get("infohash", False):
358-
reset_streams(self, self.active_stream["infohash"])
359-
else:
360-
if self.active_stream.get("infohash", False):
361-
stream = next((stream for stream in self.streams if stream.infohash == self.active_stream["infohash"]), None)
362-
if stream:
363-
self.blacklist_stream(stream)
361+
reset_streams(self)
362+
self.active_stream = {}
364363

365364
self.set("active_stream", {})
366365
self.set("symlinked", False)
@@ -371,7 +370,7 @@ def _reset(self, soft_reset):
371370
self.set("symlinked_times", 0)
372371
self.set("scraped_times", 0)
373372

374-
logger.debug(f"Item {self.log_string} reset for rescraping")
373+
logger.debug(f"Item {self.log_string} has been reset")
375374

376375
@property
377376
def log_string(self):
@@ -456,10 +455,10 @@ def _determine_state(self):
456455
return States.Requested
457456
return States.Unknown
458457

459-
def store_state(self) -> None:
458+
def store_state(self, given_state: States =None) -> None:
460459
for season in self.seasons:
461-
season.store_state()
462-
super().store_state()
460+
season.store_state(given_state)
461+
super().store_state(given_state)
463462

464463
def __repr__(self):
465464
return f"Show:{self.log_string}:{self.state.name}"
@@ -527,10 +526,10 @@ class Season(MediaItem):
527526
"polymorphic_load": "inline",
528527
}
529528

530-
def store_state(self) -> None:
529+
def store_state(self, given_state: States = None) -> None:
531530
for episode in self.episodes:
532-
episode.store_state()
533-
super().store_state()
531+
episode.store_state(given_state)
532+
super().store_state(given_state)
534533

535534
def __init__(self, item):
536535
self.type = "season"

src/program/symlink.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,8 @@ def run(self, item: Union[Movie, Show, Season, Episode]):
9494
if not self._should_submit(items):
9595
if item.symlinked_times == 5:
9696
logger.debug(f"Soft resetting {item.log_string} because required files were not found")
97-
item.reset(True)
97+
item.blacklist_active_stream()
98+
item.reset()
9899
yield item
99100
next_attempt = self._calculate_next_attempt(item)
100101
logger.debug(f"Waiting for {item.log_string} to become available, next attempt in {round((next_attempt - datetime.now()).total_seconds())} seconds")

src/routers/secure/items.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -533,7 +533,7 @@ def set_torrent_rd(request: Request, id: int, torrent_id: str) -> SetTorrentRDRe
533533
# downloader = request.app.program.services.get(Downloader).service
534534
# with db.Session() as session:
535535
# item = session.execute(select(MediaItem).where(MediaItem._id == id)).unique().scalar_one()
536-
# item.reset(True)
536+
# item.reset()
537537
# downloader.download_cached(item, hash)
538538
# request.app.program.add_to_queue(item)
539539
# return {"success": True, "message": f"Downloading {item.title} with hash {hash}"}

src/utils/event_manager.py

+13-12
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import os
2+
import threading
23
import traceback
34

45
from datetime import datetime
@@ -8,8 +9,7 @@
89

910
from loguru import logger
1011
from pydantic import BaseModel
11-
from sqlalchemy.orm.exc import StaleDataError
12-
from concurrent.futures import CancelledError, Future, ThreadPoolExecutor
12+
from concurrent.futures import Future, ThreadPoolExecutor
1313

1414
from utils.sse_manager import sse_manager
1515
from program.db.db import db
@@ -37,6 +37,7 @@ def __init__(self):
3737
self._futures: list[Future] = []
3838
self._queued_events: list[Event] = []
3939
self._running_events: list[Event] = []
40+
self._canceled_futures: list[Future] = []
4041
self.mutex = Lock()
4142

4243
def _find_or_create_executor(self, service_cls) -> ThreadPoolExecutor:
@@ -71,7 +72,7 @@ def _process_future(self, future, service):
7172
service (type): The service class associated with the future.
7273
"""
7374
try:
74-
result = next(future.result(), None)
75+
result = future.result()
7576
if future in self._futures:
7677
self._futures.remove(future)
7778
sse_manager.publish_event("event_update", self.get_event_updates())
@@ -81,10 +82,10 @@ def _process_future(self, future, service):
8182
item_id, timestamp = result, datetime.now()
8283
if item_id:
8384
self.remove_event_from_running(item_id)
85+
if future.cancellation_event.is_set():
86+
logger.debug(f"Future with Item ID: {item_id} was cancelled discarding results...")
87+
return
8488
self.add_event(Event(emitted_by=service, item_id=item_id, run_at=timestamp))
85-
except (StaleDataError, CancelledError):
86-
# Expected behavior when cancelling tasks or when the item was removed
87-
return
8889
except Exception as e:
8990
logger.error(f"Error in future for {future}: {e}")
9091
logger.exception(traceback.format_exc())
@@ -166,8 +167,10 @@ def submit_job(self, service, program, event=None):
166167
log_message += f" with Item ID: {item_id}"
167168
logger.debug(log_message)
168169

170+
cancellation_event = threading.Event()
169171
executor = self._find_or_create_executor(service)
170-
future = executor.submit(run_thread_with_db_item, program.all_services[service].run, service, program, item_id)
172+
future = executor.submit(run_thread_with_db_item, program.all_services[service].run, service, program, item_id, cancellation_event)
173+
future.cancellation_event = cancellation_event
171174
if event:
172175
future.event = event
173176
self._futures.append(future)
@@ -186,27 +189,25 @@ def cancel_job(self, item_id: int, suppress_logs=False):
186189
item_id, related_ids = get_item_ids(session, item_id)
187190
ids_to_cancel = set([item_id] + related_ids)
188191

189-
futures_to_remove = []
190192
for future in self._futures:
191193
future_item_id = None
192194
future_related_ids = []
193195

194-
if hasattr(future, 'event') and hasattr(future.event, 'item'):
196+
if hasattr(future, 'event') and hasattr(future.event, 'item_id'):
195197
future_item = future.event.item_id
196198
future_item_id, future_related_ids = get_item_ids(session, future_item)
197199

198200
if future_item_id in ids_to_cancel or any(rid in ids_to_cancel for rid in future_related_ids):
199201
self.remove_id_from_queues(future_item)
200-
futures_to_remove.append(future)
201202
if not future.done() and not future.cancelled():
202203
try:
204+
future.cancellation_event.set()
203205
future.cancel()
206+
self._canceled_futures.append(future)
204207
except Exception as e:
205208
if not suppress_logs:
206209
logger.error(f"Error cancelling future for {future_item.log_string}: {str(e)}")
207210

208-
for future in futures_to_remove:
209-
self._futures.remove(future)
210211

211212
logger.debug(f"Canceled jobs for Item ID {item_id} and its children.")
212213

0 commit comments

Comments
 (0)