
Commit

chore: continue with mips, program starts now.
Gaisberg authored and committed on Oct 3, 2024
1 parent b937e34 commit d98cbc6
Showing 27 changed files with 345 additions and 309 deletions.
8 changes: 4 additions & 4 deletions src/controllers/default.py
@@ -103,8 +103,8 @@ async def get_stats(_: Request):
payload = {}
with db.Session() as session:

- movies_symlinks = session.execute(select(func.count(Movie._id)).where(Movie.symlinked == True)).scalar_one()
- episodes_symlinks = session.execute(select(func.count(Episode._id)).where(Episode.symlinked == True)).scalar_one()
+ movies_symlinks = session.execute(select(func.count(Movie.id)).where(Movie.symlinked == True)).scalar_one()
+ episodes_symlinks = session.execute(select(func.count(Episode.id)).where(Episode.symlinked == True)).scalar_one()
total_symlinks = movies_symlinks + episodes_symlinks

total_movies = session.execute(select(func.count(Movie._id))).scalar_one()
@@ -118,7 +118,7 @@ async def get_stats(_: Request):
select(MediaItem._id)
.where(MediaItem.last_state != States.Completed)
).scalars().all()

incomplete_retries = {}
if _incomplete_items:
media_items = session.query(MediaItem).filter(MediaItem._id.in_(_incomplete_items)).all()
@@ -127,7 +127,7 @@ async def get_stats(_: Request):

states = {}
for state in States:
- states[state] = session.execute(select(func.count(MediaItem._id)).where(MediaItem.last_state == state)).scalar_one()
+ states[state] = session.execute(select(func.count(MediaItem.id)).where(MediaItem.last_state == state)).scalar_one()

payload["total_items"] = total_items
payload["total_movies"] = total_movies
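For reference, the stats endpoint still issues one count query per state after this change. Below is a minimal sketch of an equivalent single grouped query, assuming the renamed `id` primary key and the existing `last_state` column; the function and parameter names are illustrative and not part of the commit.

from sqlalchemy import func, select

def count_items_by_state(session, MediaItem, States):
    # One GROUP BY query instead of len(States) separate count queries;
    # states with no rows fall back to zero.
    rows = session.execute(
        select(MediaItem.last_state, func.count(MediaItem.id))
        .group_by(MediaItem.last_state)
    ).all()
    counts = {state: 0 for state in States}
    counts.update(dict(rows))
    return counts

Either form yields the same `states` payload; the grouped variant simply touches the table once.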
18 changes: 9 additions & 9 deletions src/controllers/items.py
@@ -74,11 +74,11 @@ async def get_items(
if search:
search_lower = search.lower()
if search_lower.startswith("tt"):
- query = query.where(MediaItem.imdb_id == search_lower)
+ query = query.where(MediaItem.ids["imdb_id"] == search_lower)
else:
query = query.where(
(func.lower(MediaItem.title).like(f"%{search_lower}%")) |
- (func.lower(MediaItem.imdb_id).like(f"%{search_lower}%"))
+ (func.lower(MediaItem.ids["imdb_id"]).like(f"%{search_lower}%"))
)

if state:
@@ -168,7 +168,7 @@ async def add_items(
with db.Session() as _:
for id in valid_ids:
item = MediaItem({"imdb_id": id, "requested_by": "riven", "requested_at": datetime.now()})
- request.app.program.em.add_item(item)
+ request.app.program.em.add_item(item, "ApiAdd")

return {"success": True, "message": f"Added {len(valid_ids)} item(s) to the queue"}

@@ -180,7 +180,7 @@
async def get_item(request: Request, id: int):
with db.Session() as session:
try:
- item = session.execute(select(MediaItem).where(MediaItem._id == id)).unique().scalar_one()
+ item = session.execute(select(MediaItem).where(MediaItem.id == id)).unique().scalar_one()
except NoResultFound:
raise HTTPException(status_code=404, detail="Item not found")
return {"success": True, "item": item.to_extended_dict()}
@@ -195,7 +195,7 @@ async def get_items_by_imdb_ids(request: Request, imdb_ids: str):
with db.Session() as session:
items = []
for id in ids:
- item = session.execute(select(MediaItem).where(MediaItem.imdb_id == id)).unique().scalar_one()
+ item = session.execute(select(MediaItem).where(MediaItem.ids["imdb_id"] == id)).unique().scalar_one()
if item:
items.append(item)
return {"success": True, "items": [item.to_extended_dict() for item in items]}
@@ -219,7 +219,7 @@ async def reset_items(
clear_streams(media_item)
reset_media_item(media_item)
except Exception as e:
logger.error(f"Failed to reset item with id {media_item._id}: {str(e)}")
logger.error(f"Failed to reset item with id {media_item.id}: {str(e)}")
continue
except ValueError as e:
raise HTTPException(status_code=400, detail=str(e))
@@ -257,7 +257,7 @@ async def remove_item(request: Request, ids: str):
if not media_items:
raise ValueError("Invalid item ID(s) provided. Some items may not exist.")
for media_item in media_items:
logger.debug(f"Removing item {media_item.title} with ID {media_item._id}")
logger.debug(f"Removing item {media_item.title} with ID {media_item.id}")
request.app.program.em.cancel_job(media_item)
await asyncio.sleep(0.1) # Ensure cancellation is processed
clear_streams(media_item)
@@ -375,7 +375,7 @@ def set_torrent_rd(request: Request, id: int, torrent_id: str):
# items = []
# return_dict = {}
# for id in ids:
- #             items.append(session.execute(select(MediaItem).where(MediaItem._id == id)).unique().scalar_one())
+ #             items.append(session.execute(select(MediaItem).where(MediaItem.id == id)).unique().scalar_one())
# if any(item for item in items if item.type in ["Season", "Episode"]):
# raise HTTPException(status_code=400, detail="Only shows and movies can be manually scraped currently")
# for item in items:
@@ -393,7 +393,7 @@
# async def download(request: Request, id: str, hash: str):
# downloader = request.app.program.services.get(Downloader).service
# with db.Session() as session:
- #         item = session.execute(select(MediaItem).where(MediaItem._id == id)).unique().scalar_one()
+ #         item = session.execute(select(MediaItem).where(MediaItem.id == id)).unique().scalar_one()
# item.reset(True)
# downloader.download_cached(item, hash)
# request.app.program.add_to_queue(item)
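Across this file the lookups move from a dedicated `MediaItem.imdb_id` column to a keyed `MediaItem.ids["imdb_id"]` expression, which reads as if external IDs now live in a mapped container; the column definition itself is not part of this diff. A minimal sketch of how that kind of lookup works when the container is a SQLAlchemy JSON column (the model, values, and `.as_string()` extraction here are assumptions for illustration):

from sqlalchemy import JSON, String, select
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

class Base(DeclarativeBase):
    pass

class Item(Base):
    __tablename__ = "item"
    id: Mapped[int] = mapped_column(primary_key=True)
    title: Mapped[str] = mapped_column(String)
    # Hypothetical shape: {"imdb_id": "tt0133093", "tmdb_id": 603}
    ids: Mapped[dict] = mapped_column(JSON, default=dict)

# Indexing the JSON column yields a path expression; extracting it as text
# makes the comparison behave like plain string equality on most backends.
stmt = select(Item).where(Item.ids["imdb_id"].as_string() == "tt0133093")

The `em.add_item(item, "ApiAdd")` hunk also appears to start tagging queued items with the name of the submitting service, though the EventManager signature is not shown here.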
2 changes: 1 addition & 1 deletion src/controllers/scrape.py
@@ -33,7 +33,7 @@ async def scrape(request: Request, imdb_id: str, season: int = None, episode: in
with db.Session() as session:
media_item = session.execute(
select(MediaItem).where(
- MediaItem.imdb_id == imdb_id,
+ MediaItem.ids["imdb_id"] == imdb_id,
MediaItem.type.in_(["movie", "show"])
)
).unique().scalar_one_or_none()
4 changes: 2 additions & 2 deletions src/program/content/listrr.py
@@ -68,8 +68,8 @@ def run(self) -> Generator[MediaItem, None, None]:

listrr_items = movie_items + show_items
non_existing_items = _filter_existing_items(listrr_items)
- new_non_recurring_items = [item for item in non_existing_items if item.imdb_id not in self.recurring_items]
- self.recurring_items.update([item.imdb_id for item in new_non_recurring_items])
+ new_non_recurring_items = [item for item in non_existing_items if item.ids["imdb_id"] not in self.recurring_items]
+ self.recurring_items.update([item.ids["imdb_id"] for item in new_non_recurring_items])

if new_non_recurring_items:
logger.info(f"Fetched {len(new_non_recurring_items)} new items from Listrr")
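The same recurring-items guard is updated in the mdblist, overseerr, plex_watchlist, and trakt diffs below. Stripped of service specifics, the pattern is roughly the following sketch; the helper and parameter names are taken from context, not defined in this diff.

def yield_new_items(fetched_items, recurring_ids, filter_existing):
    # Drop items already persisted in the database, then drop items this
    # service has already submitted on a previous run, remembering the rest
    # by IMDb ID so the next run skips them too.
    non_existing = filter_existing(fetched_items)
    new_items = [
        item for item in non_existing
        if item.ids["imdb_id"] not in recurring_ids
    ]
    recurring_ids.update(item.ids["imdb_id"] for item in new_items)
    return new_items

Each content service keeps its own `recurring_items` set, while `_filter_existing_items` guards against re-adding anything already persisted.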
10 changes: 5 additions & 5 deletions src/program/content/mdblist.py
@@ -54,18 +54,18 @@ def run(self) -> Generator[MediaItem, None, None]:
else:
items = list_items_by_url(list, self.settings.api_key)
for item in items:
- if hasattr(item, "error") or not item or item.imdb_id is None:
+ if hasattr(item, "error") or not item or item.ids["imdb_id"] is None:
continue
- if item.imdb_id.startswith("tt"):
+ if item.ids["imdb_id"].startswith("tt"):
items_to_yield.append(MediaItem(
{"imdb_id": item.imdb_id, "requested_by": self.key}
{"imdb_id": item.ids["imdb_id"], "requested_by": self.key}
))
except RateLimitExceeded:
pass

non_existing_items = _filter_existing_items(items_to_yield)
- new_non_recurring_items = [item for item in non_existing_items if item.imdb_id not in self.recurring_items and isinstance(item, MediaItem)]
- self.recurring_items.update([item.imdb_id for item in new_non_recurring_items])
+ new_non_recurring_items = [item for item in non_existing_items if item.ids["imdb_id"] not in self.recurring_items and isinstance(item, MediaItem)]
+ self.recurring_items.update([item.ids["imdb_id"] for item in new_non_recurring_items])

if new_non_recurring_items:
logger.info(f"Found {len(new_non_recurring_items)} new items to fetch")
4 changes: 2 additions & 2 deletions src/program/content/overseerr.py
@@ -59,8 +59,8 @@ def run(self):

overseerr_items: list[MediaItem] = self.get_media_requests()
non_existing_items = _filter_existing_items(overseerr_items)
- new_non_recurring_items = [item for item in non_existing_items if item.imdb_id not in self.recurring_items and isinstance(item, MediaItem)]
- self.recurring_items.update([item.imdb_id for item in new_non_recurring_items])
+ new_non_recurring_items = [item for item in non_existing_items if item.ids["imdb_id"] not in self.recurring_items and isinstance(item, MediaItem)]
+ self.recurring_items.update([item.ids["imdb_id"] for item in new_non_recurring_items])

if self.settings.use_webhook:
logger.debug("Webhook is enabled. Running Overseerr once before switching to webhook only mode")
4 changes: 2 additions & 2 deletions src/program/content/plex_watchlist.py
@@ -71,8 +71,8 @@ def run(self) -> Generator[MediaItem, None, None]:
plex_items: set[str] = set(watchlist_items) | set(rss_items)
items_to_yield: list[MediaItem] = [MediaItem({"imdb_id": imdb_id, "requested_by": self.key}) for imdb_id in plex_items if imdb_id and imdb_id.startswith("tt")]
non_existing_items = _filter_existing_items(items_to_yield)
- new_non_recurring_items = [item for item in non_existing_items if item.imdb_id not in self.recurring_items and isinstance(item, MediaItem)]
- self.recurring_items.update([item.imdb_id for item in new_non_recurring_items])
+ new_non_recurring_items = [item for item in non_existing_items if item.ids["imdb_id"] not in self.recurring_items and isinstance(item, MediaItem)]
+ self.recurring_items.update([item.ids["imdb_id"] for item in new_non_recurring_items])

if new_non_recurring_items:
logger.info(f"Found {len(new_non_recurring_items)} new items to fetch")
4 changes: 2 additions & 2 deletions src/program/content/trakt.py
@@ -97,10 +97,10 @@ def run(self):
new_non_recurring_items = [
item
for item in non_existing_items
- if item.imdb_id not in self.recurring_items
+ if item.ids["imdb_id"] not in self.recurring_items
and isinstance(item, MediaItem)
]
- self.recurring_items.update(item.imdb_id for item in new_non_recurring_items)
+ self.recurring_items.update(item.ids["imdb_id"] for item in new_non_recurring_items)

if new_non_recurring_items:
logger.log("TRAKT", f"Found {len(new_non_recurring_items)} new items to fetch")
(Diffs for the remaining 19 changed files were not loaded on this page.)
