From 3159c8e8a751d55a7f5a7e64aaaffdab2fb6e830 Mon Sep 17 00:00:00 2001 From: Gav Date: Sat, 11 Oct 2025 01:11:59 +1000 Subject: [PATCH 01/11] More drafting --- .../providers/dlna-server/__init__.py | 1033 +++++++++++++++++ .../providers/dlna-server/manifest.json | 12 + music_assistant/providers/dlna-server/ssdp.py | 339 ++++++ music_assistant/providers/webdav/provider.py | 348 ++++++ 4 files changed, 1732 insertions(+) create mode 100644 music_assistant/providers/dlna-server/__init__.py create mode 100644 music_assistant/providers/dlna-server/manifest.json create mode 100644 music_assistant/providers/dlna-server/ssdp.py create mode 100644 music_assistant/providers/webdav/provider.py diff --git a/music_assistant/providers/dlna-server/__init__.py b/music_assistant/providers/dlna-server/__init__.py new file mode 100644 index 0000000000..972ebaf542 --- /dev/null +++ b/music_assistant/providers/dlna-server/__init__.py @@ -0,0 +1,1033 @@ +"""DLNA Server Plugin Provider for Music Assistant. + +This plugin provider exposes Music Assistant as a DLNA/UPnP Media Server, +allowing DLNA clients on the network to browse and play the MA library. 
+""" + +from __future__ import annotations + +import urllib.parse +import uuid +from typing import TYPE_CHECKING, cast + +from aiohttp import web +from defusedxml import ElementTree as DefusedET +from music_assistant_models.enums import MediaType +from music_assistant_models.errors import MediaNotFoundError, SetupFailedError + +from music_assistant.models.music_provider import MusicProvider +from music_assistant.models.plugin import PluginProvider + +from .ssdp import SSDPServer + +if TYPE_CHECKING: + from xml.etree.ElementTree import Element + + from music_assistant_models.config_entries import ProviderConfig + from music_assistant_models.media_items import Album, Artist, Track + from music_assistant_models.provider import ProviderManifest + + from music_assistant.mass import MusicAssistant + from music_assistant.models import ProviderInstanceType + +SUPPORTED_FEATURES = set() # No special features needed for this plugin + +# DLNA/UPnP constants +DEVICE_TYPE = "urn:schemas-upnp-org:device:MediaServer:1" +SERVICE_CONTENT_DIRECTORY = "urn:schemas-upnp-org:service:ContentDirectory:1" +SERVICE_CONNECTION_MANAGER = "urn:schemas-upnp-org:service:ConnectionManager:1" + +# DLNA object IDs +ROOT_ID = "0" +ARTISTS_CONTAINER_ID = "artists" +ALBUMS_CONTAINER_ID = "albums" +TRACKS_CONTAINER_ID = "tracks" + +# TODO: Implement these containers in future versions + +# PLAYLISTS_CONTAINER_ID = "playlists" +# RADIO_CONTAINER_ID = "radio" + + +async def setup( + mass: MusicAssistant, manifest: ProviderManifest, config: ProviderConfig +) -> ProviderInstanceType: + """Initialize provider(instance) with given configuration.""" + provider = DLNAServerProvider(mass, manifest, config, SUPPORTED_FEATURES) + await provider.handle_async_init() + return provider + + +async def get_config_entries( + mass: MusicAssistant, # noqa: ARG001 + instance_id: str | None = None, # noqa: ARG001 + action: str | None = None, # noqa: ARG001 + values: dict[str, str] | None = None, # noqa: ARG001 +) -> tuple: 
+ """Return Config entries for this provider.""" + # No configuration needed - use standard everything + return () + + +class DLNAServerProvider(PluginProvider): + """DLNA Server Plugin Provider for Music Assistant.""" + + def __init__(self, *args, **kwargs) -> None: + """Initialize the DLNA server provider.""" + super().__init__(*args, **kwargs) + self._ssdp_server = None + self._server_uuid: str = str(uuid.uuid4()) + self._friendly_name = "Music Assistant" + self._routes_registered = False + + async def handle_async_init(self) -> None: + """Handle async initialization of the provider.""" + # This is called right after setup, before loaded_in_mass + + async def loaded_in_mass(self) -> None: + """Call after the provider has been loaded into Music Assistant.""" + try: + # Register HTTP endpoints with the streams controller + await self._register_http_routes() + + # Start SSDP server for device discovery + await self._start_ssdp_server() + + self.logger.info( + "DLNA Server started successfully - " + "Music Assistant is now discoverable as a DLNA Media Server" + ) + except Exception as err: + self.logger.exception("Failed to start DLNA server") + raise SetupFailedError("Failed to start DLNA server") from err + + async def unload(self, is_removed: bool = False) -> None: + """Handle unload/close of the provider.""" + # Stop SSDP server + if self._ssdp_server: + try: + await self._ssdp_server.stop() + except Exception as err: + self.logger.warning("Error stopping SSDP server: %s", err) + + # Unregister HTTP routes + if self._routes_registered: + try: + self.mass.streams.unregister_dynamic_route("/dlna/description.xml") + self.mass.streams.unregister_dynamic_route("/dlna/ContentDirectory.xml") + self.mass.streams.unregister_dynamic_route("/dlna/ConnectionManager.xml") + self.mass.streams.unregister_dynamic_route("/dlna/control/ContentDirectory") + self.mass.streams.unregister_dynamic_route("/dlna/control/ConnectionManager") + 
self.mass.streams.unregister_dynamic_route("/dlna/track/*") + # self.mass.streams.unregister_dynamic_route("/dlna/event/ContentDirectory") + except Exception as err: + self.logger.warning("Error unregistering routes: %s", err) + + self.logger.info("DLNA Server stopped") + + async def _register_http_routes(self) -> None: + """Register HTTP routes with the streams controller.""" + # Device description + self.mass.streams.register_dynamic_route( + "/dlna/description.xml", + self._handle_device_description, + "GET", + ) + + # Service descriptions + self.mass.streams.register_dynamic_route( + "/dlna/ContentDirectory.xml", + self._handle_content_directory_scpd, + "GET", + ) + self.mass.streams.register_dynamic_route( + "/dlna/ConnectionManager.xml", + self._handle_connection_manager_scpd, + "GET", + ) + + # Control endpoints (SOAP) + self.mass.streams.register_dynamic_route( + "/dlna/control/ContentDirectory", + self._handle_content_directory_control, + "POST", + ) + self.mass.streams.register_dynamic_route( + "/dlna/control/ConnectionManager", + self._handle_connection_manager_control, + "POST", + ) + + # Media streaming endpoint + self.mass.streams.register_dynamic_route( + "/dlna/track/*", + self._handle_track_stream, + "GET", + ) + self.mass.streams.register_dynamic_route( + "/dlna/track/*", + self._handle_track_stream, + "HEAD", + ) + # Event subscription endpoint (return not implemented) + self.mass.streams.register_dynamic_route( + "/dlna/event/ContentDirectory", + self._handle_event_subscription, + "SUBSCRIBE", + ) + self._routes_registered = True + self.logger.debug("DLNA HTTP routes registered") + + async def _handle_event_subscription(self, request: web.Request) -> web.Response: + """Handle event subscription requests (not implemented).""" + self.logger.debug("Event subscription requested but not implemented") + return web.Response(status=501, text="Event subscription not implemented") + + async def _start_ssdp_server(self) -> None: + """Start the SSDP server 
for device discovery.""" + base_url = self.mass.streams.base_url + location = f"{base_url}/dlna/description.xml" + + self._ssdp_server = SSDPServer( + location=location, + server_uuid=self._server_uuid, + friendly_name=self._friendly_name, + logger=self.logger, + ) + + await self._ssdp_server.start() + self.logger.info("SSDP advertisement started - devices can now discover this server") + + # ==================== HTTP Handlers ==================== + + async def _handle_device_description(self, request: web.Request) -> web.Response: + """Handle device description request.""" + base_url = self.mass.streams.base_url + + device_xml = f""" + + + 1 + 0 + + + {DEVICE_TYPE} + {self._friendly_name} + Music Assistant + https://music-assistant.io + Music Assistant DLNA Media Server + Music Assistant + 1.0 + https://music-assistant.io + 1 + uuid:{self._server_uuid} + {base_url} + + + {SERVICE_CONTENT_DIRECTORY} + urn:upnp-org:serviceId:ContentDirectory + /dlna/ContentDirectory.xml + /dlna/control/ContentDirectory + /dlna/event/ContentDirectory + + + {SERVICE_CONNECTION_MANAGER} + urn:upnp-org:serviceId:ConnectionManager + /dlna/ConnectionManager.xml + /dlna/control/ConnectionManager + /dlna/event/ConnectionManager + + + +""" + + return web.Response( + text=device_xml, + content_type="text/xml", + charset="utf-8", + ) + + async def _handle_content_directory_scpd(self, request: web.Request) -> web.Response: + """Handle ContentDirectory service description request.""" + scpd_xml = """ + + + 1 + 0 + + + + Browse + + + ObjectID + in + A_ARG_TYPE_ObjectID + + + BrowseFlag + in + A_ARG_TYPE_BrowseFlag + + + Filter + in + A_ARG_TYPE_Filter + + + StartingIndex + in + A_ARG_TYPE_Index + + + RequestedCount + in + A_ARG_TYPE_Count + + + SortCriteria + in + A_ARG_TYPE_SortCriteria + + + Result + out + A_ARG_TYPE_Result + + + NumberReturned + out + A_ARG_TYPE_Count + + + TotalMatches + out + A_ARG_TYPE_Count + + + UpdateID + out + A_ARG_TYPE_UpdateID + + + + + GetSystemUpdateID + + + Id + 
out + SystemUpdateID + + + + + + + A_ARG_TYPE_ObjectID + string + + + A_ARG_TYPE_Result + string + + + A_ARG_TYPE_BrowseFlag + string + + BrowseMetadata + BrowseDirectChildren + + + + A_ARG_TYPE_Filter + string + + + A_ARG_TYPE_SortCriteria + string + + + A_ARG_TYPE_Index + ui4 + + + A_ARG_TYPE_Count + ui4 + + + A_ARG_TYPE_UpdateID + ui4 + + + SystemUpdateID + ui4 + + +""" + + return web.Response( + text=scpd_xml, + content_type="text/xml", + charset="utf-8", + ) + + async def _handle_connection_manager_scpd(self, request: web.Request) -> web.Response: + """Handle ConnectionManager service description request.""" + scpd_xml = """ + + + 1 + 0 + + + + GetProtocolInfo + + + Source + out + SourceProtocolInfo + + + Sink + out + SinkProtocolInfo + + + + + + + SourceProtocolInfo + string + + + SinkProtocolInfo + string + + +""" + + return web.Response( + text=scpd_xml, + content_type="text/xml", + charset="utf-8", + ) + + async def _handle_content_directory_control(self, request: web.Request) -> web.Response: + """Handle ContentDirectory SOAP control requests.""" + try: + body = await request.text() + # Parse SOAP request using defusedxml + root = DefusedET.fromstring(body) + + # Find the action + action_elem = None + action = None + for elem in root.iter(): + if elem.tag.endswith("Browse"): + action_elem = elem + action = "Browse" + break + if elem.tag.endswith("GetSystemUpdateID"): + action = "GetSystemUpdateID" + action_elem = elem + break + + if action_elem is None or len(action_elem) == 0: # Fixed the deprecation warning too + return await self._soap_error(401, "Invalid Action") + + # Handle the action + if action == "Browse": + return await self._handle_browse_action(action_elem) + if action == "GetSystemUpdateID": + return await self._handle_get_system_update_id() + + return await self._soap_error(401, "Invalid Action") + + except Exception as err: + self.logger.exception("Error handling ContentDirectory control request") + return await self._soap_error(500, 
str(err)) + + async def _handle_connection_manager_control(self, request: web.Request) -> web.Response: + """Handle ConnectionManager SOAP control requests.""" + try: + body = await request.text() + + # Parse SOAP request + root = DefusedET.fromstring(body) + + # Check for GetProtocolInfo action + for elem in root.iter(): + if elem.tag.endswith("GetProtocolInfo"): + return await self._handle_get_protocol_info() + + return await self._soap_error(401, "Invalid Action") + + except Exception as err: + self.logger.exception("Error handling ConnectionManager control request") + return await self._soap_error(500, str(err)) + + async def _handle_browse_action(self, action_elem: Element) -> web.Response: + """Handle Browse SOAP action.""" + # Extract parameters + object_id = self._get_soap_param(action_elem, "ObjectID") or ROOT_ID + browse_flag = self._get_soap_param(action_elem, "BrowseFlag") or "BrowseDirectChildren" + starting_index = int(self._get_soap_param(action_elem, "StartingIndex") or "0") + requested_count = int(self._get_soap_param(action_elem, "RequestedCount") or "0") + + self.logger.debug( + "Browse: ObjectID=%s, BrowseFlag=%s, StartingIndex=%d, RequestedCount=%d", + object_id, + browse_flag, + starting_index, + requested_count, + ) + + # Generate DIDL response + if browse_flag == "BrowseMetadata": + didl_xml, number_returned, total_matches = await self._get_object_metadata(object_id) + else: # BrowseDirectChildren + didl_xml, number_returned, total_matches = await self._get_children( + object_id, starting_index, requested_count + ) + + # Log DIDL for album browsing + if object_id.startswith("album_") and browse_flag == "BrowseDirectChildren": + self.logger.error("=== MA ALBUM DIDL ===\n%s", didl_xml) + + # Build SOAP response + response_xml = f""" + + + + {self._escape_xml(didl_xml)} + {number_returned} + {total_matches} + 0 + + +""" + + return web.Response( + text=response_xml, + content_type="text/xml", + charset="utf-8", + ) + + async def 
_handle_get_system_update_id(self) -> web.Response: + """Handle GetSystemUpdateID action.""" + response_xml = f""" + + + + 0 + + +""" + + return web.Response( + text=response_xml, + content_type="text/xml", + charset="utf-8", + ) + + async def _handle_get_protocol_info(self) -> web.Response: + """Handle GetProtocolInfo action.""" + # Advertise supported formats + protocol_info = ( + "http-get:*:audio/mpeg:*," + "http-get:*:audio/mp4:*," + "http-get:*:audio/flac:*," + "http-get:*:audio/x-flac:*" + ) + + response_xml = f""" + + + + {protocol_info} + + + +""" + + return web.Response( + text=response_xml, + content_type="text/xml", + charset="utf-8", + ) + + async def _handle_track_stream(self, request: web.Request) -> web.Response: + """Handle track streaming request.""" + # Parse path: /dlna/track/{provider}/{item_id}.{fmt} + path_parts = request.path.split("/") + if len(path_parts) < 5: + return web.Response(status=400, text="Invalid path") + + provider_param = path_parts[3] + filename = path_parts[4] + item_id, fmt = filename.rsplit(".", 1) + + self.logger.debug( + "Stream request: provider=%s, item_id=%s, format=%s", provider_param, item_id, fmt + ) + + try: + # Get the track + track = await self.mass.music.tracks.get_library_item(item_id) + + # Get provider mapping + provider_instance, prov_item_id = await self.mass.music.tracks.get_provider_mapping( + track + ) + + # Get the provider + prov = self.mass.get_provider(provider_instance) + if not prov or not isinstance(prov, MusicProvider): + return web.Response(status=404, text="Provider not found") + + # Get stream details + streamdetails = await prov.get_stream_details(prov_item_id, MediaType.TRACK) + + # Get the absolute path from the FileSystemItem + if hasattr(streamdetails.data, "absolute_path"): + file_path = streamdetails.data.absolute_path + else: + # Fallback for non-filesystem providers + return web.Response(status=400, text="Only local files supported") + + self.logger.debug("Serving file: %s", 
file_path) + + # Serve the file + return cast( + "web.Response", web.FileResponse(path=file_path, headers={"Accept-Ranges": "bytes"}) + ) + + except MediaNotFoundError: + return web.Response(status=404, text="Track not found") + except Exception as err: + self.logger.exception("Error streaming track") + return web.Response(status=500, text=str(err)) + + # ==================== DIDL/XML Helpers ==================== + + async def _get_object_metadata(self, object_id: str) -> tuple[str, int, int]: + """Get metadata for a specific object.""" + if object_id == ROOT_ID: + didl_xml = self._create_root_container() + return didl_xml, 1, 1 + + # Parse object_id to determine type + if object_id.startswith("artist_"): + artist_id = object_id[7:] # Remove "artist_" prefix + try: + artist = await self.mass.music.artists.get_library_item(artist_id) + didl_xml = self._create_artist_container(artist) + return didl_xml, 1, 1 + except MediaNotFoundError: + return self._create_empty_didl(), 0, 0 + + if object_id.startswith("album_"): + album_id = object_id[6:] # Remove "album_" prefix + try: + album = await self.mass.music.albums.get_library_item(album_id) + didl_xml = self._create_album_container(album) + return didl_xml, 1, 1 + except MediaNotFoundError: + return self._create_empty_didl(), 0, 0 + + if object_id.startswith("track_"): + track_id = object_id[6:] # Remove "track_" prefix + try: + track = await self.mass.music.tracks.get_library_item(track_id) + didl_xml = await self._create_track_item(track) + return didl_xml, 1, 1 + except MediaNotFoundError: + return self._create_empty_didl(), 0, 0 + + return self._create_empty_didl(), 0, 0 + + async def _get_children( + self, parent_id: str, starting_index: int, requested_count: int + ) -> tuple[str, int, int]: + """Get children of a container.""" + limit = requested_count if requested_count > 0 else 500 + offset = starting_index + + if parent_id == ROOT_ID: + # Root level: return Artists, Albums, and Tracks containers + containers = 
[ + self._create_artists_root_container(), + self._create_albums_root_container(), + self._create_tracks_root_container(), + ] + didl_xml = self._wrap_didl_items(containers) + return didl_xml, len(containers), len(containers) + + if parent_id == ARTISTS_CONTAINER_ID: + # Return all artists + artists = await self.mass.music.artists.library_items( + limit=limit, offset=offset, order_by="sort_name" + ) + artist_items = [self._create_artist_container(artist) for artist in artists] + total = await self.mass.music.artists.library_count() + didl_xml = self._wrap_didl_items(artist_items) + return didl_xml, len(artist_items), total + + if parent_id == ALBUMS_CONTAINER_ID: + # Return all albums + albums = await self.mass.music.albums.library_items( + limit=limit, offset=offset, order_by="sort_name" + ) + album_items = [ + self._create_album_container(album, ALBUMS_CONTAINER_ID) + for album in albums # type: ignore + ] + total = await self.mass.music.albums.library_count() + didl_xml = self._wrap_didl_items(album_items) + return didl_xml, len(album_items), total + + if parent_id == TRACKS_CONTAINER_ID: + # Return all tracks + tracks = await self.mass.music.tracks.library_items( + limit=limit, offset=offset, order_by="sort_name" + ) + track_items = [] + for track in tracks: + item_xml = await self._create_track_item(track) + track_items.append(item_xml) + total = await self.mass.music.tracks.library_count() + didl_xml = self._wrap_didl_items(track_items) + return didl_xml, len(track_items), total + + if parent_id.startswith("artist_"): + # Return albums for this artist + artist_id = parent_id[7:] + albums = await self.mass.music.artists.albums( + artist_id, "library", in_library_only=True + ) + # Apply pagination manually since albums() doesn't support it + paginated_albums = ( + list(albums)[offset : offset + limit] if limit > 0 else list(albums)[offset:] + ) + album_items = [self._create_album_container(album) for album in paginated_albums] + didl_xml = 
self._wrap_didl_items(album_items) + return didl_xml, len(album_items), len(albums) + + if parent_id.startswith("album_"): + # Return tracks for this album + album_id = parent_id[6:] + tracks = await self.mass.music.albums.tracks(album_id, "library", in_library_only=True) + # Apply pagination manually + paginated_tracks = ( + list(tracks)[offset : offset + limit] if limit > 0 else list(tracks)[offset:] + ) + track_items = [] + for track in paginated_tracks: + item_xml = await self._create_track_item(track) + track_items.append(item_xml) + didl_xml = self._wrap_didl_items(track_items) + return didl_xml, len(track_items), len(tracks) + + return self._create_empty_didl(), 0, 0 + + def _create_root_container(self) -> str: + """Create DIDL-Lite XML for root container.""" + return """ + Music Assistant + object.container +""" + + def _create_artists_root_container(self) -> str: + """Create DIDL-Lite XML for Artists root container.""" + return f""" + Artists + object.container +""" + + def _create_albums_root_container(self) -> str: + """Create DIDL-Lite XML for Albums root container.""" + return f""" + Albums + object.container + """ + + def _create_tracks_root_container(self) -> str: + """Create DIDL-Lite XML for Tracks root container.""" + return f""" + Tracks + object.container + """ + + def _create_artist_container(self, artist: Artist) -> str: + """Create DIDL-Lite XML for an artist container.""" + artist_id = f"artist_{artist.item_id}" + title = self._escape_xml(artist.name) + + # Add album art if available + album_art_xml = "" + if artist.image and artist.image.path: + image_url = self._get_image_url(artist.image) + album_art_xml = f"{self._escape_xml(image_url)}" + + return f""" + {title} + object.container.person.musicArtist + {album_art_xml} +""" + + def _create_album_container(self, album: Album, parent_id: str | None = None) -> str: + """Create DIDL-Lite XML for an album container.""" + album_id = f"album_{album.item_id}" + + # Use provided parent_id, or 
default to artist parent + if parent_id is None: + parent_id = ( + f"artist_{album.artists[0].item_id}" if album.artists else ARTISTS_CONTAINER_ID + ) + title = self._escape_xml(album.name) + + # Add album art if available + album_art_xml = "" + if album.image and album.image.path: + image_url = self._get_image_url(album.image) + album_art_xml = f"{self._escape_xml(image_url)}" + + # Add artist + artist_xml = "" + if album.artists: + artist_name = self._escape_xml(album.artists[0].name) + artist_xml = f"{artist_name}" + + return f""" + {title} + object.container.album.musicAlbum + {artist_xml} + {album_art_xml} +""" + + async def _create_track_item(self, track: Track) -> str: # noqa: PLR0915 + """Create DIDL-Lite XML for a track item.""" + track_id = f"track_{track.item_id}" + parent_id = f"album_{track.album.item_id}" if track.album else ROOT_ID + title = self._escape_xml(track.name) + + # Get provider details for file extension and metadata + provider_instance, prov_item_id = await self.mass.music.tracks.get_provider_mapping(track) + prov = self.mass.get_provider(provider_instance) + + # Default values + file_ext = "mp3" + mime_type = "audio/mpeg" + file_size = 0 + + if prov and isinstance(prov, MusicProvider): + try: + streamdetails = await prov.get_stream_details(prov_item_id, MediaType.TRACK) + if hasattr(streamdetails.data, "filename"): + # Extract extension from filename + filename = streamdetails.data.filename + file_ext = filename.rsplit(".", 1)[-1].lower() + # Map extension to mime type + mime_type = { + "mp3": "audio/mpeg", + "m4a": "audio/mp4", + "flac": "audio/flac", + "wav": "audio/wav", + "ogg": "audio/ogg", + }.get(file_ext, "audio/mpeg") + + # Get file size + if hasattr(streamdetails.data, "file_size"): + file_size = streamdetails.data.file_size + + except Exception as err: + self.logger.debug("Could not determine file type, using defaults: %s", err) + + # Build stream URL with correct extension + stream_url = 
f"{self.mass.streams.base_url}/dlna/track/library/{track.item_id}.{file_ext}" + + # Build protocol info + if file_ext == "m4a": + protocol_info = ( + "http-get:*:audio/mp4:" + "DLNA.ORG_PN=AAC_ISO_320;DLNA.ORG_OP=01;" + "DLNA.ORG_FLAGS=01700000000000000000000000000000" + ) + else: + protocol_info = f"http-get:*:{mime_type}:*" + + # Get metadata for res attributes + bitrate = 0 + sample_rate = 44100 + channels = 2 + + if track.metadata: + bitrate = getattr(track.metadata, "bitrate", 0) or 0 + sample_rate = getattr(track.metadata, "sample_rate", 44100) or 44100 + channels = getattr(track.metadata, "channels", 2) or 2 + + # Add album art if available + album_art_xml = "" + if track.image and track.image.path: + self.logger.debug( + "Track %s image path: %s (remotely_accessible: %s)", + track.name, + track.image.path, + track.image.remotely_accessible, + ) + image_url = self._get_image_url(track.image) + if image_url: + album_art_xml = ( + f"{self._escape_xml(image_url)}" + ) + + # Add artist + artist_xml = "" + creator_xml = "" + if track.artists: + artist_name = self._escape_xml(track.artists[0].name) + artist_xml = f"{artist_name}" + creator_xml = f"{artist_name}" + + # Add album + album_xml = "" + if track.album: + album_name = self._escape_xml(track.album.name) + album_xml = f"{album_name}" + + # Add track number + track_number_xml = "" + if track.position: + track_number_xml = ( + f"{track.position}" + ) + else: + self.logger.debug("Track %s has no position: %s", track.name, track.position) + + # Add release year (optional) + date_xml = "" + if track.album: + album_year = getattr(track.album, "year", None) + if album_year: + date_xml = f"{album_year}-01-01" + + # Duration in H:MM:SS format + duration_str = self._format_duration(track.duration) + + # Build res element with all attributes + file_size_attr = f'size="{file_size}" ' if file_size else "" + + res_element = ( + f'' + f"{self._escape_xml(stream_url)}" + ) + + return f""" + {title} + 
object.item.audioItem.musicTrack + {date_xml} + {artist_xml} + {creator_xml} + {album_xml} + {track_number_xml} + {album_art_xml} + {res_element} + """ + + def _wrap_didl_items(self, items: list[str]) -> str: + """Wrap DIDL items in DIDL-Lite container.""" + items_xml = "\n".join(items) + return f""" +{items_xml} +""" + + def _create_empty_didl(self) -> str: + """Create empty DIDL-Lite XML.""" + return """ +""" + + def _get_image_url(self, image) -> str: + """Get the URL for an image.""" + if image.remotely_accessible: + return image.path + + # For local images with relative paths, construct absolute path + image_path = image.path + + # If it's not already absolute, prepend the filesystem base path + if not image_path.startswith("/"): + # Find the filesystem provider to get the base path + for provider in self.mass.music.providers: + # Check if this provider has a base_path attribute (filesystem providers do) + if hasattr(provider, "base_path"): + base_path = provider.base_path # type: ignore[attr-defined] + image_path = f"{base_path}/{image_path}" + break + + encoded_path = urllib.parse.quote(image_path) + return f"{self.mass.webserver.base_url}/imageproxy?path={encoded_path}" + + def _format_duration(self, duration_seconds: int) -> str: + """Format duration in seconds to H:MM:SS format.""" + if not duration_seconds: + return "0:00:00" + hours = duration_seconds // 3600 + minutes = (duration_seconds % 3600) // 60 + seconds = duration_seconds % 60 + return f"{hours}:{minutes:02d}:{seconds:02d}" + + def _escape_xml(self, text: str) -> str: + """Escape XML special characters.""" + if not text: + return "" + return ( + text.replace("&", "&") + .replace("<", "<") + .replace(">", ">") + .replace('"', """) + .replace("'", "'") + ) + + def _get_soap_param(self, action_elem: Element, param_name: str) -> str | None: + """Extract a parameter from SOAP action element.""" + for elem in action_elem: + if elem.tag.endswith(param_name): + return elem.text + return None + + async 
async def _soap_error(self, error_code: int, error_description: str) -> web.Response:
    """Build a SOAP fault response carrying a UPnPError element.

    Follows the UPnP Device Architecture error convention: HTTP 500 with an
    ``s:Fault`` whose ``detail`` holds the numeric UPnP error code.

    Args:
        error_code: UPnP error code (e.g. 401 "Invalid Action").
        error_description: Human-readable description. It is XML-escaped
            before being embedded so client-supplied text cannot break
            (or inject into) the response document.

    Returns:
        aiohttp Response with status 500 and a text/xml SOAP fault body.
    """
    # Escape the description - it may echo client-supplied text.
    description = self._escape_xml(error_description)
    error_xml = f"""<?xml version="1.0" encoding="utf-8"?>
<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/"
    s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">
    <s:Body>
        <s:Fault>
            <faultcode>s:Client</faultcode>
            <faultstring>UPnPError</faultstring>
            <detail>
                <UPnPError xmlns="urn:schemas-upnp-org:control-1-0">
                    <errorCode>{error_code}</errorCode>
                    <errorDescription>{description}</errorDescription>
                </UPnPError>
            </detail>
        </s:Fault>
    </s:Body>
</s:Envelope>"""

    return web.Response(
        text=error_xml,
        content_type="text/xml",
        charset="utf-8",
        status=500,
    )
+""" + +from __future__ import annotations + +import asyncio +import contextlib +import logging +import socket + +# SSDP constants +SSDP_MULTICAST_ADDR = "239.255.255.250" +SSDP_PORT = 1900 +SSDP_MX = 3 # Maximum wait time in seconds +SSDP_ALIVE_INTERVAL = 1800 # Send alive messages every 30 minutes + +SUPPORTED_SEARCH_TARGETS = { + "ssdp:all", + "upnp:rootdevice", + "urn:schemas-upnp-org:device:MediaServer:1", + "urn:schemas-upnp-org:service:ContentDirectory:1", + "urn:schemas-upnp-org:service:ConnectionManager:1", +} + + +class SSDPServer: + """SSDP Server for device discovery via multicast.""" + + def __init__( + self, + location: str, + server_uuid: str, + friendly_name: str, + logger: logging.Logger, + ) -> None: + """Initialize SSDP server. + + Args: + location: URL to the device description XML + server_uuid: Unique device identifier (UUID) + friendly_name: Human-readable device name + logger: Logger instance + """ + self.location = location + self.server_uuid = server_uuid + self.friendly_name = friendly_name + self.logger = logger + + self._transport: asyncio.DatagramTransport | None = None + self._protocol: SSDPProtocol | None = None + self._alive_task: asyncio.Task | None = None + self._running = False + + async def start(self) -> None: + """Start the SSDP server.""" + if self._running: + return + + self.logger.debug("Starting SSDP server") + + # Create UDP socket for multicast + loop = asyncio.get_event_loop() + + # Create the protocol instance + protocol = SSDPProtocol( + location=self.location, + server_uuid=self.server_uuid, + friendly_name=self.friendly_name, + logger=self.logger, + ) + + # Create datagram endpoint + try: + self._transport, _ = await loop.create_datagram_endpoint( + lambda: protocol, # Use local variable instead of self._protocol + local_addr=("0.0.0.0", SSDP_PORT), + reuse_port=True, + ) + + # Store it after successful creation + self._protocol = protocol + + # Join multicast group + sock = self._transport.get_extra_info("socket") 
+ group = socket.inet_aton(SSDP_MULTICAST_ADDR) + mreq = group + socket.inet_aton("0.0.0.0") + sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + + self._running = True + + # Send initial ALIVE messages + await self._send_alive_messages() + + # Start periodic ALIVE task + self._alive_task = asyncio.create_task(self._periodic_alive()) + + self.logger.info("SSDP server started - advertising device at %s", self.location) + + except OSError as err: + self.logger.error("Failed to start SSDP server: %s", err) + raise + + async def stop(self) -> None: + """Stop the SSDP server.""" + if not self._running: + return + + self.logger.debug("Stopping SSDP server") + + # Cancel periodic alive task + if self._alive_task and not self._alive_task.done(): + self._alive_task.cancel() + with contextlib.suppress(asyncio.CancelledError): + await self._alive_task + + # Send byebye messages + await self._send_byebye_messages() + + # Close transport + if self._transport: + self._transport.close() + + self._running = False + self.logger.info("SSDP server stopped") + + async def _send_alive_messages(self) -> None: + """Send SSDP ALIVE messages for all service types.""" + if not self._protocol: + return + + # Send alive for root device + await self._protocol.send_alive( + f"uuid:{self.server_uuid}", + "upnp:rootdevice", + ) + + # Send alive for device UUID + await self._protocol.send_alive( + f"uuid:{self.server_uuid}", + f"uuid:{self.server_uuid}", + ) + + # Send alive for MediaServer device + await self._protocol.send_alive( + f"uuid:{self.server_uuid}", + "urn:schemas-upnp-org:device:MediaServer:1", + ) + + # Send alive for ContentDirectory service + await self._protocol.send_alive( + f"uuid:{self.server_uuid}", + "urn:schemas-upnp-org:service:ContentDirectory:1", + ) + + # Send alive for ConnectionManager service + await self._protocol.send_alive( + f"uuid:{self.server_uuid}", + 
"urn:schemas-upnp-org:service:ConnectionManager:1", + ) + + async def _send_byebye_messages(self) -> None: + """Send SSDP BYEBYE messages for all service types.""" + if not self._protocol: + return + + # Send byebye for root device + await self._protocol.send_byebye( + f"uuid:{self.server_uuid}", + "upnp:rootdevice", + ) + + # Send byebye for device UUID + await self._protocol.send_byebye( + f"uuid:{self.server_uuid}", + f"uuid:{self.server_uuid}", + ) + + # Send byebye for MediaServer device + await self._protocol.send_byebye( + f"uuid:{self.server_uuid}", + "urn:schemas-upnp-org:device:MediaServer:1", + ) + + # Send byebye for ContentDirectory service + await self._protocol.send_byebye( + f"uuid:{self.server_uuid}", + "urn:schemas-upnp-org:service:ContentDirectory:1", + ) + + # Send byebye for ConnectionManager service + await self._protocol.send_byebye( + f"uuid:{self.server_uuid}", + "urn:schemas-upnp-org:service:ConnectionManager:1", + ) + + async def _periodic_alive(self) -> None: + """Periodically send ALIVE messages.""" + while self._running: + try: + await asyncio.sleep(SSDP_ALIVE_INTERVAL) + await self._send_alive_messages() + except asyncio.CancelledError: + break + except Exception as err: + self.logger.exception("Error in periodic ALIVE task: %s", err) + + +class SSDPProtocol(asyncio.DatagramProtocol): + """SSDP protocol handler.""" + + def __init__( + self, + location: str, + server_uuid: str, + friendly_name: str, + logger: logging.Logger, + ) -> None: + """Initialize protocol. 
+ + Args: + location: URL to the device description XML + server_uuid: Unique device identifier (UUID) + friendly_name: Human-readable device name + logger: Logger instance + """ + super().__init__() + self.location = location + self.server_uuid = server_uuid + self.friendly_name = friendly_name + self.logger = logger + self.transport: asyncio.DatagramTransport | None = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + """Call when connection is made.""" + self.transport = transport # type: ignore[assignment] + + def datagram_received(self, data: bytes, addr: tuple[str, int]) -> None: + """Handle incoming SSDP datagram.""" + try: + message = data.decode("utf-8") + + # Parse the message + lines = message.split("\r\n") + if not lines: + return + + # Check if this is an M-SEARCH request + if lines[0].startswith("M-SEARCH"): + asyncio.create_task(self._handle_search(message, addr)) + + except Exception as err: + self.logger.debug("Error processing SSDP datagram: %s", err) + + async def _handle_search(self, message: str, addr: tuple[str, int]) -> None: + """Handle M-SEARCH discovery request.""" + # Parse search target + st = None + for line in message.split("\r\n"): + if line.lower().startswith("st:"): + st = line.split(":", 1)[1].strip() + break + + if not st: + return + + # Respond if the search target matches our services + should_respond = st in SUPPORTED_SEARCH_TARGETS or st == f"uuid:{self.server_uuid}" + + if should_respond: + # Small delay to avoid response flooding + await asyncio.sleep(0.1) + await self.send_search_response(st, addr) + + async def send_search_response(self, st: str, addr: tuple[str, int]) -> None: + """Send response to M-SEARCH request.""" + if not self.transport: + return + + usn = f"uuid:{self.server_uuid}" + if st not in {f"uuid:{self.server_uuid}", "upnp:rootdevice"}: + usn = f"{usn}::{st}" + + response = ( + "HTTP/1.1 200 OK\r\n" + f"CACHE-CONTROL: max-age={SSDP_ALIVE_INTERVAL}\r\n" + f"EXT:\r\n" + 
f"LOCATION: {self.location}\r\n" + f"SERVER: Music Assistant UPnP/1.0\r\n" + f"ST: {st}\r\n" + f"USN: {usn}\r\n" + "\r\n" + ) + + self.transport.sendto(response.encode("utf-8"), addr) + + async def send_alive(self, usn: str, nt: str) -> None: + """Send NOTIFY ALIVE message.""" + if not self.transport: + return + + message = ( + "NOTIFY * HTTP/1.1\r\n" + f"HOST: {SSDP_MULTICAST_ADDR}:{SSDP_PORT}\r\n" + f"CACHE-CONTROL: max-age={SSDP_ALIVE_INTERVAL}\r\n" + f"LOCATION: {self.location}\r\n" + f"NT: {nt}\r\n" + f"NTS: ssdp:alive\r\n" + f"SERVER: Music Assistant UPnP/1.0\r\n" + f"USN: {usn}::{nt}\r\n" + "\r\n" + ) + + self.transport.sendto( + message.encode("utf-8"), + (SSDP_MULTICAST_ADDR, SSDP_PORT), + ) + + async def send_byebye(self, usn: str, nt: str) -> None: + """Send NOTIFY BYEBYE message.""" + if not self.transport: + return + + message = ( + "NOTIFY * HTTP/1.1\r\n" + f"HOST: {SSDP_MULTICAST_ADDR}:{SSDP_PORT}\r\n" + f"NT: {nt}\r\n" + f"NTS: ssdp:byebye\r\n" + f"USN: {usn}::{nt}\r\n" + "\r\n" + ) + + self.transport.sendto( + message.encode("utf-8"), + (SSDP_MULTICAST_ADDR, SSDP_PORT), + ) diff --git a/music_assistant/providers/webdav/provider.py b/music_assistant/providers/webdav/provider.py new file mode 100644 index 0000000000..31817bb166 --- /dev/null +++ b/music_assistant/providers/webdav/provider.py @@ -0,0 +1,348 @@ +"""WebDAV File System Provider for Music Assistant.""" + +from __future__ import annotations + +import asyncio +from pathlib import PurePosixPath +from typing import TYPE_CHECKING, cast +from urllib.parse import quote, unquote, urlparse, urlunparse + +import aiohttp +from music_assistant_models.errors import ( + LoginFailed, + MediaNotFoundError, + ProviderUnavailableError, + SetupFailedError, +) + +from music_assistant.constants import ( + CONF_PASSWORD, + CONF_USERNAME, + DB_TABLE_PROVIDER_MAPPINGS, + VERBOSE_LOG_LEVEL, +) +from music_assistant.providers.filesystem_local import LocalFileSystemProvider +from 
@property
def instance_name_postfix(self) -> str | None:
    """Return a default instance-name postfix derived from the WebDAV URL.

    Uses the last path segment when the URL has a non-trivial path
    (e.g. ``https://host/dav/music`` -> ``music``) and falls back to the
    host part for bare-host URLs; malformed URLs yield ``"Invalid URL"``.
    """
    try:
        parts = urlparse(self.base_url)
        if not parts.path or parts.path == "/":
            return parts.netloc
        return PurePosixPath(parts.path).name
    except (ValueError, TypeError):
        return "Invalid URL"
def _build_authenticated_url(self, file_path: str) -> str:
    """Return the WebDAV URL for *file_path*, embedding credentials if set.

    Username and password are percent-encoded so special characters
    (``@``, ``:``, ``/`` ...) cannot corrupt the URL structure.
    """
    url = build_webdav_url(self.base_url, file_path)
    if not (self.username and self.password):
        # No credentials configured - plain URL is fine.
        return url

    parts = urlparse(url)
    user = quote(self.username, safe="")
    pwd = quote(self.password, safe="")
    authority = f"{user}:{pwd}@{parts.netloc}"
    return urlunparse(parts._replace(netloc=authority))
async def _scandir_impl(self, path: str) -> list[FileSystemItem]:
    """List the contents of a WebDAV directory.

    Args:
        path: Path relative to the configured base URL.  For robustness an
            absolute URL (as produced elsewhere by the parent class) is
            also accepted and converted back to a relative path.

    Returns:
        A FileSystemItem per child entry.  The collection itself (echoed
        by PROPFIND depth=1) and recycle-bin entries are filtered out.
        Unexpected errors yield an empty list; connection problems raise
        ProviderUnavailableError.
    """
    # The parent class sometimes hands us an absolute URL - normalize it.
    if path.startswith("http"):
        base_prefix = urlparse(self.base_url).path
        path = urlparse(path).path[len(base_prefix):].strip("/")
        self.logger.debug("Converted absolute URL to relative path: %s", path)

    self.logger.debug("Scanning WebDAV path: %s", path)
    webdav_url = build_webdav_url(self.base_url, path)
    session = self.mass.http_session if self.verify_ssl else self.mass.http_session_no_ssl

    try:
        webdav_items = await webdav_propfind(session, webdav_url, depth=1, auth=self._auth)
        self.logger.debug("WebDAV returned %d items for %s", len(webdav_items), path)

        filesystem_items: list[FileSystemItem] = []
        base_path = urlparse(self.base_url).path.rstrip("/")
        # PROPFIND depth=1 echoes the collection itself - skip it below.
        current_path = urlparse(webdav_url).path.rstrip("/")

        for webdav_item in webdav_items:
            # Skip (Synology-style) recycle bin folders.
            if "#recycle" in webdav_item.name.lower():
                continue

            decoded_name = unquote(webdav_item.name)
            decoded_href = unquote(webdav_item.href)

            # href may be a full URL - reduce it to its path component.
            if decoded_href.startswith("http"):
                href_path = urlparse(decoded_href).path
            else:
                href_path = decoded_href

            # Skip the entry representing the directory itself.
            if href_path.rstrip("/") == current_path:
                continue

            # Derive the path relative to the provider root.  Require a
            # trailing "/" on the prefix so base "/dav/music" does not
            # accidentally match a sibling such as "/dav/music2".
            if href_path.rstrip("/") == base_path or href_path.startswith(base_path + "/"):
                relative_path = href_path[len(base_path):].strip("/")
            else:
                # Fallback: construct from the current path plus entry name.
                relative_path = (
                    str(PurePosixPath(path) / decoded_name) if path else decoded_name
                )

            filesystem_items.append(
                FileSystemItem(
                    filename=decoded_name,
                    relative_path=relative_path,
                    absolute_path=self._build_authenticated_url(relative_path),
                    is_dir=webdav_item.is_dir,
                    checksum=webdav_item.last_modified or "unknown",
                    file_size=webdav_item.size,
                )
            )

        self.logger.debug("Parsed %d filesystem items for %s", len(filesystem_items), path)
        return filesystem_items

    except (LoginFailed, SetupFailedError, ProviderUnavailableError):
        raise
    except aiohttp.ClientError as err:
        self.logger.log(
            VERBOSE_LOG_LEVEL,
            "WebDAV client error listing directory %s: %s",
            path,
            err,
        )
        raise ProviderUnavailableError(f"WebDAV server connection failed: {err}") from err
    except Exception:
        self.logger.error("Failed to list WebDAV directory %s", path, exc_info=True)
        return []
async def _scan_recursive(
    self,
    path: str,
    cur_filenames: set[str],
    file_checksums: dict[str, str],
    import_as_favorite: bool,
) -> None:
    """Depth-first scan of a WebDAV tree: process files, then recurse.

    Args:
        path: Directory (relative to the base URL) to scan.
        cur_filenames: Output set; receives the relative path of every file
            that was processed successfully.
        file_checksums: Previously stored checksums keyed by relative path,
            used to skip unchanged files.
        import_as_favorite: Mark newly imported items as favorites.
    """
    try:
        entries = await self._scandir_impl(path)

        # Handle files first, then descend into subdirectories.
        for entry in entries:
            if entry.is_dir:
                continue
            previous = file_checksums.get(entry.relative_path)
            # _process_item does blocking work - run it off the event loop.
            processed = await asyncio.to_thread(
                self._process_item, entry, previous, import_as_favorite
            )
            if processed:
                cur_filenames.add(entry.relative_path)

        for entry in entries:
            if entry.is_dir:
                await self._scan_recursive(
                    entry.relative_path, cur_filenames, file_checksums, import_as_favorite
                )

    except (LoginFailed, SetupFailedError, ProviderUnavailableError):
        # Provider-level failures must abort the whole sync.
        raise
    except aiohttp.ClientError as err:
        self.logger.warning(f"WebDAV client error scanning path {path}: {err}")
    except Exception as err:
        self.logger.warning(f"Failed to scan WebDAV path {path}: {err}")
a/music_assistant/providers/dlna-server/__init__.py +++ b/music_assistant/providers/dlna-server/__init__.py @@ -682,8 +682,8 @@ async def _get_children( limit=limit, offset=offset, order_by="sort_name" ) album_items = [ - self._create_album_container(album, ALBUMS_CONTAINER_ID) - for album in albums # type: ignore + self._create_album_container(album, ALBUMS_CONTAINER_ID) # type: ignore + for album in albums ] total = await self.mass.music.albums.library_count() didl_xml = self._wrap_didl_items(album_items) From 8c9d8aa3fc03690fe1bf1b47dd7269a419a95b51 Mon Sep 17 00:00:00 2001 From: Gav Date: Sat, 11 Oct 2025 13:49:52 +1000 Subject: [PATCH 03/11] Final drafting --- .../providers/dlna-server/__init__.py | 58 +++++++++++-------- music_assistant/providers/dlna-server/ssdp.py | 29 ++++++++-- 2 files changed, 60 insertions(+), 27 deletions(-) diff --git a/music_assistant/providers/dlna-server/__init__.py b/music_assistant/providers/dlna-server/__init__.py index 2a0f5ef4b1..37bb5cf9d4 100644 --- a/music_assistant/providers/dlna-server/__init__.py +++ b/music_assistant/providers/dlna-server/__init__.py @@ -8,12 +8,17 @@ import urllib.parse import uuid -from typing import TYPE_CHECKING, cast +from typing import TYPE_CHECKING, Any, cast from aiohttp import web from defusedxml import ElementTree as DefusedET -from music_assistant_models.enums import MediaType -from music_assistant_models.errors import MediaNotFoundError, SetupFailedError +from music_assistant_models.enums import MediaType, ProviderFeature +from music_assistant_models.errors import ( + MediaNotFoundError, + ProviderUnavailableError, + SetupFailedError, + UnsupportedFeaturedException, +) from music_assistant.models.music_provider import MusicProvider from music_assistant.models.plugin import PluginProvider @@ -23,14 +28,16 @@ if TYPE_CHECKING: from xml.etree.ElementTree import Element - from music_assistant_models.config_entries import ProviderConfig - from music_assistant_models.media_items import Album, 
Artist, Track + from music_assistant_models.config_entries import ConfigEntry, ProviderConfig + from music_assistant_models.media_items import Album, Artist, MediaItemImage, Track from music_assistant_models.provider import ProviderManifest from music_assistant.mass import MusicAssistant from music_assistant.models import ProviderInstanceType -SUPPORTED_FEATURES = set() # No special features needed for this plugin +SUPPORTED_FEATURES: set[ProviderFeature] = ( + set() +) # we don't have any special supported features (yet) # DLNA/UPnP constants DEVICE_TYPE = "urn:schemas-upnp-org:device:MediaServer:1" @@ -63,26 +70,23 @@ async def get_config_entries( instance_id: str | None = None, # noqa: ARG001 action: str | None = None, # noqa: ARG001 values: dict[str, str] | None = None, # noqa: ARG001 -) -> tuple: +) -> tuple[ConfigEntry, ...]: """Return Config entries for this provider.""" - # No configuration needed - use standard everything + # No configuration needed for DLNA server return () class DLNAServerProvider(PluginProvider): """DLNA Server Plugin Provider for Music Assistant.""" - def __init__(self, *args, **kwargs) -> None: + def __init__(self, *args: Any, **kwargs: Any) -> None: """Initialize the DLNA server provider.""" super().__init__(*args, **kwargs) - self._ssdp_server = None + self._ssdp_server: SSDPServer | None = None self._server_uuid: str = str(uuid.uuid4()) self._friendly_name = "Music Assistant" self._routes_registered = False - - async def handle_async_init(self) -> None: - """Handle async initialization of the provider.""" - # This is called right after setup, before loaded_in_mass + self.is_streaming_provider = False async def loaded_in_mass(self) -> None: """Call after the provider has been loaded into Music Assistant.""" @@ -97,7 +101,10 @@ async def loaded_in_mass(self) -> None: "DLNA Server started successfully - " "Music Assistant is now discoverable as a DLNA Media Server" ) - except Exception as err: + except OSError as err: # Socket/network 
errors + self.logger.exception("Failed to start DLNA server due to network error") + raise SetupFailedError(f"Failed to start DLNA server: {err}") from err + except Exception as err: # Unexpected errors self.logger.exception("Failed to start DLNA server") raise SetupFailedError("Failed to start DLNA server") from err @@ -446,6 +453,9 @@ async def _handle_content_directory_control(self, request: web.Request) -> web.R return await self._soap_error(401, "Invalid Action") + except DefusedET.ParseError as err: + self.logger.warning("Invalid XML in SOAP request: %s", err) + return await self._soap_error(400, "Invalid XML") except Exception as err: self.logger.exception("Error handling ContentDirectory control request") return await self._soap_error(500, str(err)) @@ -586,7 +596,7 @@ async def _handle_track_stream(self, request: web.Request) -> web.Response: # Get the provider prov = self.mass.get_provider(provider_instance) if not prov or not isinstance(prov, MusicProvider): - return web.Response(status=404, text="Provider not found") + raise ProviderUnavailableError(f"Provider {provider_instance} not available") # Get stream details streamdetails = await prov.get_stream_details(prov_item_id, MediaType.TRACK) @@ -596,7 +606,9 @@ async def _handle_track_stream(self, request: web.Request) -> web.Response: file_path = streamdetails.data.absolute_path else: # Fallback for non-filesystem providers - return web.Response(status=400, text="Only local files supported") + raise UnsupportedFeaturedException( + "Only local files are supported for DLNA streaming" + ) self.logger.debug("Serving file: %s", file_path) @@ -682,7 +694,7 @@ async def _get_children( limit=limit, offset=offset, order_by="sort_name" ) album_items = [ - self._create_album_container(album, ALBUMS_CONTAINER_ID) # type: ignore + self._create_album_container(album, ALBUMS_CONTAINER_ID) # type: ignore[arg-type] for album in albums ] total = await self.mass.music.albums.library_count() @@ -707,12 +719,12 @@ async 
def _get_children( artist_id = parent_id[7:] albums = await self.mass.music.artists.albums( artist_id, "library", in_library_only=True - ) + ) # type: ignore[assignment] # Apply pagination manually since albums() doesn't support it paginated_albums = ( list(albums)[offset : offset + limit] if limit > 0 else list(albums)[offset:] ) - album_items = [self._create_album_container(album) for album in paginated_albums] + album_items = [self._create_album_container(album) for album in paginated_albums] # type: ignore[arg-type] didl_xml = self._wrap_didl_items(album_items) return didl_xml, len(album_items), len(albums) @@ -900,7 +912,7 @@ async def _create_track_item(self, track: Track) -> str: # noqa: PLR0915 # Add track number track_number_xml = "" - if track.position: + if track.track_number: track_number_xml = ( f"{track.position}" ) @@ -958,7 +970,7 @@ def _create_empty_didl(self) -> str: xmlns:upnp="urn:schemas-upnp-org:metadata-1-0/upnp/"> """ - def _get_image_url(self, image) -> str: + def _get_image_url(self, image: MediaItemImage) -> str: """Get the URL for an image.""" if image.remotely_accessible: return image.path @@ -972,7 +984,7 @@ def _get_image_url(self, image) -> str: for provider in self.mass.music.providers: # Check if this provider has a base_path attribute (filesystem providers do) if hasattr(provider, "base_path"): - base_path = provider.base_path # type: ignore[attr-defined] + base_path = provider.base_path image_path = f"{base_path}/{image_path}" break diff --git a/music_assistant/providers/dlna-server/ssdp.py b/music_assistant/providers/dlna-server/ssdp.py index 5c54c3643b..d92928ff85 100644 --- a/music_assistant/providers/dlna-server/ssdp.py +++ b/music_assistant/providers/dlna-server/ssdp.py @@ -51,7 +51,7 @@ def __init__( self._transport: asyncio.DatagramTransport | None = None self._protocol: SSDPProtocol | None = None - self._alive_task: asyncio.Task | None = None + self._alive_task: asyncio.Task[None] | None = None self._running = False 
async def start(self) -> None: @@ -59,6 +59,12 @@ async def start(self) -> None: if self._running: return + # Cancel any existing task from a previous incomplete stop + if self._alive_task and not self._alive_task.done(): + self._alive_task.cancel() + with contextlib.suppress(asyncio.CancelledError): + await self._alive_task + self.logger.debug("Starting SSDP server") # Create UDP socket for multicast @@ -87,8 +93,18 @@ async def start(self) -> None: sock = self._transport.get_extra_info("socket") group = socket.inet_aton(SSDP_MULTICAST_ADDR) mreq = group + socket.inet_aton("0.0.0.0") - sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq) - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + + try: + sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq) + except OSError as err: + self.logger.warning( + "Failed to join multicast group: %s. Discovery may not work.", err + ) + + try: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + except OSError as err: + self.logger.warning("Failed to set SO_REUSEADDR: %s", err) self._running = True @@ -124,6 +140,10 @@ async def stop(self) -> None: if self._transport: self._transport.close() + # Clear references + self._protocol = None + self._transport = None + self._running = False self.logger.info("SSDP server stopped") @@ -259,7 +279,8 @@ async def _handle_search(self, message: str, addr: tuple[str, int]) -> None: """Handle M-SEARCH discovery request.""" # Parse search target st = None - for line in message.split("\r\n"): + for raw_line in message.split("\r\n"): + line = raw_line.strip() if line.lower().startswith("st:"): st = line.split(":", 1)[1].strip() break From 1e60cbb10c0fad464d95d31da1697292caf1ee58 Mon Sep 17 00:00:00 2001 From: Gav Date: Sat, 11 Oct 2025 14:08:06 +1000 Subject: [PATCH 04/11] Fix security concerns --- music_assistant/providers/dlna-server/__init__.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git 
a/music_assistant/providers/dlna-server/__init__.py b/music_assistant/providers/dlna-server/__init__.py index 37bb5cf9d4..dc36739d27 100644 --- a/music_assistant/providers/dlna-server/__init__.py +++ b/music_assistant/providers/dlna-server/__init__.py @@ -456,9 +456,9 @@ async def _handle_content_directory_control(self, request: web.Request) -> web.R except DefusedET.ParseError as err: self.logger.warning("Invalid XML in SOAP request: %s", err) return await self._soap_error(400, "Invalid XML") - except Exception as err: + except Exception: self.logger.exception("Error handling ContentDirectory control request") - return await self._soap_error(500, str(err)) + return await self._soap_error(500, "Internal server error") async def _handle_connection_manager_control(self, request: web.Request) -> web.Response: """Handle ConnectionManager SOAP control requests.""" @@ -475,9 +475,9 @@ async def _handle_connection_manager_control(self, request: web.Request) -> web. return await self._soap_error(401, "Invalid Action") - except Exception as err: + except Exception: self.logger.exception("Error handling ConnectionManager control request") - return await self._soap_error(500, str(err)) + return await self._soap_error(500, "Internal server error") async def _handle_browse_action(self, action_elem: Element) -> web.Response: """Handle Browse SOAP action.""" @@ -619,9 +619,9 @@ async def _handle_track_stream(self, request: web.Request) -> web.Response: except MediaNotFoundError: return web.Response(status=404, text="Track not found") - except Exception as err: + except Exception: self.logger.exception("Error streaming track") - return web.Response(status=500, text=str(err)) + return web.Response(status=500, text="Internal server error") # ==================== DIDL/XML Helpers ==================== From e984f77463fc369312235114c8333f14f36b45f2 Mon Sep 17 00:00:00 2001 From: Gav Date: Sat, 11 Oct 2025 14:12:51 +1000 Subject: [PATCH 05/11] Remove rogue file from branch --- 
music_assistant/providers/webdav/provider.py | 348 ------------------- 1 file changed, 348 deletions(-) delete mode 100644 music_assistant/providers/webdav/provider.py diff --git a/music_assistant/providers/webdav/provider.py b/music_assistant/providers/webdav/provider.py deleted file mode 100644 index 31817bb166..0000000000 --- a/music_assistant/providers/webdav/provider.py +++ /dev/null @@ -1,348 +0,0 @@ -"""WebDAV File System Provider for Music Assistant.""" - -from __future__ import annotations - -import asyncio -from pathlib import PurePosixPath -from typing import TYPE_CHECKING, cast -from urllib.parse import quote, unquote, urlparse, urlunparse - -import aiohttp -from music_assistant_models.errors import ( - LoginFailed, - MediaNotFoundError, - ProviderUnavailableError, - SetupFailedError, -) - -from music_assistant.constants import ( - CONF_PASSWORD, - CONF_USERNAME, - DB_TABLE_PROVIDER_MAPPINGS, - VERBOSE_LOG_LEVEL, -) -from music_assistant.providers.filesystem_local import LocalFileSystemProvider -from music_assistant.providers.filesystem_local.helpers import FileSystemItem - -from .constants import CONF_CONTENT_TYPE, CONF_URL, CONF_VERIFY_SSL -from .helpers import build_webdav_url, webdav_propfind, webdav_test_connection - -if TYPE_CHECKING: - from music_assistant_models.config_entries import ProviderConfig - from music_assistant_models.enums import MediaType - from music_assistant_models.provider import ProviderManifest - - from music_assistant.mass import MusicAssistant - - -class WebDAVFileSystemProvider(LocalFileSystemProvider): - """WebDAV File System Provider for Music Assistant.""" - - def __init__( - self, - mass: MusicAssistant, - manifest: ProviderManifest, - config: ProviderConfig, - ) -> None: - """Initialize WebDAV FileSystem Provider.""" - base_url = cast("str", config.get_value(CONF_URL)).rstrip("/") - super().__init__(mass, manifest, config, base_url) - self.base_url = base_url - self.username = cast("str | None", 
config.get_value(CONF_USERNAME)) - self.password = cast("str | None", config.get_value(CONF_PASSWORD)) - self.verify_ssl = cast("bool", config.get_value(CONF_VERIFY_SSL)) - self.media_content_type = cast("str", config.get_value(CONF_CONTENT_TYPE)) - - @property - def instance_name_postfix(self) -> str | None: - """Return a (default) instance name postfix for this provider instance.""" - try: - parsed = urlparse(self.base_url) - if parsed.path and parsed.path != "/": - return PurePosixPath(parsed.path).name - return parsed.netloc - except (ValueError, TypeError): - return "Invalid URL" - - @property - def _auth(self) -> aiohttp.BasicAuth | None: - """Get BasicAuth for WebDAV requests.""" - if self.username: - return aiohttp.BasicAuth(self.username, self.password or "") - return None - - async def handle_async_init(self) -> None: - """Handle async initialization of the provider.""" - try: - await webdav_test_connection( - self.base_url, - self.username, - self.password, - self.verify_ssl, - timeout=10, - ) - except (LoginFailed, SetupFailedError): - raise - except Exception as err: - raise SetupFailedError(f"WebDAV connection failed: {err}") from err - - self.write_access = False - - def _build_authenticated_url(self, file_path: str) -> str: - """Build authenticated WebDAV URL with properly encoded credentials.""" - webdav_url = build_webdav_url(self.base_url, file_path) - parsed = urlparse(webdav_url) - - if self.username and self.password: - encoded_username = quote(self.username, safe="") - encoded_password = quote(self.password, safe="") - netloc = f"{encoded_username}:{encoded_password}@{parsed.netloc}" - return urlunparse( - (parsed.scheme, netloc, parsed.path, parsed.params, parsed.query, parsed.fragment) - ) - - return webdav_url - - async def _exists_impl(self, file_path: str) -> bool: - """Check if WebDAV resource exists.""" - if not file_path: - return False - # Handle case where absolute URL is passed - if file_path.startswith("http"): - parsed = 
urlparse(file_path) - base_parsed = urlparse(self.base_url) - file_path = parsed.path[len(base_parsed.path) :].strip("/") - try: - webdav_url = build_webdav_url(self.base_url, file_path) - session = self.mass.http_session if self.verify_ssl else self.mass.http_session_no_ssl - items = await webdav_propfind(session, webdav_url, depth=0, auth=self._auth) - return len(items) > 0 or webdav_url.rstrip("/") == self.base_url.rstrip("/") - except (LoginFailed, SetupFailedError): - raise - except aiohttp.ClientError as err: - self.logger.debug(f"WebDAV client error during exists check for {file_path}: {err}") - return False - except Exception as err: - self.logger.debug(f"WebDAV exists check failed for {file_path}: {err}") - return False - - async def _resolve_impl(self, file_path: str) -> FileSystemItem: - """Resolve WebDAV path to FileSystemItem.""" - webdav_url = build_webdav_url(self.base_url, file_path) - session = self.mass.http_session if self.verify_ssl else self.mass.http_session_no_ssl - - try: - items = await webdav_propfind(session, webdav_url, depth=0, auth=self._auth) - if not items: - if webdav_url.rstrip("/") == self.base_url.rstrip("/"): - return FileSystemItem( - filename="", - relative_path="", - absolute_path=self._build_authenticated_url(file_path), - is_dir=True, - ) - raise MediaNotFoundError(f"WebDAV resource not found: {file_path}") - - webdav_item = items[0] - - return FileSystemItem( - filename=PurePosixPath(file_path).name or webdav_item.name, - relative_path=file_path, - absolute_path=self._build_authenticated_url(file_path), - is_dir=webdav_item.is_dir, - checksum=webdav_item.last_modified or "unknown", - file_size=webdav_item.size, - ) - - except MediaNotFoundError: - raise - except (LoginFailed, SetupFailedError): - raise - except aiohttp.ClientError as err: - raise MediaNotFoundError(f"Failed to resolve WebDAV path {file_path}: {err}") from err - - async def _scandir_impl(self, path: str) -> list[FileSystemItem]: - """List WebDAV directory 
contents.""" - # Handle case where absolute URL is passed (from parent's code) - if path.startswith("http"): - parsed = urlparse(path) - base_parsed = urlparse(self.base_url) - path = parsed.path[len(base_parsed.path) :].strip("/") - self.logger.debug(f"Converted absolute URL to relative path: {path}") - - self.logger.debug(f"Scanning WebDAV path: {path}") - webdav_url = build_webdav_url(self.base_url, path) - session = self.mass.http_session if self.verify_ssl else self.mass.http_session_no_ssl - - try: - webdav_items = await webdav_propfind(session, webdav_url, depth=1, auth=self._auth) - self.logger.debug(f"WebDAV returned {len(webdav_items)} items for {path}") # ADD THIS - - filesystem_items: list[FileSystemItem] = [] - - # Parse base path component for comparison - base_parsed = urlparse(self.base_url) - base_path = base_parsed.path.rstrip("/") - - for webdav_item in webdav_items: - self.logger.debug( - f"Processing item: name={webdav_item.name}, href={webdav_item.href[:100]}, " - f"is_dir={webdav_item.is_dir}" - ) - - if "#recycle" in webdav_item.name.lower(): - continue - decoded_name = unquote(webdav_item.name) - decoded_href = unquote(webdav_item.href) - - # If href is a full URL, extract just the path component - if decoded_href.startswith("http"): - href_parsed = urlparse(decoded_href) - href_path = href_parsed.path - else: - href_path = decoded_href - - # Skip the directory itself - current_path = urlparse(webdav_url).path.rstrip("/") - if href_path.rstrip("/") == current_path: - self.logger.debug(f"Skipping directory itself: {href_path}") - - continue - self.logger.debug(f"After skip check, processing: {webdav_item.name}") - - # Calculate relative path by stripping base path - if href_path.startswith(base_path + "/") or href_path.startswith(base_path): - relative_path = href_path[len(base_path) :].strip("/") - else: - # Fallback: construct from current path + name - relative_path = ( - str(PurePosixPath(path) / decoded_name) if path else decoded_name - 
) - self.logger.debug( - f"Item: {decoded_name}, href: {decoded_href[:80]}, relative_path: {relative_path}" - ) - self.logger.debug( - f"Calculated relative_path: '{relative_path}' for {webdav_item.name}" - ) - - decoded_name = unquote(webdav_item.name) - - filesystem_items.append( - FileSystemItem( - filename=decoded_name, - relative_path=relative_path, - absolute_path=self._build_authenticated_url(relative_path), - is_dir=webdav_item.is_dir, - checksum=webdav_item.last_modified or "unknown", - file_size=webdav_item.size, - ) - ) - self.logger.debug(f"Added to filesystem_items: {decoded_name}") - self.logger.debug( - f"Parsed {len(filesystem_items)} filesystem items for {path}" - ) # ADD THIS - - return filesystem_items - - except (LoginFailed, SetupFailedError, ProviderUnavailableError): - raise - except aiohttp.ClientError as err: - self.logger.log( - VERBOSE_LOG_LEVEL, - f"WebDAV client error listing directory {path}: {err}", - ) - raise ProviderUnavailableError(f"WebDAV server connection failed: {err}") from err - except Exception as err: - self.logger.error(f"Failed to list WebDAV directory {path}: {err}", exc_info=True) - - return [] - - async def resolve_image(self, path: str) -> str | bytes: - """Resolve image path to actual image data or URL.""" - webdav_url = build_webdav_url(self.base_url, path) - session = self.mass.http_session if self.verify_ssl else self.mass.http_session_no_ssl - - async with session.get(webdav_url, auth=self._auth) as resp: - if resp.status != 200: - raise MediaNotFoundError(f"Image not found: {path}") - return await resp.read() - - async def sync_library(self, media_type: MediaType, import_as_favorite: bool = False) -> None: - """Run library sync for WebDAV provider.""" - assert self.mass.music.database - - if self.sync_running: - self.logger.warning(f"Library sync already running for {self.name}") - return - - self.logger.info(f"Started library sync for WebDAV provider {self.name}") - self.sync_running = True - - try: - 
file_checksums: dict[str, str] = {} - query = ( - f"SELECT provider_item_id, details FROM {DB_TABLE_PROVIDER_MAPPINGS} " - f"WHERE provider_instance = '{self.instance_id}' " - "AND media_type in ('track', 'playlist', 'audiobook', 'podcast_episode')" - ) - for db_row in await self.mass.music.database.get_rows_from_query(query, limit=0): - file_checksums[db_row["provider_item_id"]] = str(db_row["details"]) - - cur_filenames: set[str] = set() - prev_filenames: set[str] = set(file_checksums.keys()) - - await self._scan_recursive("", cur_filenames, file_checksums, import_as_favorite) - - deleted_files = prev_filenames - cur_filenames - await self._process_deletions(deleted_files) - await self._process_orphaned_albums_and_artists() - - except (LoginFailed, SetupFailedError, ProviderUnavailableError) as err: - self.logger.error(f"WebDAV library sync failed due to provider error: {err}") - raise - except aiohttp.ClientError as err: - self.logger.error(f"WebDAV library sync failed due to connection error: {err}") - raise ProviderUnavailableError(f"WebDAV server connection failed: {err}") from err - except Exception as err: - self.logger.error(f"WebDAV library sync failed with unexpected error: {err}") - raise SetupFailedError(f"WebDAV library sync failed: {err}") from err - finally: - self.sync_running = False - self.logger.info(f"Completed library sync for WebDAV provider {self.name}") - - async def _scan_recursive( - self, - path: str, - cur_filenames: set[str], - file_checksums: dict[str, str], - import_as_favorite: bool, - ) -> None: - """Recursively scan WebDAV directory.""" - try: - items = await self._scandir_impl(path) - - # Separate directories and files - dirs = [item for item in items if item.is_dir] - files = [item for item in items if not item.is_dir] - - # Process files in executor (blocking operation) - for item in files: - prev_checksum = file_checksums.get(item.relative_path) - # Wrap _process_item in executor since it's blocking - if await 
asyncio.to_thread( - self._process_item, item, prev_checksum, import_as_favorite - ): - cur_filenames.add(item.relative_path) - - # Recurse into directories - for dir_item in dirs: - await self._scan_recursive( - dir_item.relative_path, cur_filenames, file_checksums, import_as_favorite - ) - - except (LoginFailed, SetupFailedError, ProviderUnavailableError): - raise - except aiohttp.ClientError as err: - self.logger.warning(f"WebDAV client error scanning path {path}: {err}") - except Exception as err: - self.logger.warning(f"Failed to scan WebDAV path {path}: {err}") From 67afdfc246bbdb458eca251411a0fbd9e28ba70e Mon Sep 17 00:00:00 2001 From: OzGav Date: Sat, 11 Oct 2025 14:14:29 +1000 Subject: [PATCH 06/11] Add files via upload --- .../providers/dlna-server/icon.svg | 3 ++ .../providers/dlna-server/icon_monochrome.svg | 40 +++++++++++++++++++ 2 files changed, 43 insertions(+) create mode 100644 music_assistant/providers/dlna-server/icon.svg create mode 100644 music_assistant/providers/dlna-server/icon_monochrome.svg diff --git a/music_assistant/providers/dlna-server/icon.svg b/music_assistant/providers/dlna-server/icon.svg new file mode 100644 index 0000000000..10e19efa16 --- /dev/null +++ b/music_assistant/providers/dlna-server/icon.svg @@ -0,0 +1,3 @@ + + + diff --git a/music_assistant/providers/dlna-server/icon_monochrome.svg b/music_assistant/providers/dlna-server/icon_monochrome.svg new file mode 100644 index 0000000000..2ab8797591 --- /dev/null +++ b/music_assistant/providers/dlna-server/icon_monochrome.svg @@ -0,0 +1,40 @@ + + + + + + From a3693227ab27e94741ab780fab4841f25bef461e Mon Sep 17 00:00:00 2001 From: Gav Date: Sun, 12 Oct 2025 23:25:52 +1000 Subject: [PATCH 07/11] Remove ignores --- music_assistant/providers/dlna-server/__init__.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/music_assistant/providers/dlna-server/__init__.py b/music_assistant/providers/dlna-server/__init__.py index dc36739d27..7ccc195622 100644 --- 
a/music_assistant/providers/dlna-server/__init__.py +++ b/music_assistant/providers/dlna-server/__init__.py @@ -694,8 +694,7 @@ async def _get_children( limit=limit, offset=offset, order_by="sort_name" ) album_items = [ - self._create_album_container(album, ALBUMS_CONTAINER_ID) # type: ignore[arg-type] - for album in albums + self._create_album_container(album, ALBUMS_CONTAINER_ID) for album in albums ] total = await self.mass.music.albums.library_count() didl_xml = self._wrap_didl_items(album_items) @@ -719,12 +718,12 @@ async def _get_children( artist_id = parent_id[7:] albums = await self.mass.music.artists.albums( artist_id, "library", in_library_only=True - ) # type: ignore[assignment] + ) # Apply pagination manually since albums() doesn't support it paginated_albums = ( list(albums)[offset : offset + limit] if limit > 0 else list(albums)[offset:] ) - album_items = [self._create_album_container(album) for album in paginated_albums] # type: ignore[arg-type] + album_items = [self._create_album_container(album) for album in paginated_albums] didl_xml = self._wrap_didl_items(album_items) return didl_xml, len(album_items), len(albums) From ae46bb7fbe2e27e272c5ef8b8b805c9597c08396 Mon Sep 17 00:00:00 2001 From: OzGav Date: Tue, 14 Oct 2025 21:06:41 +1100 Subject: [PATCH 08/11] Update music_assistant/providers/dlna-server/__init__.py Co-authored-by: Marcel van der Veldt --- music_assistant/providers/dlna-server/__init__.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/music_assistant/providers/dlna-server/__init__.py b/music_assistant/providers/dlna-server/__init__.py index 7ccc195622..5f8c3318d6 100644 --- a/music_assistant/providers/dlna-server/__init__.py +++ b/music_assistant/providers/dlna-server/__init__.py @@ -104,9 +104,6 @@ async def loaded_in_mass(self) -> None: except OSError as err: # Socket/network errors self.logger.exception("Failed to start DLNA server due to network error") raise SetupFailedError(f"Failed to start DLNA server: {err}") from err 
- except Exception as err: # Unexpected errors - self.logger.exception("Failed to start DLNA server") - raise SetupFailedError("Failed to start DLNA server") from err async def unload(self, is_removed: bool = False) -> None: """Handle unload/close of the provider.""" From d8075a0017056feb7370538116dce412e429e38f Mon Sep 17 00:00:00 2001 From: Gav Date: Tue, 14 Oct 2025 20:10:06 +1000 Subject: [PATCH 09/11] PR Review --- .../providers/dlna-server/__init__.py | 29 +++++++------------ 1 file changed, 11 insertions(+), 18 deletions(-) diff --git a/music_assistant/providers/dlna-server/__init__.py b/music_assistant/providers/dlna-server/__init__.py index 7ccc195622..3a0e1d8044 100644 --- a/music_assistant/providers/dlna-server/__init__.py +++ b/music_assistant/providers/dlna-server/__init__.py @@ -110,25 +110,18 @@ async def loaded_in_mass(self) -> None: async def unload(self, is_removed: bool = False) -> None: """Handle unload/close of the provider.""" - # Stop SSDP server - if self._ssdp_server: - try: - await self._ssdp_server.stop() - except Exception as err: - self.logger.warning("Error stopping SSDP server: %s", err) - - # Unregister HTTP routes + # Unregister HTTP routes first if self._routes_registered: - try: - self.mass.streams.unregister_dynamic_route("/dlna/description.xml") - self.mass.streams.unregister_dynamic_route("/dlna/ContentDirectory.xml") - self.mass.streams.unregister_dynamic_route("/dlna/ConnectionManager.xml") - self.mass.streams.unregister_dynamic_route("/dlna/control/ContentDirectory") - self.mass.streams.unregister_dynamic_route("/dlna/control/ConnectionManager") - self.mass.streams.unregister_dynamic_route("/dlna/track/*") - # self.mass.streams.unregister_dynamic_route("/dlna/event/ContentDirectory") - except Exception as err: - self.logger.warning("Error unregistering routes: %s", err) + self.mass.streams.unregister_dynamic_route("/dlna/description.xml") + self.mass.streams.unregister_dynamic_route("/dlna/ContentDirectory.xml") + 
self.mass.streams.unregister_dynamic_route("/dlna/ConnectionManager.xml") + self.mass.streams.unregister_dynamic_route("/dlna/control/ContentDirectory") + self.mass.streams.unregister_dynamic_route("/dlna/control/ConnectionManager") + self.mass.streams.unregister_dynamic_route("/dlna/track/*") + + # Then stop SSDP server + if self._ssdp_server: + await self._ssdp_server.stop() self.logger.info("DLNA Server stopped") From 1d8a475536edd14737c92d3395dff5c592d61771 Mon Sep 17 00:00:00 2001 From: Gav Date: Tue, 14 Oct 2025 20:27:26 +1000 Subject: [PATCH 10/11] More PR Review --- music_assistant/providers/dlna-server/__init__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/music_assistant/providers/dlna-server/__init__.py b/music_assistant/providers/dlna-server/__init__.py index 4fe0ed3e18..3a0e1d8044 100644 --- a/music_assistant/providers/dlna-server/__init__.py +++ b/music_assistant/providers/dlna-server/__init__.py @@ -104,6 +104,9 @@ async def loaded_in_mass(self) -> None: except OSError as err: # Socket/network errors self.logger.exception("Failed to start DLNA server due to network error") raise SetupFailedError(f"Failed to start DLNA server: {err}") from err + except Exception as err: # Unexpected errors + self.logger.exception("Failed to start DLNA server") + raise SetupFailedError("Failed to start DLNA server") from err async def unload(self, is_removed: bool = False) -> None: """Handle unload/close of the provider.""" From 2ba62ff15e614338a37b0142a06dfbfc80c729da Mon Sep 17 00:00:00 2001 From: Gav Date: Tue, 14 Oct 2025 21:41:03 +1000 Subject: [PATCH 11/11] Filter non-local tracks --- .../providers/dlna-server/__init__.py | 123 +++++++++++++----- 1 file changed, 90 insertions(+), 33 deletions(-) diff --git a/music_assistant/providers/dlna-server/__init__.py b/music_assistant/providers/dlna-server/__init__.py index 3a0e1d8044..6af0f25850 100644 --- a/music_assistant/providers/dlna-server/__init__.py +++ b/music_assistant/providers/dlna-server/__init__.py 
@@ -29,7 +29,7 @@ from xml.etree.ElementTree import Element from music_assistant_models.config_entries import ConfigEntry, ProviderConfig - from music_assistant_models.media_items import Album, Artist, MediaItemImage, Track + from music_assistant_models.media_items import Album, Artist, MediaItem, MediaItemImage, Track from music_assistant_models.provider import ProviderManifest from music_assistant.mass import MusicAssistant @@ -39,6 +39,8 @@ set() ) # we don't have any special supported features (yet) +SUPPORTED_PROVIDER_DOMAINS = {"filesystem_local", "filesystem_smb"} + # DLNA/UPnP constants DEVICE_TYPE = "urn:schemas-upnp-org:device:MediaServer:1" SERVICE_CONTENT_DIRECTORY = "urn:schemas-upnp-org:service:ContentDirectory:1" @@ -662,7 +664,6 @@ async def _get_children( offset = starting_index if parent_id == ROOT_ID: - # Root level: return Artists, Albums, and Tracks containers containers = [ self._create_artists_root_container(), self._create_albums_root_container(), @@ -672,47 +673,28 @@ async def _get_children( return didl_xml, len(containers), len(containers) if parent_id == ARTISTS_CONTAINER_ID: - # Return all artists - artists = await self.mass.music.artists.library_items( - limit=limit, offset=offset, order_by="sort_name" - ) + artists, total = await self._get_filesystem_artists(limit, offset) artist_items = [self._create_artist_container(artist) for artist in artists] - total = await self.mass.music.artists.library_count() didl_xml = self._wrap_didl_items(artist_items) return didl_xml, len(artist_items), total if parent_id == ALBUMS_CONTAINER_ID: - # Return all albums - albums = await self.mass.music.albums.library_items( - limit=limit, offset=offset, order_by="sort_name" - ) + albums, total = await self._get_filesystem_albums(limit, offset) album_items = [ self._create_album_container(album, ALBUMS_CONTAINER_ID) for album in albums ] - total = await self.mass.music.albums.library_count() didl_xml = self._wrap_didl_items(album_items) return didl_xml, 
len(album_items), total if parent_id == TRACKS_CONTAINER_ID: - # Return all tracks - tracks = await self.mass.music.tracks.library_items( - limit=limit, offset=offset, order_by="sort_name" - ) - track_items = [] - for track in tracks: - item_xml = await self._create_track_item(track) - track_items.append(item_xml) - total = await self.mass.music.tracks.library_count() + tracks, total = await self._get_filesystem_tracks(limit, offset) + track_items = [await self._create_track_item(track) for track in tracks] didl_xml = self._wrap_didl_items(track_items) return didl_xml, len(track_items), total if parent_id.startswith("artist_"): - # Return albums for this artist artist_id = parent_id[7:] - albums = await self.mass.music.artists.albums( - artist_id, "library", in_library_only=True - ) - # Apply pagination manually since albums() doesn't support it + albums = await self._get_filesystem_albums_for_artist(artist_id) paginated_albums = ( list(albums)[offset : offset + limit] if limit > 0 else list(albums)[offset:] ) @@ -721,22 +703,97 @@ async def _get_children( return didl_xml, len(album_items), len(albums) if parent_id.startswith("album_"): - # Return tracks for this album album_id = parent_id[6:] - tracks = await self.mass.music.albums.tracks(album_id, "library", in_library_only=True) - # Apply pagination manually + tracks = await self._get_filesystem_tracks_for_album(album_id) paginated_tracks = ( list(tracks)[offset : offset + limit] if limit > 0 else list(tracks)[offset:] ) - track_items = [] - for track in paginated_tracks: - item_xml = await self._create_track_item(track) - track_items.append(item_xml) + track_items = [await self._create_track_item(track) for track in paginated_tracks] didl_xml = self._wrap_didl_items(track_items) return didl_xml, len(track_items), len(tracks) return self._create_empty_didl(), 0, 0 + # Add this helper method to the class + def _is_supported_item(self, item: MediaItem) -> bool: + """Check if a media item is from a supported 
filesystem provider.""" + # Check provider mappings that are already on the item + for provider_mapping in item.provider_mappings: + if provider_mapping.provider_domain in SUPPORTED_PROVIDER_DOMAINS: + return True + return False + + async def _get_filesystem_artists(self, limit: int, offset: int) -> tuple[list[Artist], int]: + """Get artists that have filesystem tracks.""" + artists = await self.mass.music.artists.library_items( + limit=limit, offset=offset, order_by="sort_name" + ) + + filtered_artists = [] + for artist in artists: + albums = await self.mass.music.artists.albums( + artist.item_id, "library", in_library_only=True + ) + # Check if any album has filesystem tracks + for album in albums: + tracks = await self.mass.music.albums.tracks( + album.item_id, "library", in_library_only=True + ) + if any(self._is_supported_item(track) for track in tracks): + filtered_artists.append(artist) + break + + total = await self.mass.music.artists.library_count() + return filtered_artists, total + + async def _get_filesystem_albums( + self, limit: int, offset: int, parent_container: str | None = None + ) -> tuple[list[Album], int]: + """Get albums that have filesystem tracks.""" + albums_list = await self.mass.music.albums.library_items( + limit=limit, offset=offset, order_by="sort_name" + ) + + filtered_albums = [] + for album in albums_list: + tracks = await self.mass.music.albums.tracks( + album.item_id, "library", in_library_only=True + ) + if any(self._is_supported_item(track) for track in tracks): + filtered_albums.append(album) + + total = await self.mass.music.albums.library_count() + return filtered_albums, total + + async def _get_filesystem_albums_for_artist(self, artist_id: str) -> list[Album]: + """Get albums for an artist that have filesystem tracks.""" + albums = await self.mass.music.artists.albums(artist_id, "library", in_library_only=True) + + filtered_albums = [] + for album in albums: + tracks = await self.mass.music.albums.tracks( + album.item_id, 
"library", in_library_only=True + ) + if any(self._is_supported_item(track) for track in tracks): + filtered_albums.append(album) + + return filtered_albums + + async def _get_filesystem_tracks(self, limit: int, offset: int) -> tuple[list[Track], int]: + """Get filesystem tracks.""" + tracks = await self.mass.music.tracks.library_items( + limit=limit, offset=offset, order_by="sort_name" + ) + + filtered_tracks = [track for track in tracks if self._is_supported_item(track)] + total = await self.mass.music.tracks.library_count() + return filtered_tracks, total + + async def _get_filesystem_tracks_for_album(self, album_id: str) -> list[Track]: + """Get filesystem tracks for an album.""" + tracks = await self.mass.music.albums.tracks(album_id, "library", in_library_only=True) + return [track for track in tracks if self._is_supported_item(track)] + def _create_root_container(self) -> str: """Create DIDL-Lite XML for root container.""" return """