17 changes: 14 additions & 3 deletions dashlivesim/dashlib/configprocessor.py
@@ -66,7 +66,7 @@ def __init__(self, vod_cfg_dir, base_url=None):
self.media_presentation_duration = None
self.timeshift_buffer_depth_in_s = None
self.minimum_update_period_in_s = None
self.modulo_period = None
self.modulo_period = False # True if modulo behavior active
self.last_segment_numbers = [] # The last segment number in every period.
self.init_seg_avail_offset = 0 # The number of secs before AST that one can fetch the init segments
self.tfdt32_flag = False # Restart every 3 hours to make tfdt fit into 32 bits.
@@ -107,6 +107,8 @@ def __init__(self, vod_cfg_dir, base_url=None):
self.insert_sidx = False
self.segtimelineloss = False # This flag is true only when there is /segtimelineloss_1/
self.emsg_last_seg = False
self.patching = False # True when /patching_1/ (or /patch_<time>/) is in the URL
self.patch_base = -1 # Publish time the patch is generated against (-1 = not set)

def __str__(self):
lines = ["%s=%s" % (k, v) for (k, v) in self.__dict__.items() if not k.startswith("_")]
@@ -341,7 +343,7 @@ class ConfigProcessor(object):
"insertad", "mpdcallback", "continuous", "segtimeline",
"segtimelinenr", "baseurl", "peroff", "scte35", "utc",
"snr", "ato", "spd", "sidx", "segtimelineloss",
"sts", "sid")
"sts", "sid", "patching", "patch")

def __init__(self, vod_cfg_dir, base_url):
self.vod_cfg_dir = vod_cfg_dir
@@ -374,7 +376,9 @@ def get_mpd_data(self):
'periodOffset' : self.cfg.period_offset,
'publishTime' : self.cfg.publish_time,
'mediaData' : self.cfg.media_data,
'segtimelineloss' : self.cfg.segtimelineloss}
'segtimelineloss' : self.cfg.segtimelineloss,
'patching': self.cfg.patching,
'originalPublishTime': self.cfg.patch_base}
if self.cfg.availability_end_time:
mpd['availabilityEndTime'] = self.cfg.availability_end_time
return mpd
@@ -426,6 +430,7 @@ def process_url(self, url_parts, now_int=0):
cfg.minimum_update_period_in_s = int(value)
elif key == "modulo": # Make a number of time-limited sessions every hour
modulo_period = ModuloPeriod(int(value), now_int)
cfg.modulo_period = True
elif key == "tfdt": # Use 32-bit tfdt (which means that AST must be more recent as well)
cfg.tfdt32_flag = True
elif key == "cont": # Continuous update of MPD AST and seg_nr.
@@ -478,6 +483,12 @@ def process_url(self, url_parts, now_int=0):
elif key == "segtimelineloss": # If segment timeline loss case signalled.
if int(value) == 1:
cfg.segtimelineloss = True
elif key == "patching":
if int(value) == 1: # Only valid when it's set to 1
cfg.patching = True
elif key == "patch":
cfg.patch_base = int(value)
cfg.patching = True # patch base will imply patching
else:
raise ConfigProcessorError("Cannot interpret option %s properly" % key)
url_pos += 1
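For reference, a minimal sketch (not part of the diff) of how the two new URL options are meant to be interpreted by process_url above. The option keys "patching" and "patch" come from the diff; the helper name and the example option strings are illustrative only.

# Illustrative sketch only; mirrors the option handling added to process_url above.
def interpret_patch_options(url_options):
    """url_options: list of 'key_value' strings taken from the URL path, e.g. ['patching_1']."""
    patching = False
    patch_base = -1
    for option in url_options:
        key, _, value = option.partition("_")
        if key == "patching" and int(value) == 1:  # /patching_1/ switches patching on
            patching = True
        elif key == "patch":  # /patch_<publish time>/ implies patching
            patch_base = int(value)
            patching = True
    return patching, patch_base

# interpret_patch_options(["patching_1"])       -> (True, -1)
# interpret_patch_options(["patch_1577836800"]) -> (True, 1577836800)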
7 changes: 6 additions & 1 deletion dashlivesim/dashlib/dash_namespace.py
@@ -32,9 +32,14 @@

RE_NAMESPACE_TAG = re.compile(r"({.*})?(.*)")
DASH_NAMESPACE = "{urn:mpeg:dash:schema:mpd:2011}"

DASH_PATCH_NAMESPACE = "{urn:mpeg:dash:schema:mpd-patch:2020}"

def add_ns(element):
"Add DASH namespace to element or to path."
parts = element.split('/')
return "/".join([DASH_NAMESPACE + e for e in parts])

def add_patch_ns(element):
"""Add Patch namespace to element or to path."""
parts = element.split('/')
return "/".join([DASH_PATCH_NAMESPACE + e for e in parts])
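A quick usage sketch of the new helper: add_patch_ns simply prefixes every path component with the patch namespace. The path "Patch/replace" is just an illustrative input, not something the diff defines; the import path is taken from this diff's file layout.

# Illustrative sketch only.
from dashlivesim.dashlib.dash_namespace import add_patch_ns

print(add_patch_ns("Patch"))
# {urn:mpeg:dash:schema:mpd-patch:2020}Patch
print(add_patch_ns("Patch/replace"))
# {urn:mpeg:dash:schema:mpd-patch:2020}Patch/{urn:mpeg:dash:schema:mpd-patch:2020}replace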
35 changes: 33 additions & 2 deletions dashlivesim/dashlib/dash_proxy.py
@@ -75,8 +75,10 @@

SECS_IN_DAY = 24 * 3600
DEFAULT_MINIMUM_UPDATE_PERIOD = "P100Y"
DEFAULT_MINIMUM_UPDATE_PERIOD_IN_S = SECS_IN_DAY * 365 * 100
DEFAULT_PUBLISH_ADVANCE_IN_S = 7200
EXTRA_TIME_AFTER_END_IN_S = 60
PATCHING_MAXIMUM_UPDATE_LATENCY = 10 # Patch TTL is this many minimumUpdatePeriod intervals

UTC_HEAD_PATH = "dash/time.txt"

@@ -334,6 +336,20 @@ def parse_url(self):
if mpd_input_data['insertAd'] > 0 and nr_xlink_periods_per_hour < 0:
raise Exception("Insert ad option can only be used in conjuction with the xlink option. To use the "
"insert ad option, also set use xlink_m in your url.")

# Based on the simulator implementation, a few option combinations with patching have not been
# implemented directly. Implementation can be requested with a GitHub issue, but we would rather
# have the scenario fail than work improperly.
if mpd_input_data['patching']:
base_text = "The patching option currently cannot be used in conjunction with the %s option. Please file a GitHub issue to request implementation."
if nr_xlink_periods_per_hour != -1:
raise Exception(base_text % "xlink")
if cfg.segtimelineloss:
raise Exception(base_text % "segtimelineloss")
if cfg.modulo_period:
raise Exception(base_text % "modulo")

response = self.generate_dynamic_mpd(cfg, mpd_filename, mpd_input_data, self.now)
#The following 'if' is for IOP 4.11.4.3 , deployment scenario when segments not found.
if len(cfg.multi_url) > 0 and cfg.segtimelineloss == True: # There is one specific baseURL with losses specified
@@ -356,10 +372,18 @@ def parse_url(self):
cfg.emsg_last_seg = True
response = self.generate_dynamic_mpd(cfg, mpd_filename, mpd_input_data, self.now)
cfg.emsg_last_seg = False

if nr_xlink_periods_per_hour > 0:
response = generate_response_with_xlink(response, cfg.ext, cfg.filename, nr_periods_per_hour,
nr_xlink_periods_per_hour, mpd_input_data['insertAd'])

# Manifest patch update. Handled separately here because the xlink logic is unsafe: it assumes all
# periods will be present in the response when performing the replacement.
elif cfg.ext == ".patch":
mpd_filename = "%s/%s/%s.mpd" % (self.content_dir, cfg.content_name, cfg.filename.split('.')[0])
mpd_input_data = cfg_processor.get_mpd_data()
response = self.generate_dynamic_mpd(cfg, mpd_filename, mpd_input_data, self.now)

elif cfg.ext == ".mp4": # Init segment
if self.now < cfg.availability_start_time_in_s - cfg.init_seg_avail_offset:
diff = (cfg.availability_start_time_in_s - cfg.init_seg_avail_offset) - self.now_float
@@ -416,10 +440,14 @@ def generate_dynamic_mpd(self, cfg, mpd_filename, in_data, now):
mpd_data = in_data.copy()
if cfg.minimum_update_period_in_s is not None:
mpd_data['minimumUpdatePeriod'] = seconds_to_iso_duration(cfg.minimum_update_period_in_s)
minimum_update_period_in_s = cfg.minimum_update_period_in_s
else:
mpd_data['minimumUpdatePeriod'] = DEFAULT_MINIMUM_UPDATE_PERIOD
minimum_update_period_in_s = DEFAULT_MINIMUM_UPDATE_PERIOD_IN_S

if cfg.media_presentation_duration is not None:
mpd_data['mediaPresentationDuration'] = seconds_to_iso_duration(cfg.media_presentation_duration)
mpd_data['id'] = cfg.content_name # default in case content has none, required for patching
mpd_data['timeShiftBufferDepth'] = seconds_to_iso_duration(cfg.timeshift_buffer_depth_in_s)
mpd_data['timeShiftBufferDepthInS'] = cfg.timeshift_buffer_depth_in_s
mpd_data['startNumber'] = cfg.adjusted_start_number
@@ -443,9 +471,12 @@ def generate_dynamic_mpd(self, cfg, mpd_filename, in_data, now):
'continuous': in_data['continuous'],
'segtimeline': in_data['segtimeline'],
'segtimeline_nr': in_data['segtimeline_nr'],
'patching': in_data['patching'],
'utc_timing_methods': cfg.utc_timing_methods,
'utc_head_url': self.utc_head_url,
'now': now}
'now': now,
'patch_base': cfg.patch_base,
'patch_ttl': minimum_update_period_in_s * PATCHING_MAXIMUM_UPDATE_LATENCY}
full_url = self.base_url + '/'.join(self.url_parts)
mpmod = mpdprocessor.MpdProcessor(mpd_filename, mpd_proc_cfg, cfg,
full_url)
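For clarity, a standalone sketch of how the patch TTL handed to MpdProcessor is derived. The constant names and the formula are from the diff; the 60-second example value is illustrative.

# Illustrative sketch only; mirrors the patch_ttl computation in generate_dynamic_mpd above.
SECS_IN_DAY = 24 * 3600
DEFAULT_MINIMUM_UPDATE_PERIOD_IN_S = SECS_IN_DAY * 365 * 100
PATCHING_MAXIMUM_UPDATE_LATENCY = 10

def patch_ttl_in_s(minimum_update_period_in_s=None):
    "Patch TTL is a fixed multiple of minimumUpdatePeriod (or of the default when none is configured)."
    if minimum_update_period_in_s is None:
        minimum_update_period_in_s = DEFAULT_MINIMUM_UPDATE_PERIOD_IN_S
    return minimum_update_period_in_s * PATCHING_MAXIMUM_UPDATE_LATENCY

# patch_ttl_in_s(60) -> 600, i.e. with a 60 s minimumUpdatePeriod the TTL is 600 s.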