from __future__ import division, unicode_literals

import base64
import io
import itertools
import os
import time

from .fragment import FragmentFD
from ..compat import (
    compat_etree_fromstring,
    compat_urlparse,
    compat_urllib_error,
    compat_urllib_parse_urlparse,
)
from ..utils import (
    encodeFilename,
    fix_xml_ampersands,
    sanitize_open,
    struct_pack,
    struct_unpack,
    xpath_text,
)


class FlvReader(io.BytesIO):
    """
    Reader for Flv files
    The file format is documented in https://www.adobe.com/devnet/f4v.html
    """

    # Utility functions for reading numbers and strings
    def read_unsigned_long_long(self):
        return struct_unpack('!Q', self.read(8))[0]

    def read_unsigned_int(self):
        return struct_unpack('!I', self.read(4))[0]

    def read_unsigned_char(self):
        return struct_unpack('!B', self.read(1))[0]

    def read_string(self):
        res = b''
        while True:
            char = self.read(1)
            if char == b'\x00':
                break
            res += char
        return res
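
    # A box consists of a 4-byte big-endian size, a 4-byte type and the payload.
    # A size field of 1 means the real size follows as a 64-bit value.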
    def read_box_info(self):
        """
        Read a box and return the info as a tuple: (box_size, box_type, box_data)
        """
        real_size = size = self.read_unsigned_int()
        box_type = self.read(4)
        header_end = 8
        if size == 1:
            real_size = self.read_unsigned_long_long()
            header_end = 16
        return real_size, box_type, self.read(real_size - header_end)

    def read_asrt(self):
        # version
        self.read_unsigned_char()
        # flags
        self.read(3)
        quality_entry_count = self.read_unsigned_char()
        # QualityEntryCount
        for i in range(quality_entry_count):
            self.read_string()

        segment_run_count = self.read_unsigned_int()
        segments = []
        for i in range(segment_run_count):
            first_segment = self.read_unsigned_int()
            fragments_per_segment = self.read_unsigned_int()
            segments.append((first_segment, fragments_per_segment))

        return {
            'segment_run': segments,
        }

    def read_afrt(self):
        # version
        self.read_unsigned_char()
        # flags
        self.read(3)
        # time scale
        self.read_unsigned_int()

        quality_entry_count = self.read_unsigned_char()
        # QualitySegmentUrlModifiers
        for i in range(quality_entry_count):
            self.read_string()

        fragments_count = self.read_unsigned_int()
        fragments = []
        for i in range(fragments_count):
            first = self.read_unsigned_int()
            first_ts = self.read_unsigned_long_long()
            duration = self.read_unsigned_int()
            if duration == 0:
                discontinuity_indicator = self.read_unsigned_char()
            else:
                discontinuity_indicator = None
            fragments.append({
                'first': first,
                'ts': first_ts,
                'duration': duration,
                'discontinuity_indicator': discontinuity_indicator,
            })

        return {
            'fragments': fragments,
        }
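
    # The bootstrap info ('abst') box nests segment run ('asrt') and fragment
    # run ('afrt') boxes, which build_fragments_list() later combines into
    # (segment, fragment) pairs.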
    def read_abst(self):
        # version
        self.read_unsigned_char()
        # flags
        self.read(3)

        self.read_unsigned_int()  # BootstrapinfoVersion
        # Profile,Live,Update,Reserved
        flags = self.read_unsigned_char()
        live = flags & 0x20 != 0
        # time scale
        self.read_unsigned_int()
        # CurrentMediaTime
        self.read_unsigned_long_long()
        # SmpteTimeCodeOffset
        self.read_unsigned_long_long()

        self.read_string()  # MovieIdentifier
        server_count = self.read_unsigned_char()
        # ServerEntryTable
        for i in range(server_count):
            self.read_string()
        quality_count = self.read_unsigned_char()
        # QualityEntryTable
        for i in range(quality_count):
            self.read_string()
        # DrmData
        self.read_string()
        # MetaData
        self.read_string()

        segments_count = self.read_unsigned_char()
        segments = []
        for i in range(segments_count):
            box_size, box_type, box_data = self.read_box_info()
            assert box_type == b'asrt'
            segment = FlvReader(box_data).read_asrt()
            segments.append(segment)
        fragments_run_count = self.read_unsigned_char()
        fragments = []
        for i in range(fragments_run_count):
            box_size, box_type, box_data = self.read_box_info()
            assert box_type == b'afrt'
            fragments.append(FlvReader(box_data).read_afrt())

        return {
            'segments': segments,
            'fragments': fragments,
            'live': live,
        }

    def read_bootstrap_info(self):
        total_size, box_type, box_data = self.read_box_info()
        assert box_type == b'abst'
        return FlvReader(box_data).read_abst()


def read_bootstrap_info(bootstrap_bytes):
    return FlvReader(bootstrap_bytes).read_bootstrap_info()
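

# Illustration: a segment run table of [(1, 5)] with a first fragment number
# of 1 expands to [(1, 1), (1, 2), (1, 3), (1, 4), (1, 5)].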
def build_fragments_list(boot_info):
    """ Return a list of (segment, fragment) for each fragment in the video """
    res = []
    segment_run_table = boot_info['segments'][0]
    fragment_run_entry_table = boot_info['fragments'][0]['fragments']
    first_frag_number = fragment_run_entry_table[0]['first']
    fragments_counter = itertools.count(first_frag_number)
    for segment, fragments_count in segment_run_table['segment_run']:
        for _ in range(fragments_count):
            res.append((segment, next(fragments_counter)))

    if boot_info['live']:
        res = res[-2:]

    return res


def write_unsigned_int(stream, val):
    stream.write(struct_pack('!I', val))


def write_unsigned_int_24(stream, val):
    stream.write(struct_pack('!I', val)[1:])
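

# FLV header: 'FLV' signature, version 1, flags 0x05 (audio + video present),
# 9-byte header size, followed by PreviousTagSize0 = 0.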
def write_flv_header(stream):
    """Writes the FLV header to stream"""
    # FLV header
    stream.write(b'FLV\x01')
    stream.write(b'\x05')
    stream.write(b'\x00\x00\x00\x09')
    stream.write(b'\x00\x00\x00\x00')
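

# An FLV tag is an 11-byte header (one type byte, a 24-bit payload size, then
# timestamp and stream id fields, zeroed here), the payload, and a trailing
# 32-bit PreviousTagSize covering header + payload.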
def write_metadata_tag(stream, metadata):
    """Writes optional metadata tag to stream"""
    SCRIPT_TAG = b'\x12'
    FLV_TAG_HEADER_LEN = 11

    if metadata:
        stream.write(SCRIPT_TAG)
        write_unsigned_int_24(stream, len(metadata))
        stream.write(b'\x00\x00\x00\x00\x00\x00\x00')
        stream.write(metadata)
        write_unsigned_int(stream, FLV_TAG_HEADER_LEN + len(metadata))
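

# Media elements that reference a drmAdditionalHeaderId or
# drmAdditionalHeaderSetId are DRM-protected and cannot be downloaded,
# so they are filtered out.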
def remove_encrypted_media(media):
    return list(filter(lambda e: 'drmAdditionalHeaderId' not in e.attrib and
                                 'drmAdditionalHeaderSetId' not in e.attrib,
                       media))


def _add_ns(prop):
    return '{http://ns.adobe.com/f4m/1.0}%s' % prop


class F4mFD(FragmentFD):
    """
    A downloader for f4m manifests or AdobeHDS.
    """

    FD_NAME = 'f4m'

    def _get_unencrypted_media(self, doc):
        media = doc.findall(_add_ns('media'))
        if not media:
            self.report_error('No media found')
        for e in (doc.findall(_add_ns('drmAdditionalHeader')) +
                  doc.findall(_add_ns('drmAdditionalHeaderSet'))):
            # If the id attribute is missing it applies to all media nodes
            # without a drmAdditionalHeaderId or drmAdditionalHeaderSetId attribute
            if 'id' not in e.attrib:
                self.report_error('Missing ID in f4m DRM')
        media = remove_encrypted_media(media)
        if not media:
            self.report_error('Unsupported DRM')
        return media

    def _get_bootstrap_from_url(self, bootstrap_url):
        bootstrap = self.ydl.urlopen(bootstrap_url).read()
        return read_bootstrap_info(bootstrap)

    def _update_live_fragments(self, bootstrap_url, latest_fragment):
        fragments_list = []
        retries = 30
        while (not fragments_list) and (retries > 0):
            boot_info = self._get_bootstrap_from_url(bootstrap_url)
            fragments_list = build_fragments_list(boot_info)
            fragments_list = [f for f in fragments_list if f[1] > latest_fragment]
            if not fragments_list:
                # Retry after a while
                time.sleep(5.0)
                retries -= 1

        if not fragments_list:
            self.report_error('Failed to update fragments')

        return fragments_list

    def _parse_bootstrap_node(self, node, base_url):
        # Sometimes a non-empty inline bootstrap info can be specified along
        # with the bootstrap url attribute (e.g. the dummy inline bootstrap info
        # contains whitespace characters in [1]). We will prefer the bootstrap
        # url over the inline bootstrap info when present.
        # 1. http://live-1-1.rutube.ru/stream/1024/HDS/SD/C2NKsS85HQNckgn5HdEmOQ/1454167650/S-s604419906/move/four/dirs/upper/1024-576p.f4m
        bootstrap_url = node.get('url')
        if bootstrap_url:
            bootstrap_url = compat_urlparse.urljoin(
                base_url, bootstrap_url)
            boot_info = self._get_bootstrap_from_url(bootstrap_url)
        else:
            bootstrap_url = None
            bootstrap = base64.b64decode(node.text.encode('ascii'))
            boot_info = read_bootstrap_info(bootstrap)
        return boot_info, bootstrap_url

    def real_download(self, filename, info_dict):
        man_url = info_dict['url']
        requested_bitrate = info_dict.get('tbr')
        self.to_screen('[%s] Downloading f4m manifest' % self.FD_NAME)
        urlh = self.ydl.urlopen(man_url)
        man_url = urlh.geturl()
        # Some manifests may be malformed, e.g. prosiebensat1 generated manifests
        # (see https://github.com/rg3/youtube-dl/issues/6215#issuecomment-121704244
        # and https://github.com/rg3/youtube-dl/issues/7823)
        manifest = fix_xml_ampersands(urlh.read().decode('utf-8', 'ignore')).strip()

        doc = compat_etree_fromstring(manifest)
        formats = [(int(f.attrib.get('bitrate', -1)), f)
                   for f in self._get_unencrypted_media(doc)]
        if requested_bitrate is None:
            # get the best format
            formats = sorted(formats, key=lambda f: f[0])
            rate, media = formats[-1]
        else:
            rate, media = list(filter(
                lambda f: int(f[0]) == requested_bitrate, formats))[0]

        base_url = compat_urlparse.urljoin(man_url, media.attrib['url'])
        bootstrap_node = doc.find(_add_ns('bootstrapInfo'))
        boot_info, bootstrap_url = self._parse_bootstrap_node(bootstrap_node, base_url)
        live = boot_info['live']
        metadata_node = media.find(_add_ns('metadata'))
        if metadata_node is not None:
            metadata = base64.b64decode(metadata_node.text.encode('ascii'))
        else:
            metadata = None

        fragments_list = build_fragments_list(boot_info)
        test = self.params.get('test', False)
        if test:
            # We only download the first fragment
            fragments_list = fragments_list[:1]
        total_frags = len(fragments_list)
        # For some akamai manifests we'll need to add a query to the fragment url
        akamai_pv = xpath_text(doc, _add_ns('pv-2.0'))

        ctx = {
            'filename': filename,
            'total_frags': total_frags,
            'live': live,
        }

        self._prepare_frag_download(ctx)

        dest_stream = ctx['dest_stream']

        write_flv_header(dest_stream)
        if not live:
            write_metadata_tag(dest_stream, metadata)

        base_url_parsed = compat_urllib_parse_urlparse(base_url)

        self._start_frag_download(ctx)
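
        # Each fragment is itself a small FLV container; only the payload of
        # its mdat box is appended to the destination stream.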
        frags_filenames = []
        while fragments_list:
            seg_i, frag_i = fragments_list.pop(0)
            name = 'Seg%d-Frag%d' % (seg_i, frag_i)
            query = []
            if base_url_parsed.query:
                query.append(base_url_parsed.query)
            if akamai_pv:
                query.append(akamai_pv.strip(';'))
            if info_dict.get('extra_param_to_segment_url'):
                query.append(info_dict['extra_param_to_segment_url'])
            url_parsed = base_url_parsed._replace(path=base_url_parsed.path + name, query='&'.join(query))
            frag_filename = '%s-%s' % (ctx['tmpfilename'], name)
            try:
                success = ctx['dl'].download(frag_filename, {'url': url_parsed.geturl()})
                if not success:
                    return False
                (down, frag_sanitized) = sanitize_open(frag_filename, 'rb')
                down_data = down.read()
                down.close()
                reader = FlvReader(down_data)
                while True:
                    _, box_type, box_data = reader.read_box_info()
                    if box_type == b'mdat':
                        dest_stream.write(box_data)
                        break
                if live:
                    os.remove(encodeFilename(frag_sanitized))
                else:
                    frags_filenames.append(frag_sanitized)
            except (compat_urllib_error.HTTPError, ) as err:
                if live and (err.code == 404 or err.code == 410):
                    # We didn't keep up with the live window. Continue
                    # with the next available fragment.
                    msg = 'Fragment %d unavailable' % frag_i
                    self.report_warning(msg)
                    fragments_list = []
                else:
                    raise

            if not fragments_list and not test and live and bootstrap_url:
                fragments_list = self._update_live_fragments(bootstrap_url, frag_i)
                total_frags += len(fragments_list)
                if fragments_list and (fragments_list[0][1] > frag_i + 1):
                    msg = 'Missed %d fragments' % (fragments_list[0][1] - (frag_i + 1))
                    self.report_warning(msg)

        self._finish_frag_download(ctx)

        for frag_file in frags_filenames:
            os.remove(encodeFilename(frag_file))

        return True