from __future__ import unicode_literals

from .fragment import FragmentFD
from ..compat import compat_urllib_error
from ..utils import (
    DownloadError,
    urljoin,
)


class DashSegmentsFD(FragmentFD):
    """
    Download segments in a DASH manifest
    """

    FD_NAME = 'dashsegments'

    def real_download(self, filename, info_dict):
        fragment_base_url = info_dict.get('fragment_base_url')
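        # Illustrative note (added; not in the original source): each entry in
        # info_dict['fragments'] is expected to carry either an absolute 'url'
        # or a 'path' that is resolved against fragment_base_url, e.g.
        #     {'fragments': [{'path': 'seg-1.m4s'}, ...],
        #      'fragment_base_url': 'https://example.com/dash/'}
        # (the URL and segment names here are hypothetical)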
					
						
							|  |  |  |         fragments = info_dict['fragments'][:1] if self.params.get( | 
					
						
							| 
									
										
										
										
											2016-09-17 20:35:22 +07:00
										 |  |  |             'test', False) else info_dict['fragments'] | 
					
						
							| 
									
										
										
										
											2015-06-10 14:45:54 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-02-09 17:25:02 +01:00
										 |  |  |         ctx = { | 
					
						
							|  |  |  |             'filename': filename, | 
					
						
							| 
									
										
										
										
											2017-08-05 06:57:19 +07:00
										 |  |  |             'total_frags': len(fragments), | 
					
						
							| 
									
										
										
										
											2016-02-09 17:25:02 +01:00
										 |  |  |         } | 
					
						
							| 
									
										
										
										
											2015-06-10 14:45:54 +08:00
										 |  |  | 
 | 
					
						
							| 
									
										
										
										
											2016-02-09 17:25:02 +01:00
										 |  |  |         self._prepare_and_start_frag_download(ctx) | 
					
						
							| 
									
										
										
										
											2015-06-03 23:10:18 +08:00
										 |  |  | 
 | 
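        # Note (added for clarity; describes FragmentFD behaviour as assumed here):
        # the call above opens the output file and restores any saved download
        # state, so ctx['fragment_index'] below reflects fragments that were
        # already written on a previous, interrupted run.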
					
						
        fragment_retries = self.params.get('fragment_retries', 0)
        skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True)

        frag_index = 0
        for i, fragment in enumerate(fragments):
            frag_index += 1
            if frag_index <= ctx['fragment_index']:
                continue
            # In DASH, the first segment contains the headers necessary to
            # generate a valid MP4 file, so a failure on the first segment is
            # always fatal
            fatal = i == 0 or not skip_unavailable_fragments
            count = 0
            while count <= fragment_retries:
                try:
                    fragment_url = fragment.get('url')
                    if not fragment_url:
                        assert fragment_base_url
                        fragment_url = urljoin(fragment_base_url, fragment['path'])
                    success, frag_content = self._download_fragment(ctx, fragment_url, info_dict)
                    if not success:
                        return False
                    self._append_fragment(ctx, frag_content)
                    break
                except compat_urllib_error.HTTPError as err:
                    # YouTube may often return a 404 HTTP error for a fragment, causing
                    # the whole download to fail. However, if the same fragment is
                    # immediately retried with the same request data, it usually succeeds
                    # (1-2 attempts are usually enough), allowing the whole file to be
                    # downloaded successfully. To be future-proof, we retry all fragments
                    # that fail with any HTTP error.
                    count += 1
                    if count <= fragment_retries:
                        self.report_retry_fragment(err, frag_index, count, fragment_retries)
                except DownloadError:
                    # Don't retry the fragment if the error occurred during the HTTP
                    # download itself, since that download has its own retry settings
                    if not fatal:
                        self.report_skip_fragment(frag_index)
                        break
                    raise

            if count > fragment_retries:
                if not fatal:
                    self.report_skip_fragment(frag_index)
                    continue
                self.report_error('giving up after %s fragment retries' % fragment_retries)
                return False

        self._finish_frag_download(ctx)

        return True
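

# Usage sketch (added; illustrative only, not part of the original module):
# youtube-dl normally selects this downloader for formats whose protocol is
# 'http_dash_segments', roughly along these lines:
#
#     from youtube_dl.downloader import get_suitable_downloader
#
#     fd_cls = get_suitable_downloader(info_dict)  # DashSegmentsFD for DASH formats
#     fd = fd_cls(ydl, ydl.params)                 # 'ydl' is a YoutubeDL instance
#     fd.download(filename, info_dict)             # info_dict carries the 'fragments' list used above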