from __future__ import unicode_literals

import re
import itertools

from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
    ExtractorError,
    float_or_none,
    int_or_none,
    sanitized_Request,
    urlencode_postdata,
)


class BambuserIE(InfoExtractor):
    IE_NAME = 'bambuser'
    _VALID_URL = r'https?://bambuser\.com/v/(?P<id>\d+)'
    _API_KEY = '005f64509e19a868399060af746a00aa'
    _LOGIN_URL = 'https://bambuser.com/user'
    _NETRC_MACHINE = 'bambuser'

    _TEST = {
        'url': 'http://bambuser.com/v/4050584',
        # MD5 seems to be flaky, see https://travis-ci.org/rg3/youtube-dl/jobs/14051016#L388
        # 'md5': 'fba8f7693e48fd4e8641b3fd5539a641',
        'info_dict': {
            'id': '4050584',
            'ext': 'flv',
            'title': 'Education engineering days - lightning talks',
            'duration': 3741,
            'uploader': 'pixelversity',
            'uploader_id': '344706',
            'timestamp': 1382976692,
            'upload_date': '20131028',
            'view_count': int,
        },
        'params': {
            # The server doesn't respect the 'Range' header and would download the whole
            # video, which caused the travis builds to fail: https://travis-ci.org/rg3/youtube-dl/jobs/14493845#L59
            'skip_download': True,
        },
    }

    def _login(self):
        username, password = self._get_login_info()
        if username is None:
            return

        login_form = {
            'form_id': 'user_login',
            'op': 'Log in',
            'name': username,
            'pass': password,
        }

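        # Submit the login form; any error message shown on the resulting page
        # is extracted and reported below.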
        request = sanitized_Request(
            self._LOGIN_URL, urlencode_postdata(login_form))
        request.add_header('Referer', self._LOGIN_URL)
        response = self._download_webpage(
            request, None, 'Logging in')

        login_error = self._html_search_regex(
            r'(?s)<div class="messages error">(.+?)</div>',
            response, 'login error', default=None)
        if login_error:
            raise ExtractorError(
                'Unable to login: %s' % login_error, expected=True)

    def _real_initialize(self):
        self._login()

    def _real_extract(self, url):
        video_id = self._match_id(url)

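        # Video metadata comes from the player API, queried with the
        # extractor's hard-coded API key.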
        info = self._download_json(
            'http://player-c.api.bambuser.com/getVideo.json?api_key=%s&vid=%s'
            % (self._API_KEY, video_id), video_id)

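        # The API reports failures through an 'error' field in the JSON response.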
        error = info.get('error')
        if error:
            raise ExtractorError(
                '%s returned error: %s' % (self.IE_NAME, error), expected=True)

        result = info['result']

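        # Only the title and media URL are mandatory; all other fields are
        # optional in the API result.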
        return {
            'id': video_id,
            'title': result['title'],
            'url': result['url'],
            'thumbnail': result.get('preview'),
            'duration': int_or_none(result.get('length')),
            'uploader': result.get('username'),
            'uploader_id': compat_str(result.get('owner', {}).get('uid')),
            'timestamp': int_or_none(result.get('created')),
            'fps': float_or_none(result.get('framerate')),
            'view_count': int_or_none(result.get('views_total')),
            'comment_count': int_or_none(result.get('comment_count')),
        }


class BambuserChannelIE(InfoExtractor):
    IE_NAME = 'bambuser:channel'
    _VALID_URL = r'https?://bambuser\.com/channel/(?P<user>.*?)(?:/|#|\?|$)'
    # The maximum number of results we can get with each request
    _STEP = 50
    _TEST = {
        'url': 'http://bambuser.com/channel/pixelversity',
        'info_dict': {
            'title': 'pixelversity',
        },
        'playlist_mincount': 60,
    }

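    # Broadcasts are fetched page by page through the site's XHR API; the id of
    # the oldest item seen so far is passed as vid_older_than to get the next page.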
    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        user = mobj.group('user')
        urls = []
        last_id = ''
        for i in itertools.count(1):
            req_url = (
                'http://bambuser.com/xhr-api/index.php?username={user}'
                '&sort=created&access_mode=0%2C1%2C2&limit={count}'
                '&method=broadcast&format=json&vid_older_than={last}'
            ).format(user=user, count=self._STEP, last=last_id)
            req = sanitized_Request(req_url)
            # Without setting this header, we wouldn't get any result
            req.add_header('Referer', 'http://bambuser.com/channel/%s' % user)
            data = self._download_json(
                req, user, 'Downloading page %d' % i)
            results = data['result']
            if not results:
                break
            last_id = results[-1]['vid']
            urls.extend(self.url_result(v['page'], 'Bambuser') for v in results)

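        # Return the collected broadcast pages as a playlist named after the channel.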
        return {
            '_type': 'playlist',
            'title': user,
            'entries': urls,
        }