import os
import json
from requests import session, cookies
-from urllib import quote
+from urllib import quote, unquote
from time import time
from base64 import urlsafe_b64encode
from bs4 import BeautifulSoup, SoupStrainer
urls = {
'login': '/login',
- 'browse': '/browse',
+ 'browse': '/profiles/manage',
'video_list_ids': '/preflight',
'shakti': '/pathEvaluator',
- 'profiles': '/browse',
+ 'profiles': '/profiles/manage',
'switch_profiles': '/profiles/switch',
'adult_pin': '/pin/service',
'metadata': '/metadata',
'set_video_rating': '/setVideoRating',
- 'update_my_list': '/playlistop'
+ 'update_my_list': '/playlistop',
+ 'kids': '/Kids'  # Kids landing page; fetched only to read the profile lolomo cookie
}
""":obj:`dict` of :obj:`str` List of all static endpoints for HTML/JSON POST/GET requests"""
List of all the serialized data pulled out of the pages <script/> tags
"""
scripts = page_soup.find_all('script', attrs={'src': None});
- self.log('Trying sloppy inline data parser')
+ self.log(msg='Trying sloppy inline data parser')
inline_data = self._sloppy_parse_inline_data(scripts=scripts)
if self._verfify_auth_and_profiles_data(data=inline_data) != False:
- self.log('Sloppy inline data parsing successfull')
+ self.log(msg='Sloppy inline data parsing successfull')
return inline_data
- self.log('Sloppy inline parser failed, trying JS parser')
+ self.log(msg='Sloppy inline parser failed, trying JS parser')
return self._accurate_parse_inline_data(scripts=scripts)
def is_logged_in (self, account):
video_lists = response_data['lists']
for video_list_id in video_lists.keys():
video_list = video_lists[video_list_id]
- if video_list['context'] == 'genre':
- video_list_ids['genres'].update(self.parse_video_list_ids_entry(id=video_list_id, entry=video_list))
- elif video_list['context'] == 'similars' or video_list['context'] == 'becauseYouAdded':
- video_list_ids['recommendations'].update(self.parse_video_list_ids_entry(id=video_list_id, entry=video_list))
- else:
- video_list_ids['user'].update(self.parse_video_list_ids_entry(id=video_list_id, entry=video_list))
-
+ if video_list.get('context', False) != False:
+ if video_list['context'] == 'genre':
+ video_list_ids['genres'].update(self.parse_video_list_ids_entry(id=video_list_id, entry=video_list))
+ elif video_list['context'] == 'similars' or video_list['context'] == 'becauseYouAdded':
+ video_list_ids['recommendations'].update(self.parse_video_list_ids_entry(id=video_list_id, entry=video_list))
+ else:
+ video_list_ids['user'].update(self.parse_video_list_ids_entry(id=video_list_id, entry=video_list))
return video_list_ids
def parse_video_list_ids_entry (self, id, entry):
'synopsis': video['synopsis'],
'regular_synopsis': video['regularSynopsis'],
'type': video['summary']['type'],
- 'rating': video['userRating']['average'],
+ 'rating': video['userRating'].get('average', 0) if video['userRating'].get('average', None) != None else video['userRating'].get('predicted', 0),
'episode_count': season_info['episode_count'],
'seasons_label': season_info['seasons_label'],
'seasons_count': season_info['seasons_count'],
'mpaa': str(episode['maturity']['rating']['board']) + ' ' + str(episode['maturity']['rating']['value']),
'maturity': episode['maturity'],
'playcount': (0, 1)[episode['watched']],
- 'rating': episode['userRating']['average'],
+ 'rating': episode['userRating'].get('average', 0) if episode['userRating'].get('average', None) != None else episode['userRating'].get('predicted', 0),
'thumb': episode['info']['interestingMoments']['url'],
'fanart': episode['interestingMoment']['_1280x720']['jpg']['url'],
'poster': episode['boxarts']['_1280x720']['jpg']['url'],
'_': int(time()),
'authURL': self.user_data['authURL']
}
+
response = self._session_get(component='video_list_ids', params=payload, type='api')
return self._process_response(response=response, component=self._get_api_url_for(component='video_list_ids'))
response = self._path_request(paths=paths)
return self._process_response(response=response, component='Search results')
+ def get_lolomo_for_kids (self):
+ """Fetches the lolomo ID for Kids profiles
+
+ Returns
+ -------
+ :obj:`str`
+ Kids Lolomo ID, or None if no matching cookie was found
+ """
+ response = self._session_get(component='kids')
+ for cookie in response.cookies:
+ # The lolomo id travels in a per-profile cookie whose name contains
+ # 'lhpuuidh-browse-<guid>'; variants whose name contains '-T' are skipped
+ if cookie.name.find('lhpuuidh-browse-' + self.user_data['guid']) != -1 and cookie.name.rfind('-T') == -1:
+ # Cookie value is URL-encoded; the id is the segment after the last ':'.
+ # NOTE(review): if no ':' is present, rfind returns -1 and the whole
+ # decoded value is returned - confirm that is the intended fallback
+ start = unquote(cookie.value).rfind(':')
+ return unquote(cookie.value)[start+1:]
+ return None
+
+ def fetch_lists_for_kids (self, lolomo, list_from=0, list_to=50):
+ """Fetches the JSON which contains the contents of the video lists for kids profiles
+
+ Parameters
+ ----------
+ lolomo : :obj:`str`
+ Lolomo ID for the Kids profile
+
+ list_from : :obj:`int`
+ Start entry for pagination
+
+ list_to : :obj:`int`
+ Last entry for pagination
+
+ Returns
+ -------
+ :obj:`dict` of :obj:`dict` of :obj:`str`
+ Video list ids as returned by parse_video_list_ids
+ (bucketed into genres/recommendations/user entries)
+ """
+ paths = [
+ ['lists', lolomo, {'from': list_from, 'to': list_to}, ['displayName', 'context', 'genreId', 'id', 'index', 'length']]
+ ]
+
+ response = self._path_request(paths=paths)
+ res = self._process_response(response=response, component='Kids lists')
+ return self.parse_video_list_ids(response_data=res['value'])
+
def fetch_video_list (self, list_id, list_from=0, list_to=20):
"""Fetches the JSON which contains the contents of a given video list
})
params = {
- 'withSize': True,
- 'materialize': True,
'model': self.user_data['gpsModel']
}
start = time()
response = self.session.post(url=url, data=data, params=params, headers=headers, verify=self.verify_ssl)
end = time()
- self.log('[POST] Request for "' + url + '" took ' + str(end - start) + ' seconds')
+ self.log(msg='[POST] Request for "' + url + '" took ' + str(end - start) + ' seconds')
return response
def _session_get (self, component, type='document', params={}):
url = self._get_document_url_for(component=component) if type == 'document' else self._get_api_url_for(component=component)
start = time()
response = self.session.get(url=url, verify=self.verify_ssl, params=params)
end = time()
- self.log('[GET] Request for "' + url + '" took ' + str(end - start) + ' seconds')
+ self.log(msg='[GET] Request for "' + url + '" took ' + str(end - start) + ' seconds')
return response
def _sloppy_parse_user_and_api_data (self, key, contents):
'profileName',
'isActive',
'isFirstUse',
- 'isAccountOwner'
+ 'isAccountOwner',
+ 'isKids'
]
-
# values are accessible via dict (sloppy parsing successfull)
if type(netflix_page_data) == dict:
for profile_id in netflix_page_data.get('profiles'):
self.esn = self._parse_esn_data(netflix_page_data=netflix_page_data)
self.api_data = self._parse_api_base_data(netflix_page_data=netflix_page_data)
self.profiles = self._parse_profile_data(netflix_page_data=netflix_page_data)
- self.log('Found ESN "' + self.esn + '"')
+ self.log(msg='Found ESN "' + self.esn + '"')
return netflix_page_data