import urllib
import json
import requests
-import pickle
-from BeautifulSoup import BeautifulSoup
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+from bs4 import BeautifulSoup
from utils import strip_tags
from utils import noop
class NetflixSession:
"""Helps with login/session management of Netflix users & API data fetching"""
- base_url = 'https://www.netflix.com/'
+ base_url = 'https://www.netflix.com'
"""str: Secure Netflix url"""
urls = {
esn = ''
"""str: Widevine esn, something like: NFCDCH-MC-D7D6F54LOPY8J416T72MQXX3RD20ME"""
- def __init__(self, cookie_path, data_path, log_fn=noop):
+ def __init__(self, cookie_path, data_path, verify_ssl=True, log_fn=noop):
"""Stores the cookie path for later use & instantiates a requests
session with a proper user agent & stored cookies/data if available
"""
self.cookie_path = cookie_path
self.data_path = data_path
+ self.verify_ssl = verify_ssl
self.log = log_fn
# start session, fake chrome (so that we get a proper widevine esn) & enable gzip
value from the form field
"""
login_input_fields = {}
- login_inputs = form_soup.findAll('input')
+ login_inputs = form_soup.find_all('input')
# gather all form fields, set an empty string as the default value
for item in login_inputs:
keys = dict(item.attrs).keys()
List of all the serialized data pulled out of the page's <script/> tags
"""
inline_data = [];
- data_scripts = page_soup.findAll('script', attrs={'src': None});
+ data_scripts = page_soup.find_all('script', attrs={'src': None});
for script in data_scripts:
# ugly part: try to parse the data & don't care about errors (as there will be some)
try:
profile = {'id': profile_id}
for important_field in important_fields:
profile.update({important_field: item['profiles'][profile_id]['summary'][important_field]})
- profile.update({'avatar': item['avatars']['nf'][item['profiles'][profile_id]['summary']['avatarName']]['images']['byWidth']['320']['value']})
+ avatar_base = item['avatars']['nf'].get(item['profiles'][profile_id]['summary']['avatarName'], False);
+ avatar = 'https://secure.netflix.com/ffe/profiles/avatars_v2/320x320/PICON_029.png' if avatar_base == False else avatar_base['images']['byWidth']['320']['value']
+ profile.update({'avatar': avatar})
profiles.update({profile_id: profile})
return profiles
return False
if self._load_data(filename=self.data_path + '_' + account_hash) == False:
# load the profiles page (to verify the user)
- response = self.session.get(self._get_document_url_for(component='profiles'))
+ response = self.session.get(self._get_document_url_for(component='profiles'), verify=self.verify_ssl)
# parse out the needed inline information
page_soup = BeautifulSoup(response.text)
bool
User could be logged in or not
"""
- response = self.session.get(self._get_document_url_for(component='login'))
+ response = self.session.get(self._get_document_url_for(component='login'), verify=self.verify_ssl)
if response.status_code != 200:
return False;
login_payload['password'] = account['password']
# perform the login
- login_response = self.session.post(self._get_document_url_for(component='login'), data=login_payload)
+ login_response = self.session.post(self._get_document_url_for(component='login'), data=login_payload, verify=self.verify_ssl)
login_soup = BeautifulSoup(login_response.text)
# we know that the login was successful if we find an HTML element with the class of 'profile-name'
'authURL': self.user_data['authURL']
}
- response = self.session.get(self._get_api_url_for(component='switch_profiles'), params=payload);
+ response = self.session.get(self._get_api_url_for(component='switch_profiles'), params=payload, verify=self.verify_ssl);
if response.status_code != 200:
return False
# fetch the index page again, so that we can fetch the corresponding user data
- browse_response = self.session.get(self._get_document_url_for(component='browse'))
+ browse_response = self.session.get(self._get_document_url_for(component='browse'), verify=self.verify_ssl)
browse_soup = BeautifulSoup(browse_response.text)
self._parse_page_contents(page_soup=browse_soup)
account_hash = self._generate_account_hash(account=account)
'authURL': self.user_data['authURL']
}
url = self._get_api_url_for(component='adult_pin')
- response = self.session.get(url, params=payload);
+ response = self.session.get(url, params=payload, verify=self.verify_ssl);
pin_response = self._process_response(response=response, component=url)
keys = pin_response.keys()
if 'success' in keys:
'authURL': self.user_data['authURL']
})
- response = self.session.post(self._get_api_url_for(component='set_video_rating'), params=params, headers=headers, data=payload)
+ response = self.session.post(self._get_api_url_for(component='set_video_rating'), params=params, headers=headers, data=payload, verify=self.verify_ssl)
return response.status_code == 200
def parse_video_list_ids (self, response_data):
:obj:`BeautifulSoup`
Instance of an BeautifulSoup document containing the complete page contents
"""
- response = self.session.get(self._get_document_url_for(component='browse'))
+ response = self.session.get(self._get_document_url_for(component='browse'), verify=self.verify_ssl)
return BeautifulSoup(response.text)
def fetch_video_list_ids (self, list_from=0, list_to=50):
'authURL': self.user_data['authURL']
}
url = self._get_api_url_for(component='video_list_ids')
- response = self.session.get(url, params=payload);
+ response = self.session.get(url, params=payload, verify=self.verify_ssl);
return self._process_response(response=response, component=url)
- def fetch_search_results (self, search_str, list_from=0, list_to=48):
+ def fetch_search_results (self, search_str, list_from=0, list_to=10):
"""Fetches the JSON which contains the results for the given search query
Parameters
paths = [
['search', encoded_search_string, 'titles', {'from': list_from, 'to': list_to}, ['summary', 'title']],
['search', encoded_search_string, 'titles', {'from': list_from, 'to': list_to}, 'boxarts', '_342x192', 'jpg'],
- ['search', encoded_search_string, 'titles', ['id', 'length', 'name', 'trackIds', 'requestId']]
+ ['search', encoded_search_string, 'titles', ['id', 'length', 'name', 'trackIds', 'requestId']],
+ ['search', encoded_search_string, 'suggestions', 0, 'relatedvideos', {'from': list_from, 'to': list_to}, ['summary', 'title']],
+ ['search', encoded_search_string, 'suggestions', 0, 'relatedvideos', {'from': list_from, 'to': list_to}, 'boxarts', '_342x192', 'jpg'],
+ ['search', encoded_search_string, 'suggestions', 0, 'relatedvideos', ['id', 'length', 'name', 'trackIds', 'requestId']]
]
response = self._path_request(paths=paths)
return self._process_response(response=response, component='Search results')
'_': int(time.time())
}
url = self._get_api_url_for(component='metadata')
- response = self.session.get(url, params=payload);
+ response = self.session.get(url, params=payload, verify=self.verify_ssl);
return self._process_response(response=response, component=url)
def fetch_show_information (self, id, type):
Dict containing an email, country & a password property
"""
# load the profiles page (to verify the user)
- response = self.session.get(self._get_document_url_for(component='profiles'))
-
+ response = self.session.get(self._get_document_url_for(component='profiles'), verify=self.verify_ssl)
# parse out the needed inline information
page_soup = BeautifulSoup(response.text)
page_data = self.extract_inline_netflix_page_data(page_soup=page_soup)
'model': self.user_data['gpsModel']
}
- return self.session.post(self._get_api_url_for(component='shakti'), params=params, headers=headers, data=data)
+ return self.session.post(self._get_api_url_for(component='shakti'), params=params, headers=headers, data=data, verify=self.verify_ssl)
def _is_size_key (self, key):
"""Tiny helper that checks if a given key is called $size or size, as we need to check this often
'authURL': self.user_data['authURL']
})
- response = self.session.post(self._get_api_url_for(component='update_my_list'), headers=headers, data=payload)
+ response = self.session.post(self._get_api_url_for(component='update_my_list'), headers=headers, data=payload, verify=self.verify_ssl)
return response.status_code == 200
def _save_data(self, filename):