projects
/
plugin.video.netflix.git
/ commitdiff
commit
grep
author
committer
pickaxe
?
search:
re
summary
|
shortlog
|
log
|
commit
| commitdiff |
tree
raw
|
patch
|
inline
| side by side (parent: 59f30d0)
fix(parser): Fix copy & paste error
author
Sebastian Golasch
<public@asciidisco.com>
Sat, 4 Mar 2017 19:26:20 +0000
(20:26 +0100)
committer
Sebastian Golasch
<public@asciidisco.com>
Sat, 4 Mar 2017 19:26:20 +0000
(20:26 +0100)
resources/lib/NetflixSession.py
patch
|
blob
|
history
diff --git
a/resources/lib/NetflixSession.py
b/resources/lib/NetflixSession.py
index 0e75d0dbd1a74dcfe42f9f646b50529d2a1e37de..bd01b9de1e404e4f9a9eae3965cf1f44437f6510 100644
(file)
--- a/resources/lib/NetflixSession.py
+++ b/resources/lib/NetflixSession.py
@@ -433,7 +433,7 @@ class NetflixSession:
return False;
# collect all the login fields & their contents and add the user credentials
return False;
# collect all the login fields & their contents and add the user credentials
-        page_soup = BeautifulSoup(response.text)
+        page_soup = BeautifulSoup(response.text, 'html.parser')
login_form = page_soup.find(attrs={'class' : 'ui-label-text'}).findPrevious('form')
login_payload = self.parse_login_form_fields(form_soup=login_form)
if 'email' in login_payload:
login_form = page_soup.find(attrs={'class' : 'ui-label-text'}).findPrevious('form')
login_payload = self.parse_login_form_fields(form_soup=login_form)
if 'email' in login_payload:
@@ -488,7 +488,7 @@ class NetflixSession:
# fetch the index page again, so that we can fetch the corresponding user data
browse_response = self.session.get(self._get_document_url_for(component='browse'), verify=self.verify_ssl)
only_script_tags = SoupStrainer('script')
# fetch the index page again, so that we can fetch the corresponding user data
browse_response = self.session.get(self._get_document_url_for(component='browse'), verify=self.verify_ssl)
only_script_tags = SoupStrainer('script')
-        browse_soup = BeautifulSoup(response.text, 'html.parser', parse_only=only_script_tags)
+        browse_soup = BeautifulSoup(browse_response.text, 'html.parser', parse_only=only_script_tags)
self._parse_page_contents(page_soup=browse_soup)
account_hash = self._generate_account_hash(account=account)
self._save_data(filename=self.data_path + '_' + account_hash)
self._parse_page_contents(page_soup=browse_soup)
account_hash = self._generate_account_hash(account=account)
self._save_data(filename=self.data_path + '_' + account_hash)