fix for phantomjs send_keys #63

Open · wants to merge 3 commits into master
3 changes: 3 additions & 0 deletions GoogleScraper/config.cfg

@@ -99,6 +99,9 @@ use_own_ip: True
 ; Whether to check proxies before starting the scrape
 check_proxies: True

+; Set HTTP requests to stop waiting for a response after a given number of seconds
+timeout: 10
+
 ; Global configuration parameters that apply on all modes.
 [GLOBAL]
 ; The proxy file. If this is a valid file path, each line will represent a proxy.
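After this change the scraping section of config.cfg carries the new option; a minimal excerpt of how it would read (assuming the hunk sits in the [SCRAPING] section, which is the section core.py reads below):

[SCRAPING]
; Whether to check proxies before starting the scrape
check_proxies: True

; Set HTTP requests to stop waiting for a response after a given number of seconds
timeout: 10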
5 changes: 5 additions & 0 deletions GoogleScraper/core.py

@@ -259,6 +259,10 @@ def main(return_results=False, parse_cmd_line=True):

     if Config['SCRAPING'].getboolean('use_own_ip'):
         proxies.append(None)

+    request_timeout = Config['SCRAPING'].getint('timeout', 10)
+    if request_timeout < 10:
+        request_timeout = 10
+
     if not proxies:
         raise InvalidConfigurationException("No proxies available and using own IP is prohibited by configuration. Turning down.")

@@ -398,6 +402,7 @@ def main(return_results=False, parse_cmd_line=True):
                 db_lock=db_lock,
                 proxy=proxy_to_use,
                 progress_queue=q,
+                request_timeout=request_timeout
             )
         )
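The new lines read the option through configparser's typed getter with a fallback and clamp it to a 10-second floor before handing it to each scraper. A minimal, self-contained sketch of that pattern (assuming Config behaves like a standard configparser.ConfigParser; the inline section text is for illustration only):

import configparser

config = configparser.ConfigParser()
config.read_string("""
[SCRAPING]
timeout: 25
""")

# getint() falls back to 10 when the option is missing from the file
request_timeout = config['SCRAPING'].getint('timeout', 10)

# never go below 10 seconds, mirroring the check added in core.py
request_timeout = max(request_timeout, 10)
print(request_timeout)  # -> 25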
3 changes: 2 additions & 1 deletion GoogleScraper/http.py

@@ -250,7 +250,8 @@ def search(self, *args, rand=False, **kwargs):
         super().detection_prevention_sleep()
         super().keyword_info()

-        request = self.requests.get(self.base_search_url + urlencode(self.search_params), headers=self.headers, timeout=5)
+        request = self.requests.get(self.base_search_url + urlencode(self.search_params), headers=self.headers,
+                                    timeout=self.request_timeout)

         self.current_request_time = datetime.datetime.utcnow()
         self.html = request.text
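The timeout argument to requests.get() bounds how long the client waits to connect and read before raising requests.exceptions.Timeout, so a scrape no longer hangs indefinitely on a dead proxy. A rough usage sketch (the URL and error handling are illustrative, not GoogleScraper code):

import requests

try:
    response = requests.get('https://www.google.com/search?q=test',
                            timeout=10)  # seconds to wait for connect/read
except requests.exceptions.Timeout:
    # the server or proxy took longer than the configured timeout
    print('request timed out, keyword should be retried or marked as missed')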
8 changes: 6 additions & 2 deletions GoogleScraper/scraping.py

@@ -136,7 +136,8 @@ class SearchEngineScrape(metaclass=abc.ABCMeta):
     }

     def __init__(self, keywords=None, scraper_search=None, session=None, db_lock=None, cache_lock=None,
-                 start_page_pos=1, search_engine=None, search_type=None, proxy=None, progress_queue=None):
+                 start_page_pos=1, search_engine=None, search_type=None, proxy=None, progress_queue=None,
+                 request_timeout=10):
         """Instantiate an SearchEngineScrape object.

         Args:

@@ -240,6 +241,9 @@ def __init__(self, keywords=None, scraper_search=None, session=None, db_lock=Non
         # the default timeout
         self.timeout = 5

+        # http request timeout
+        self.request_timeout = request_timeout
+

@@ -275,7 +279,7 @@ def blocking_search(self, callback, *args, **kwargs):
                 # Leave search when search engines detected us
                 # add the rest of the keywords as missed one
                 logger.critical(e)
-                self.missed_keywords.add(self.keywords[i:])
+                self.missed_keywords.add(self.keywords[i])
                 continue

     @abc.abstractmethod
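The last hunk is a small bug fix rather than a timeout change: assuming missed_keywords is a set and keywords is a list, adding the slice self.keywords[i:] raises TypeError because lists are unhashable, while self.keywords[i] records the single keyword that failed. If the intent of the surrounding comment ("add the rest of the keywords") is to keep every remaining keyword, set.update() would do that instead; a quick illustration:

keywords = ['foo', 'bar', 'baz']
missed_keywords = set()

# missed_keywords.add(keywords[1:])   # TypeError: unhashable type: 'list'
missed_keywords.add(keywords[1])      # records only the keyword that failed
missed_keywords.update(keywords[1:])  # would record the failed keyword plus all remaining ones
print(missed_keywords)                # {'bar', 'baz'}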
5 changes: 4 additions & 1 deletion GoogleScraper/selenium.py

@@ -364,7 +364,10 @@ def search(self):
         if self.search_input:
             self.search_input.clear()
             time.sleep(.25)
-            self.search_input.send_keys(self.current_keyword + Keys.ENTER)
+            self.search_input.send_keys(self.current_keyword)
+            if self.browser_type == 'phantomjs':
+                time.sleep(1)  # PhantomJS is much faster than Firefox and Chrome
+            self.search_input.send_keys(Keys.ENTER)
             self.current_request_time = datetime.datetime.utcnow()
         else:
             logger.warning('Cannot get handle to the input form for keyword {}.'.format(self.current_keyword))
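The fix separates typing the keyword from pressing ENTER and gives PhantomJS an extra second before the submit. An alternative to the fixed sleep would be Selenium's explicit waits, which poll until the input actually contains the keyword; a sketch under that assumption (driver setup and the 'q' locator are illustrative, not the project's code):

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

driver = webdriver.PhantomJS()
driver.get('https://www.google.com')

search_input = driver.find_element(By.NAME, 'q')
search_input.send_keys('some keyword')

# wait (up to 5 seconds) until the typed text is really in the input, then submit
WebDriverWait(driver, 5).until(
    EC.text_to_be_present_in_element_value((By.NAME, 'q'), 'some keyword'))
search_input.send_keys(Keys.ENTER)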