in mozdownload/scraper.py [0:0]
def __init__(self, destination=None, platform=None,
             application='firefox', locale=None, extension=None,
             username=None, password=None,
             retry_attempts=0, retry_delay=10.,
             is_stub_installer=False, timeout=None,
             logger=None,
             base_url=BASE_URL):
    """Create an instance of the generic scraper.

    Resolves sensible defaults for every optional argument, prepares the
    HTTP session, builds the application base URL, and finally fetches the
    build information (with 404-aware retries).
    """
    # Lazily-populated caches for the filename/binary properties.
    self._filename = None
    self._binary = None

    self.logger = logger if logger is not None else logging.getLogger(self.__module__)
    self.destination = destination if destination else os.getcwd()

    # Multi-locale applications ship a single 'multi' build, overriding
    # any explicitly requested locale; otherwise fall back to en-US.
    if application in APPLICATIONS_MULTI_LOCALE:
        self.locale = 'multi'
    else:
        self.locale = locale if locale else 'en-US'
    self.locale_build = self.locale not in ('en-US', 'multi')

    self.platform = platform if platform else self.detect_platform()

    self.session = requests.Session()
    # Attach HTTP basic auth only when at least one credential was given.
    if username is not None or password is not None:
        self.session.auth = (username, password)

    self.retry_attempts = retry_attempts
    self.retry_delay = retry_delay
    self.is_stub_installer = is_stub_installer
    self.timeout_download = timeout
    # This is the timeout used in requests.get. Unlike "auth", it does not
    # work if we attach it on the session, so we handle it independently.
    self.timeout_network = 60.

    # Build the base URL for the selected application.
    self.application = application
    self.base_url = urljoin(base_url, self.application) + '/'

    if extension:
        self.extension = extension
    elif self.application in APPLICATIONS_MULTI_LOCALE and \
            self.platform in ('win32', 'win64'):
        # Builds for APPLICATIONS_MULTI_LOCALE only exist in zip.
        self.extension = 'zip'
    else:
        self.extension = DEFAULT_FILE_EXTENSIONS[self.platform]

    self._retry_check_404(self.get_build_info)