diff --git a/testUpdateHostsFile.py b/testUpdateHostsFile.py
index 62616dbc9..77e3ac44a 100644
--- a/testUpdateHostsFile.py
+++ b/testUpdateHostsFile.py
@@ -1617,14 +1617,14 @@ class GetFileByUrl(BaseStdout):
         url = b"www.google.com"
         expected = "www.google.com"
 
-        actual = get_file_by_url(url)
+        actual = get_file_by_url(url, delay=0)
         self.assertEqual(actual, expected)
 
     @mock.patch("updateHostsFile.urlopen", side_effect=mock_url_open_fail)
     def test_read_url_fail(self, _):
         url = b"www.google.com"
-        self.assertIsNone(get_file_by_url(url))
+        self.assertIsNone(get_file_by_url(url, delay=0))
 
         expected = "Problem getting file:"
         output = sys.stdout.getvalue()
@@ -1634,7 +1634,7 @@ class GetFileByUrl(BaseStdout):
     @mock.patch("updateHostsFile.urlopen", side_effect=mock_url_open_read_fail)
     def test_read_url_read_fail(self, _):
         url = b"www.google.com"
-        self.assertIsNone(get_file_by_url(url))
+        self.assertIsNone(get_file_by_url(url, delay=0))
 
         expected = "Problem getting file:"
         output = sys.stdout.getvalue()
@@ -1644,7 +1644,7 @@ class GetFileByUrl(BaseStdout):
     @mock.patch("updateHostsFile.urlopen", side_effect=mock_url_open_decode_fail)
     def test_read_url_decode_fail(self, _):
         url = b"www.google.com"
-        self.assertIsNone(get_file_by_url(url))
+        self.assertIsNone(get_file_by_url(url, delay=0))
 
         expected = "Problem getting file:"
         output = sys.stdout.getvalue()
diff --git a/updateHostsFile.py b/updateHostsFile.py
index a73da4f25..130cf8aa6 100644
--- a/updateHostsFile.py
+++ b/updateHostsFile.py
@@ -1461,7 +1461,7 @@ def maybe_copy_example_file(file_path):
         shutil.copyfile(example_file_path, file_path)
 
 
-def get_file_by_url(url):
+def get_file_by_url(url, retries=3, delay=10):
     """
     Get a file data located at a particular URL.
 
@@ -1482,12 +1482,26 @@ def get_file_by_url(url):
     format we have to encode or decode data before parsing it to UTF-8.
+
+    The download is attempted up to ``retries`` times; when a failure
+    looks like a dropped network connection, wait ``delay`` seconds
+    between attempts before retrying.
     """
 
-    try:
-        with urlopen(url) as f:
-            soup = BeautifulSoup(f.read(), "lxml").get_text()
-            return "\n".join(list(map(domain_to_idna, soup.split("\n"))))
-    except Exception:
-        print("Problem getting file: ", url)
+    while retries:
+        try:
+            with urlopen(url) as f:
+                soup = BeautifulSoup(f.read(), "lxml").get_text()
+                return "\n".join(list(map(domain_to_idna, soup.split("\n"))))
+        except Exception as e:
+            retries -= 1
+            # Only transient name-resolution failures are worth waiting
+            # out; other errors (HTTP error, decode failure) won't fix
+            # themselves, so stop retrying immediately.
+            if "failure in name resolution" not in str(e):
+                break
+            if retries:
+                print("No internet connection! Retrying in {} seconds".format(delay))
+                time.sleep(delay)
+    print("Problem getting file: ", url)
+    return None
 
 
 def write_data(f, data):