import requests
import time
import logging
# Configure module-wide logging: timestamped, ERROR-level-and-above messages.
logging.basicConfig(level=logging.ERROR, format='%(asctime)s - %(levelname)s - %(message)s')
def batch_http_requests(url_list, timeout=10, delay=0.5):
    """
    Process a list of URLs by making HTTP GET requests, handling failures.

    Args:
        url_list: A list of URLs to process.
        timeout: Per-request timeout in seconds (default 10).
        delay: Pause in seconds between consecutive requests (default 0.5).

    Returns:
        A list of (url, result) tuples. On success, result is the response
        body text; on failure, result is the error message string.
    """
    results = []
    # A Session reuses TCP connections to the same host across requests,
    # instead of opening a fresh connection per URL.
    with requests.Session() as session:
        for i, url in enumerate(url_list):
            # Throttle between requests only — never sleep pointlessly
            # after the final URL has been processed.
            if i > 0:
                time.sleep(delay)
            try:
                response = session.get(url, timeout=timeout)
                response.raise_for_status()  # Raise HTTPError for 4xx/5xx responses
                results.append((url, response.text))
                print(f"Successfully processed {url}")  # Indicate success
            except requests.exceptions.RequestException as e:
                # RequestException covers connection errors, timeouts, and
                # the HTTPError raised above — record the failure and move on.
                logging.error(f"Error processing {url}: {e}")
                results.append((url, str(e)))
                print(f"Failed to process {url}: {e}")  # Indicate failure
    return results
if __name__ == '__main__':
    # Demo run: a mix of successful, error-status, and unreachable URLs.
    urls = [
        "https://www.example.com",
        "https://httpstat.us/200",
        "https://httpstat.us/404",
        "https://httpstat.us/500",
        "https://invalid-url.example" # Simulate a connection error
    ]
    # Print each URL alongside a truncated preview of its body or error text.
    for url, result in batch_http_requests(urls):
        print(f"URL: {url}, Result: {result[:50]}...")
# NOTE(review): stray editor placeholder ("Add your comment") was left here as bare text — a syntax error; converted to a comment so the file parses.