import requests
from urllib.parse import urlparse
def fetch_data_with_cookie_fallback(url, cookie_manager=None, max_retries=3, timeout=10):
    """Fetch *url*, sending cookies from *cookie_manager* when one is given.

    Retries up to *max_retries* times on request failures (connection
    errors, timeouts, HTTP 4xx/5xx).

    Args:
        url (str): The URL to fetch data from.
        cookie_manager (object, optional): Object exposing ``get_cookies()``
            that returns a dict of cookies. Defaults to None (no cookies sent).
        max_retries (int, optional): Maximum number of attempts. Defaults to 3.
        timeout (float, optional): Per-request timeout in seconds. Defaults to
            10; without one, ``requests.get`` can hang indefinitely on a
            stalled server.

    Returns:
        requests.Response or None: The response object if successful,
        None after all attempts fail.
    """
    # Cookies don't change between attempts — fetch them once, outside the loop.
    # requests treats cookies=None the same as omitting the argument.
    cookies = cookie_manager.get_cookies() if cookie_manager else None
    for attempt in range(max_retries):
        try:
            response = requests.get(url, cookies=cookies, timeout=timeout)
            response.raise_for_status()  # Raise HTTPError for 4xx/5xx responses
            return response
        except requests.exceptions.RequestException as e:
            print(f"Attempt {attempt + 1} failed: {e}")
            if attempt < max_retries - 1:
                print("Retrying...")
            else:
                print("Failed after multiple retries.")
                return None
    # Reached only if max_retries <= 0: no attempt was made.
    return None
if __name__ == '__main__':
    # Example usage: hit an httpbin endpoint that sets a cookie.
    url = "https://httpbin.org/cookies/set?session_id=12345"  # URL with cookies
    # url = "https://httpbin.org/get"  # URL without cookies

    class SimpleCookieManager:
        """Minimal in-memory cookie store used to simulate a cookie manager."""

        def __init__(self):
            self.cookies = {}

        def get_cookies(self):
            """Return the current cookie dict."""
            return self.cookies

        def set_cookie(self, name, value):
            """Store a single name/value cookie pair."""
            self.cookies[name] = value

    manager = SimpleCookieManager()
    manager.set_cookie("session_id", "12345")

    result = fetch_data_with_cookie_fallback(url, manager)
    if result:
        print("Successfully fetched data:")
        print(result.text)
    else:
        print("Failed to fetch data.")