import functools
import time
from urllib.parse import urlparse

import requests
  4. class SessionCookieThrottler:
  5. def __init__(self, max_requests, interval):
  6. self.max_requests = max_requests # Maximum number of requests allowed within the interval
  7. self.interval = interval # Time interval in seconds
  8. self.request_count = 0
  9. self.last_reset = time.time()
  10. self.session = requests.Session() # Use a session for cookie management
  11. def __call__(self, func):
  12. def wrapper(*args, **kwargs):
  13. def inner(*args2, **kwargs2):
  14. if self.request_count >= self.max_requests:
  15. # Calculate time until the interval resets
  16. time_since_last_reset = time.time() - self.last_reset
  17. wait_time = self.interval - time_since_last_reset
  18. if wait_time > 0:
  19. time.sleep(wait_time)
  20. self.request_count = 0 # Reset count after waiting.
  21. try:
  22. response = func(*args2, **kwargs2) # Execute the original function
  23. self.request_count += 1
  24. self.last_reset = time.time()
  25. return response
  26. except Exception as e:
  27. print(f"Error during request: {e}")
  28. return None
  29. return inner
  30. return wrapper
  31. if __name__ == '__main__':
  32. # Example Usage
  33. @SessionCookieThrottler(max_requests=2, interval=5) # Allow 2 requests every 5 seconds
  34. def make_request(url):
  35. print(f"Making request to: {url}")
  36. response = requests.get(url)
  37. print(f"Response from: {url} - Status Code: {response.status_code}")
  38. return response
  39. # Example URLs
  40. urls = [
  41. "https://www.example.com",
  42. "https://www.google.com",
  43. "https://www.wikipedia.org",
  44. "https://www.python.org"
  45. ]
  46. for url in urls:
  47. make_request(url)
  48. print("Finished.")