import functools
import time
from urllib.parse import urlparse

import requests
class SessionCookieThrottler:
    """Decorator that rate-limits calls to the wrapped function.

    Allows at most ``max_requests`` calls per ``interval``-second window;
    when the window is exhausted the caller blocks (sleeps) until it resets.
    A ``requests.Session`` is kept on the instance so decorated callables
    can share cookies if they choose to use it.
    """

    def __init__(self, max_requests, interval):
        self.max_requests = max_requests   # calls permitted per window
        self.interval = interval           # window length in seconds
        self.request_count = 0             # calls made in the current window
        self.last_reset = time.time()      # start time of the current window
        self.session = requests.Session()  # shared session for cookie reuse

    def __call__(self, func):
        # BUG FIX: the original returned a nested inner function instead of
        # executing it, so the decorated function never actually ran.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            if self.request_count >= self.max_requests:
                # Window exhausted: sleep out the remainder, then reset.
                wait_time = self.interval - (time.time() - self.last_reset)
                if wait_time > 0:
                    time.sleep(wait_time)
                # Start a fresh window from now (the original never moved
                # last_reset to the window start, only to the last call).
                self.request_count = 0
                self.last_reset = time.time()
            try:
                result = func(*args, **kwargs)
                self.request_count += 1
                return result
            except Exception as e:
                # Best-effort contract: report the failure and return None
                # rather than propagating, matching the original behavior.
                print(f"Error during request: {e}")
                return None
        return wrapper
if __name__ == '__main__':
    # Demo: allow at most 2 requests in each 5-second window.
    @SessionCookieThrottler(max_requests=2, interval=5)
    def make_request(url):
        print(f"Making request to: {url}")
        response = requests.get(url)
        print(f"Response from: {url} - Status Code: {response.status_code}")
        return response

    # Hit a handful of sites to exercise the throttler.
    for target in (
        "https://www.example.com",
        "https://www.google.com",
        "https://www.wikipedia.org",
        "https://www.python.org",
    ):
        make_request(target)

    print("Finished.")
# NOTE(review): removed stray "Add your comment" web-page artifact (it was a syntax error).