import json
from urllib.parse import urlparse
def aggregate_session_cookies(cookie_strings):
    """
    Aggregate session cookies from a list of cookie strings and perform
    basic sanity checks.

    Each string is expected in standard ``Cookie`` header form, i.e.
    ``name=value`` pairs separated by semicolons:
    ``"sessionid=123; username=alice"``. When the same cookie name
    appears in multiple strings, the later occurrence wins.

    Parameters
    ----------
    cookie_strings : iterable of str
        Raw cookie strings to parse and merge.

    Returns
    -------
    dict or None
        Mapping of cookie name to value, or ``None`` when no string
        yielded any valid cookie pair.
    """
    aggregated_cookies = {}
    invalid_cookies = []
    for cookie_string in cookie_strings:
        try:
            # Cookie pairs are separated by ';' with '=' between name and
            # value. (The previous implementation split on ',' then ';',
            # which never produces 2-element pairs for real cookie strings,
            # so dict() raised and every input was rejected as invalid.)
            cookie = {}
            for item in cookie_string.split(';'):
                item = item.strip()
                if not item:
                    continue  # tolerate trailing ';' / empty fragments
                name, sep, value = item.partition('=')
                if not sep or not name.strip():
                    raise ValueError(f"malformed pair: {item!r}")
                cookie[name.strip()] = value.strip()
            if not cookie:
                raise ValueError("no cookie pairs found")
            aggregated_cookies.update(cookie)  # later strings override earlier names
        except ValueError as e:
            invalid_cookies.append(f"Invalid cookie: {cookie_string} - {str(e)}")
    # Surface parse failures instead of silently discarding them
    # (invalid_cookies was previously collected but never used).
    for entry in invalid_cookies:
        print(f"Warning: {entry}")
    # Sanity checks
    if not aggregated_cookies:
        print("No valid cookies found.")
        return None
    # Check for empty cookie values
    empty_value_count = sum(1 for value in aggregated_cookies.values() if not value)
    if empty_value_count > 0:
        print(f"Warning: {empty_value_count} cookies have empty values.")
    # Check for cookie name length
    long_name_count = sum(1 for name in aggregated_cookies if len(name) > 64)
    if long_name_count > 0:
        print(f"Warning: {long_name_count} cookie names are longer than 64 characters.")
    return aggregated_cookies
if __name__ == '__main__':
    # Demo: merge a mix of valid and deliberately malformed cookie strings.
    # Note the adjacent string literals below are joined by Python into a
    # single element each, so the list holds three entries, not five.
    cookie_strings = [
        "sessionid=12345; username=testuser; "
        "other_param=value; expired=true",
        "sessionid=67890; "
        "another_param=another_value",
        "invalid_cookie"  # Example of an invalid cookie string
    ]
    merged = aggregate_session_cookies(cookie_strings)
    if merged:
        print(json.dumps(merged, indent=2))
# Add your comment