import datetime
import json
import os
import pickle
import random
import tempfile
import time
def measure_metadata_performance(num_iterations=100):
    """Benchmark JSON, pickle, and plain-dict metadata storage/retrieval.

    Generates *num_iterations* sample records, then times three round
    trips: JSON serialize/deserialize in memory, pickle to a temporary
    file and back, and dict store/lookup as a baseline. Each timing is
    printed and all three are returned for programmatic use.

    Args:
        num_iterations: Number of sample records to generate (default 100).

    Returns:
        dict mapping "json", "pickle", and "dict" to elapsed seconds (float).
    """
    data = [
        {
            "id": random.randint(1, 1000),
            "value": random.random(),
            # Store the timestamp as an ISO-8601 string: a raw datetime
            # object is not JSON-serializable and made json.dumps below
            # raise TypeError in the original code.
            "timestamp": datetime.datetime.now().isoformat(),
        }
        for _ in range(num_iterations)
    ]

    # 1. JSON serialization/deserialization.
    # perf_counter is monotonic and higher-resolution than time.time,
    # so it is the right clock for elapsed-time measurement.
    start = time.perf_counter()
    json.loads(json.dumps(data))
    json_time = time.perf_counter() - start
    print(f"JSON Time: {json_time:.4f} seconds")

    # 2. Pickle round trip through a temporary file. mkstemp avoids
    # clobbering any existing "temp.pkl" in the working directory, and
    # try/finally guarantees the file is removed even if pickling fails
    # (the original leaked the file on any exception).
    start = time.perf_counter()
    fd, path = tempfile.mkstemp(suffix=".pkl")
    try:
        with os.fdopen(fd, "wb") as f:
            pickle.dump(data, f)
        with open(path, "rb") as f:
            pickle.load(f)
    finally:
        os.remove(path)
    pickle_time = time.perf_counter() - start
    print(f"Pickle Time: {pickle_time:.4f} seconds")

    # 3. Baseline: in-memory dict storage and index-keyed retrieval.
    start = time.perf_counter()
    dict_data = dict(enumerate(data))
    _ = [dict_data[i] for i in range(len(data))]
    dict_time = time.perf_counter() - start
    print(f"Dictionary Time: {dict_time:.4f} seconds")

    return {"json": json_time, "pickle": pickle_time, "dict": dict_time}
# Script entry point: run the benchmark with its default iteration count.
if __name__ == "__main__":
    measure_metadata_performance()
# Add your comment