import datetime
import json
import os
import pickle
import random
import tempfile
import time
  7. def measure_metadata_performance(num_iterations=100):
  8. """
  9. Measures the performance of different metadata storage/retrieval methods.
  10. """
  11. data = []
  12. for _ in range(num_iterations):
  13. # Generate sample data
  14. data.append({"id": random.randint(1, 1000), "value": random.random(), "timestamp": datetime.datetime.now()})
  15. # 1. JSON serialization/deserialization
  16. start_time = time.time()
  17. json_data = json.dumps(data)
  18. json_data = json.loads(json_data)
  19. json_time = time.time() - start_time
  20. print(f"JSON Time: {json_time:.4f} seconds")
  21. # 2. Pickle serialization/deserialization
  22. start_time = time.time()
  23. with open("temp.pkl", "wb") as f: # Use a temporary file
  24. pickle.dump(data, f)
  25. with open("temp.pkl", "rb") as f:
  26. pickled_data = pickle.load(f)
  27. os.remove("temp.pkl") # Clean up
  28. pickle_time = time.time() - start_time
  29. print(f"Pickle Time: {pickle_time:.4f} seconds")
  30. # 3. Simple dictionary storage/retrieval (for comparison)
  31. start_time = time.time()
  32. # Simulate a dictionary-like structure
  33. dict_data = {i: d for i, d in enumerate(data)} # Use enumerate for key
  34. retrieved_data = [dict_data[i] for i in range(len(data))]
  35. dict_time = time.time() - start_time
  36. print(f"Dictionary Time: {dict_time:.4f} seconds")
  37. if __name__ == "__main__":
  38. measure_metadata_performance()

Add your comment