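"""Performance benchmarks for the SecureAudit logging and queueing APIs.

Measures single-threaded and concurrent log_operation throughput, integrity
verification cost, query latency, and queue_access latency and memory use
against the project's response-time and memory-footprint guardians.
"""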
import timeit
import random
import string
import sqlite3
import threading
import time

from memory_profiler import memory_usage

from security.audit import SecureAudit
from security.rbac_engine import RBACEngine  # Mock for testing


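# Minimal stand-in for RBACEngine: SecureAudit presumably invokes the RBAC
# engine's _audit_access_attempt hook, so the mock turns it into a no-op and
# keeps the benchmarks independent of real access-control state.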
class MockRBAC:
    def _audit_access_attempt(self, *args, **kwargs):
        pass


def generate_random_string(length=32):
    return ''.join(random.choices(string.ascii_letters + string.digits, k=length))


class SecureAuditQueueBenchmarks:
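    """Benchmarks for SecureAudit.queue_access against the response-time and memory guardians."""
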
    def __init__(self):
        self.rbac = MockRBAC()
        self.audit = SecureAudit(self.rbac, "queue_benchmark.db", "queue_benchmark.key")

    def benchmark_queue_access(self, num_requests=1000):
        """Test queue_access performance against 800ms response time guardian"""
        def single_request():
            key = generate_random_string()
            self.audit.queue_access(
                operation="access",
                key=key,
                user="benchmark_user",
                priority=random.randint(1, 3)
            )

        # Measure single request time
        single_time = timeit.timeit(single_request, number=1)
        print(f"Single queue_access request: {single_time*1000:.2f}ms")

        # Measure batch performance
        start = time.time()
        for i in range(num_requests):
            self.audit.queue_access(
                operation="access",
                key=f"resource_{i}",
                user=f"user_{i%10}",
                priority=random.randint(1, 3)
            )
        elapsed = time.time() - start
        avg_time = elapsed / num_requests * 1000
        print(f"Average queue_access time ({num_requests} requests): {avg_time:.2f}ms")

        # Verify against architectural guardian
        if avg_time > 800:
            print("WARNING: Exceeds 800ms response time guardian")
        return avg_time

    def benchmark_memory_usage(self):
        """Test memory usage against 512MB footprint guardian"""
        def operation_wrapper():
            for i in range(1000):
                self.audit.queue_access(
                    operation="memory_test",
                    key=f"mem_test_{i}",
                    user="memory_user",
                    priority=1
                )

        mem_usage = memory_usage((operation_wrapper,), max_usage=True)
        print(f"Peak memory usage: {mem_usage:.2f} MB")

        # Verify against architectural guardian
        if mem_usage > 512:
            print("WARNING: Exceeds 512MB memory footprint guardian")
        return mem_usage

    def run_queue_benchmarks(self):
        print("\n=== SecureAudit Queue Benchmarks ===")
        print("1. Queue Access Performance")
        self.benchmark_queue_access(1000)
        self.benchmark_queue_access(5000)

        print("\n2. Memory Usage")
        self.benchmark_memory_usage()


class AuditBenchmarks:
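    """Benchmarks for SecureAudit.log_operation throughput, integrity verification, and query latency."""
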
    def __init__(self):
        self.rbac = MockRBAC()
        self.audit = SecureAudit(self.rbac, "benchmark.db", "benchmark.key")

    def benchmark_insert(self, num_entries=1000):
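        """Time num_entries sequential log_operation calls and report ops/s."""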
        def insert_operations():
            for i in range(num_entries):
                self.audit.log_operation(
                    operation="read",
                    key=generate_random_string(),
                    success=True,
                    user=f"user_{i%10}",
                    reason="benchmark test"
                )

        elapsed = timeit.timeit(insert_operations, number=1)
        print(f"Insert {num_entries} entries: {elapsed:.4f}s ({num_entries/elapsed:.2f} ops/s)")
        return elapsed

    def benchmark_concurrent_insert(self, num_threads=4, ops_per_thread=250):
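        """Time num_threads threads each issuing ops_per_thread log_operation calls."""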
        def worker():
            for _ in range(ops_per_thread):
                self.audit.log_operation(
                    operation="write",
                    key=generate_random_string(),
                    success=random.choice([True, False]),
                    user=f"user_{random.randint(1, 10)}",
                    reason="concurrent test"
                )

        threads = []
        start = timeit.default_timer()
        for _ in range(num_threads):
            t = threading.Thread(target=worker)
            threads.append(t)
            t.start()

        for t in threads:
            t.join()

        elapsed = timeit.default_timer() - start
        total_ops = num_threads * ops_per_thread
        print(f"Concurrent insert ({num_threads} threads, {total_ops} ops): {elapsed:.4f}s ({total_ops/elapsed:.2f} ops/s)")
        return elapsed

    def benchmark_verify_integrity(self):
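        """Check integrity once, then time 10 verify_log_integrity runs and return the average."""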
        # Fail fast if the audit log is not intact before timing repeated runs
        if not self.audit.verify_log_integrity():
            raise ValueError("Database integrity check failed")

        total = timeit.timeit(self.audit.verify_log_integrity, number=10)
        print(f"Verify integrity (10 runs): {total:.4f}s total, {total/10:.4f}s avg")
        return total / 10

    def benchmark_query_performance(self):
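        """Time representative COUNT(*) filter queries directly against benchmark.db."""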
        with sqlite3.connect("benchmark.db") as conn:
            # Test timestamp range query
            start = timeit.default_timer()
            conn.execute("SELECT COUNT(*) FROM audit_logs WHERE timestamp > datetime('now', '-1 hour')")
            elapsed = timeit.default_timer() - start
            print(f"Timestamp range query: {elapsed:.6f}s")

            # Test user filter query
            start = timeit.default_timer()
            conn.execute("SELECT COUNT(*) FROM audit_logs WHERE user = 'user_1'")
            elapsed = timeit.default_timer() - start
            print(f"User filter query: {elapsed:.6f}s")

            # Test operation type query
            start = timeit.default_timer()
            conn.execute("SELECT COUNT(*) FROM audit_logs WHERE operation = 'read'")
            elapsed = timeit.default_timer() - start
            print(f"Operation filter query: {elapsed:.6f}s")

    def run_all_benchmarks(self):
        print("=== Audit System Benchmarks ===")
        print("1. Single-threaded insertion")
        self.benchmark_insert(1000)
        self.benchmark_insert(5000)
        self.benchmark_insert(10000)

        print("\n2. Concurrent insertion")
        self.benchmark_concurrent_insert(4, 250)
        self.benchmark_concurrent_insert(8, 250)

        print("\n3. Integrity verification")
        self.benchmark_verify_integrity()

        print("\n4. Query performance")
        self.benchmark_query_performance()


if __name__ == "__main__":
    print("Running performance benchmarks...")

    # Run standard audit benchmarks
    audit_bench = AuditBenchmarks()
    audit_bench.run_all_benchmarks()

    # Run queue-specific benchmarks
    queue_bench = SecureAuditQueueBenchmarks()
    queue_bench.run_queue_benchmarks()