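"""Unit tests for orchestrator.scheduler.Scheduler.

Covers DST transitions, encryption of timing and task data, timing accuracy,
task management, thread-safety/deadlock prevention, and error conditions.
"""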
import pytest
from unittest.mock import patch, MagicMock, call
from datetime import datetime, timedelta
import time
import pytz

from orchestrator.scheduler import Scheduler
from security.encrypt import AES256Encryptor


class TestSchedulerDSTHandling:
    @pytest.fixture
    def scheduler(self):
        return Scheduler()

    @patch('orchestrator.scheduler.time')
    @patch('orchestrator.scheduler.datetime')
    def test_dst_spring_forward(self, mock_datetime, mock_time):
        """Test scheduler handles spring forward DST transition"""
        # Setup mock time progression through DST transition
        mock_time.monotonic.side_effect = [0, 30, 60, 90]
        mock_datetime.now.side_effect = [
            datetime(2025, 3, 9, 1, 59, tzinfo=pytz.UTC),
            datetime(2025, 3, 9, 3, 0, tzinfo=pytz.UTC),  # DST jumps forward
            datetime(2025, 3, 9, 3, 1, tzinfo=pytz.UTC),
            datetime(2025, 3, 9, 3, 2, tzinfo=pytz.UTC)
        ]

        scheduler = Scheduler()
        scheduler.run_pending()

        # Verify scheduler adjusted correctly
        assert scheduler.last_sync is not None
        assert scheduler.time_offset == 0  # Should maintain sync through transition

    @patch('orchestrator.scheduler.time')
    @patch('orchestrator.scheduler.datetime')
    def test_dst_fall_back(self, mock_datetime, mock_time):
        """Test scheduler handles fall back DST transition"""
        # Setup mock time progression through DST transition
        mock_time.monotonic.side_effect = [0, 30, 60, 90]
        mock_datetime.now.side_effect = [
            datetime(2025, 11, 2, 1, 59, tzinfo=pytz.UTC),
            datetime(2025, 11, 2, 1, 0, tzinfo=pytz.UTC),  # DST falls back
            datetime(2025, 11, 2, 1, 1, tzinfo=pytz.UTC),
            datetime(2025, 11, 2, 1, 2, tzinfo=pytz.UTC)
        ]

        scheduler = Scheduler()
        scheduler.run_pending()

        # Verify scheduler adjusted correctly
        assert scheduler.last_sync is not None
        assert abs(scheduler.time_offset) < 0.3  # Should maintain sync within threshold


class TestSchedulerEncryption:
    @patch('orchestrator.scheduler.AES256Encryptor')
    def test_timing_data_encryption(self, mock_encryptor):
        """Verify all sensitive timing data is encrypted"""
        mock_enc = MagicMock(spec=AES256Encryptor)
        mock_encryptor.return_value = mock_enc

        scheduler = Scheduler()
        scheduler.run_pending()

        # Verify encryption was called for sensitive data
        assert mock_enc.encrypt.call_count >= 1
        calls = mock_enc.encrypt.call_args_list
        assert any(b'time_offset' in call[0][0] for call in calls)
        assert any(b'last_sync' in call[0][0] for call in calls)

    @patch('orchestrator.scheduler.AES256Encryptor')
    def test_task_data_encryption(self, mock_encryptor):
        """Verify task callback data is encrypted"""
        mock_enc = MagicMock(spec=AES256Encryptor)
        mock_encryptor.return_value = mock_enc

        scheduler = Scheduler()
        scheduler.schedule_task("* * * * *", "sensitive_callback_data")

        # Verify encryption was called for task data
        assert mock_enc.encrypt.call_count >= 1
        calls = mock_enc.encrypt.call_args_list
        assert any(b'sensitive_callback_data' in call[0][0] for call in calls)


class TestSchedulerTimingAccuracy:
    @patch('orchestrator.scheduler.time')
    @patch('orchestrator.scheduler.datetime')
    def test_time_sync_accuracy(self, mock_datetime, mock_time):
        """Verify scheduler maintains ±1s accuracy"""
        # Setup mock time with slight drift
        mock_time.monotonic.side_effect = [0, 30.5, 61.2, 91.8]  # Simulate drift
        mock_datetime.now.side_effect = [
            datetime(2025, 5, 3, 12, 0, 0, tzinfo=pytz.UTC),
            datetime(2025, 5, 3, 12, 0, 30, tzinfo=pytz.UTC),
            datetime(2025, 5, 3, 12, 1, 1, tzinfo=pytz.UTC),
            datetime(2025, 5, 3, 12, 1, 31, tzinfo=pytz.UTC)
        ]

        scheduler = Scheduler()
        scheduler.run_pending()

        # Verify time offset stays within ±1s
        assert abs(scheduler.time_offset) <= 1.0

    @patch('orchestrator.scheduler.time')
    @patch('orchestrator.scheduler.datetime')
    def test_timing_accuracy_under_load(self, mock_datetime, mock_time):
        """Verify timing accuracy under CPU load conditions"""
        # Setup mock time with varying drift
        mock_time.monotonic.side_effect = [0, 30.8, 61.6, 92.4]  # Simulate worse drift
        mock_datetime.now.side_effect = [
            datetime(2025, 5, 3, 12, 0, 0, tzinfo=pytz.UTC),
            datetime(2025, 5, 3, 12, 0, 30, tzinfo=pytz.UTC),
            datetime(2025, 5, 3, 12, 1, 0, tzinfo=pytz.UTC),
            datetime(2025, 5, 3, 12, 1, 30, tzinfo=pytz.UTC)
        ]

        # Simulate CPU load by slowing down time adjustments
        with patch('orchestrator.scheduler.time.sleep', side_effect=lambda x: time.sleep(x * 2)):
            scheduler = Scheduler()
            scheduler.run_pending()

        # Verify timing still stays within ±1s under load
        assert abs(scheduler.time_offset) <= 1.0


class TestTaskManagement:
    """Tests for task management functionality"""

    @pytest.fixture
    def scheduler(self):
        mock_dispatcher = MagicMock()
        return Scheduler(mock_dispatcher)

    def test_get_task_success(self, scheduler):
        """Test getting an existing task returns correct data"""
        test_task = {
            'id': 'test1',
            'callback': lambda: None,
            'schedule': '* * * * *',
            'next_run': datetime.now(pytz.UTC),
            'last_run': None,
            'encrypted': False
        }

        # Add test task
        with scheduler.lock:
            scheduler.tasks['test1'] = test_task

        # Get and verify task
        result = scheduler.get_task('test1')
        assert result == test_task
        assert result is not test_task  # Verify copy was returned

    def test_get_task_not_found(self, scheduler):
        """Test getting non-existent task raises KeyError"""
        with pytest.raises(KeyError):
            scheduler.get_task('nonexistent')

    def test_get_task_thread_safety(self, scheduler):
        """Test get_task maintains thread safety"""
        test_task = {
            'id': 'test2',
            'callback': lambda: None,
            'schedule': '* * * * *',
            'next_run': datetime.now(pytz.UTC),
            'last_run': None,
            'encrypted': False
        }

        # Add test task
        with scheduler.lock:
            scheduler.tasks['test2'] = test_task

        # Verify lock is acquired during get_task
        with patch.object(scheduler.lock, 'acquire') as mock_acquire:
            scheduler.get_task('test2')
            mock_acquire.assert_called_once()

    def test_get_task_execution_tracking(self, scheduler):
        """Test get_task tracks execution status"""
        test_task = {
            'id': 'test3',
            'callback': lambda: None,
            'schedule': '* * * * *',
            'next_run': datetime.now(pytz.UTC),
            'last_run': None,
            'encrypted': False,
            'is_test': True
        }

        # Add test task
        with scheduler.lock:
            scheduler.tasks['test3'] = test_task

        # Get task before execution
        task = scheduler.get_task('test3')
        assert not task.get('executed', False)

        # Simulate execution
        with scheduler.lock:
            scheduler.tasks['test3']['executed'] = True

        # Verify execution status is tracked
        task = scheduler.get_task('test3')
        assert task['executed']


class TestSchedulerExtendedTiming:
    """Additional tests for timing accuracy improvements"""

    @patch('orchestrator.scheduler.time')
    @patch('orchestrator.scheduler.datetime')
    def test_tight_timing_parameters(self, mock_datetime, mock_time):
        """Verify the tighter timing parameters keep the offset within 10ms"""
        # Setup mock time with tighter drift
        mock_time.monotonic.side_effect = [0, 5, 10, 15]  # 5s intervals
        mock_datetime.now.side_effect = [
            datetime(2025, 5, 3, 12, 0, 0, tzinfo=pytz.UTC),
            datetime(2025, 5, 3, 12, 0, 5, tzinfo=pytz.UTC),
            datetime(2025, 5, 3, 12, 0, 10, tzinfo=pytz.UTC),
            datetime(2025, 5, 3, 12, 0, 15, tzinfo=pytz.UTC)
        ]

        scheduler = Scheduler()
        scheduler.run_pending()

        # Verify tighter parameters maintain accuracy
        assert abs(scheduler.time_offset) <= 0.01  # 10ms threshold

    @patch('orchestrator.scheduler.ntplib.NTPClient')
    def test_ntp_server_failover(self, mock_ntp):
        """Verify scheduler handles NTP server failures"""
        # Setup mock NTP client to fail the first 2 servers
        mock_client = MagicMock()
        mock_ntp.return_value = mock_client
        mock_client.request.side_effect = [
            Exception("Server 1 down"),
            Exception("Server 2 down"),
            MagicMock(offset=0.01)  # Third server succeeds
        ]

        scheduler = Scheduler()
        scheduler.sync_with_ntp()

        # Verify it tried multiple servers
        assert mock_client.request.call_count == 3
        assert abs(scheduler.time_offset - 0.01) < 0.001


class TestSchedulerStressConditions:
    """Tests for extreme operating conditions"""

    @patch('orchestrator.scheduler.time')
    @patch('orchestrator.scheduler.datetime')
    def test_extreme_time_drift(self, mock_datetime, mock_time):
        """Verify scheduler recovers from extreme time drift"""
        # Setup mock time with 10s initial drift
        mock_time.monotonic.side_effect = [0, 30, 60, 90]
        mock_datetime.now.side_effect = [
            datetime(2025, 5, 3, 12, 0, 0, tzinfo=pytz.UTC),
            datetime(2025, 5, 3, 12, 0, 40, tzinfo=pytz.UTC),  # +10s drift
            datetime(2025, 5, 3, 12, 1, 20, tzinfo=pytz.UTC),  # +20s drift
            datetime(2025, 5, 3, 12, 1, 30, tzinfo=pytz.UTC)   # Corrected
        ]

        scheduler = Scheduler()
        scheduler.run_pending()

        # Verify extreme drift was corrected
        assert abs(scheduler.time_offset) <= 1.0

    @patch('orchestrator.scheduler.Thread')
    @patch('orchestrator.scheduler.Lock')
    def test_high_thread_contention(self, mock_lock, mock_thread):
        """Verify scheduler handles high thread contention"""
        # Setup mock lock with contention
        lock = MagicMock()
        lock.acquire.side_effect = [False] * 5 + [True]  # Fail 5 times then succeed
        mock_lock.return_value = lock

        # Simulate many concurrent threads
        scheduler = Scheduler()
        threads = []
        for i in range(20):
            t = MagicMock()
            t.is_alive.return_value = False
            threads.append(t)
        mock_thread.side_effect = threads

        scheduler.run_pending()

        # Verify lock contention was handled: only the successful acquire is released
        assert lock.acquire.call_count == 6  # 5 failures + 1 success
        assert lock.release.call_count == 1


class TestSchedulerDeadlockPrevention:
    @patch('orchestrator.scheduler.Thread')
    @patch('orchestrator.scheduler.Lock')
    def test_no_deadlock_on_concurrent_access(self, mock_lock, mock_thread):
        """Verify scheduler handles concurrent access without deadlocks"""
        # Setup mock lock to track acquisition/release
        lock = MagicMock()
        mock_lock.return_value = lock

        # Simulate concurrent threads
        scheduler = Scheduler()
        threads = []
        for i in range(5):
            t = MagicMock()
            t.is_alive.return_value = False  # Mark as completed
            threads.append(t)
        mock_thread.side_effect = threads

        scheduler.run_pending()

        # Verify proper lock usage: every acquire is matched by a release
        assert lock.acquire.call_count == len(threads)
        assert lock.release.call_count == len(threads)

    @patch('orchestrator.scheduler.Thread')
    @patch('orchestrator.scheduler.Lock')
    def test_timeout_on_lock_acquisition(self, mock_lock, mock_thread):
        """Verify scheduler handles lock timeout gracefully"""
        lock = MagicMock()
        lock.acquire.side_effect = [False, False, True]  # Fail twice then succeed
        mock_lock.return_value = lock

        scheduler = Scheduler()
        scheduler.run_pending()

        # Verify proper retry behavior
        assert lock.acquire.call_count == 3
        assert lock.release.call_count == 1


class TestSchedulerErrorConditions:
    def test_invalid_cron_expression(self):
        """Verify scheduler handles invalid cron expressions"""
        scheduler = Scheduler()
        with pytest.raises(ValueError):
            scheduler.schedule_task("invalid_cron", lambda: None)

    @patch('orchestrator.scheduler.encrypt_data')
    def test_encryption_failure(self, mock_encrypt):
        """Verify scheduler handles encryption failures"""
        mock_encrypt.side_effect = Exception("Encryption failed")
        scheduler = Scheduler()

        with pytest.raises(Exception, match="Failed to encrypt task"):
            scheduler.schedule_task("* * * * *", lambda: None)

    @patch('orchestrator.scheduler.Thread')
    def test_thread_start_failure(self, mock_thread):
        """Verify scheduler handles thread start failures"""
        mock_thread.return_value.start.side_effect = Exception("Thread failed")
        scheduler = Scheduler()

        with pytest.raises(Exception, match="Failed to start scheduler thread"):
            scheduler.start()

    @patch('orchestrator.scheduler.time')
    @patch('orchestrator.scheduler.datetime')
    def test_time_drift_correction(self, mock_datetime, mock_time):
        """Verify scheduler corrects time drift"""
        # Setup mock time with increasing drift
        mock_time.monotonic.side_effect = [0, 30, 60, 90]
        mock_datetime.now.side_effect = [
            datetime(2025, 5, 3, 12, 0, 0, tzinfo=pytz.UTC),
            datetime(2025, 5, 3, 12, 0, 31, tzinfo=pytz.UTC),  # +1s drift
            datetime(2025, 5, 3, 12, 1, 2, tzinfo=pytz.UTC),   # +2s drift
            datetime(2025, 5, 3, 12, 1, 30, tzinfo=pytz.UTC)   # Corrected
        ]

        scheduler = Scheduler()
        scheduler.run_pending()

        # Verify drift was corrected
        assert abs(scheduler.time_offset) <= 1.0
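

# The TestTaskManagement tests above build near-identical task dicts by hand.
# As a possible refactor (a sketch, not part of the current suite), a small
# factory could build those dicts; the field names below are taken from the
# tests above, and the defaults are assumptions.
def make_task(task_id, **overrides):
    """Build a task dict with the fields used by the TestTaskManagement tests."""
    task = {
        'id': task_id,
        'callback': lambda: None,
        'schedule': '* * * * *',
        'next_run': datetime.now(pytz.UTC),
        'last_run': None,
        'encrypted': False,
    }
    task.update(overrides)
    return task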