Add Ikario Architecture v2 - Phases 1-8 complete

Implements the processual architecture based on Whitehead's Process
Philosophy and Peirce's Semiotics. Core paradigm: "L'espace latent
pense. Le LLM traduit." (The latent space thinks. The LLM translates.)

Phase 1-4: Core semiotic cycle
- StateTensor 8x1024 (8 Peircean dimensions)
- Dissonance computation with hard negatives
- Fixation via 4 Peircean methods (Tenacity, Authority, A Priori, Science)
- LatentEngine orchestrating the full cycle

Phase 5: StateToLanguage
- LLM as pure translator (zero-reasoning, T=0)
- Projection on interpretable directions
- Reasoning markers detection (Amendment #4)

Phase 6: Vigilance
- x_ref (David) as guard-rail, NOT attractor
- Drift detection per dimension and globally
- Alerts: ok, warning, critical

Phase 7: Autonomous Daemon
- Two modes: CONVERSATION (always verbalize), AUTONOMOUS (~1000 cycles/day)
- Amendment #5: 50% probability on unresolved impacts
- TriggerGenerator with weighted random selection

Phase 8: Integration & Metrics
- ProcessMetrics for daily/weekly reports
- Health status monitoring
- Integration tests validating all modules

297 tests passing, version 0.7.0

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-02-01 21:18:40 +01:00
parent 9c2145bcf2
commit f6fe71e2f7
19 changed files with 9887 additions and 9 deletions

View File

@@ -0,0 +1,842 @@
#!/usr/bin/env python3
"""
Tests pour le module daemon - Phase 7.
Le daemon d'individuation autonome :
1. Mode CONVERSATION : toujours verbalise
2. Mode AUTONOME : pensee silencieuse (~1000 cycles/jour)
3. Amendment #5 : Rumination sur impacts non resolus
Executer: pytest ikario_processual/tests/test_daemon.py -v
"""
import asyncio
import numpy as np
import pytest
from datetime import datetime, timedelta
from unittest.mock import MagicMock, AsyncMock, patch
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
from ikario_processual.state_tensor import StateTensor, DIMENSION_NAMES, EMBEDDING_DIM
from ikario_processual.dissonance import DissonanceResult
from ikario_processual.fixation import FixationResult
from ikario_processual.latent_engine import CycleResult, LatentEngine
from ikario_processual.vigilance import VigilanceSystem, VigilanceAlert
from ikario_processual.state_to_language import StateToLanguage, TranslationResult
from ikario_processual.daemon import (
TriggerType,
DaemonMode,
DaemonConfig,
DaemonStats,
Trigger,
VerbalizationEvent,
TriggerGenerator,
IkarioDaemon,
create_daemon,
)
def create_random_tensor(state_id: int = 0, seed: int = None) -> StateTensor:
    """Build a StateTensor whose 8 Peircean dimensions are random unit vectors.

    Args:
        state_id: identifier stored on the tensor.
        seed: optional value fed to the global numpy RNG for reproducibility.
    """
    if seed is not None:
        np.random.seed(seed)
    tensor = StateTensor(
        state_id=state_id,
        timestamp=datetime.now().isoformat(),
    )
    for name in DIMENSION_NAMES:
        raw = np.random.randn(EMBEDDING_DIM)
        setattr(tensor, name, raw / np.linalg.norm(raw))
    return tensor
def create_mock_cycle_result(
    state_id: int = 1,
    should_verbalize: bool = False,
    verbalization_reason: str = "silent_processing",
    dissonance_total: float = 0.3,
) -> CycleResult:
    """Assemble a deterministic CycleResult stub for daemon tests.

    The dissonance result derives entirely from ``dissonance_total`` (80% of it
    as base dissonance, choc iff total > 0.3) and the fixation is a quiet
    zero-delta 'science' step.
    """
    state = create_random_tensor(state_id=state_id)
    diss = DissonanceResult(
        total=dissonance_total,
        base_dissonance=dissonance_total * 0.8,
        contradiction_score=0.0,
        novelty_penalty=0.0,
        is_choc=dissonance_total > 0.3,
        dissonances_by_dimension={},
        hard_negatives=[],
        max_similarity_to_corpus=0.5,
        rag_results_count=5,
    )
    fix = FixationResult(
        delta=np.zeros(EMBEDDING_DIM),
        magnitude=0.0005,
        was_clamped=False,
        contributions={'tenacity': 0, 'authority': 0, 'apriori': 0, 'science': 0.0005},
    )
    return CycleResult(
        new_state=state,
        previous_state_id=state_id - 1,
        dissonance=diss,
        fixation=fix,
        impacts=[],
        thoughts=[],
        should_verbalize=should_verbalize,
        verbalization_reason=verbalization_reason,
        processing_time_ms=50,
        cycle_number=state_id,
    )
class TestTriggerType:
    """Tests for the TriggerType enum."""

    def test_all_types_exist(self):
        """Every expected trigger type is defined with its string value."""
        expected = {
            TriggerType.USER: "user",
            TriggerType.VEILLE: "veille",
            TriggerType.CORPUS: "corpus",
            TriggerType.RUMINATION: "rumination",
            TriggerType.RUMINATION_FREE: "rumination_free",
            TriggerType.TIMER: "timer",
            TriggerType.EMPTY: "empty",
        }
        for member, value in expected.items():
            assert member.value == value
class TestDaemonMode:
    """Tests for the DaemonMode enum."""

    def test_all_modes_exist(self):
        """Every expected mode is defined with its string value."""
        expected = {
            DaemonMode.CONVERSATION: "conversation",
            DaemonMode.AUTONOMOUS: "autonomous",
            DaemonMode.PAUSED: "paused",
        }
        for member, value in expected.items():
            assert member.value == value
class TestDaemonConfig:
    """Tests for DaemonConfig defaults and validation."""

    def test_default_config(self):
        """Default configuration values (90s interval, 50/30/20 trigger split)."""
        config = DaemonConfig()
        assert config.cycle_interval_seconds == 90.0
        assert config.prob_unresolved_impact == 0.50
        assert config.prob_corpus == 0.30
        assert config.prob_rumination_free == 0.20

    def test_probabilities_sum_to_one(self):
        """The autonomous-trigger probabilities sum to 1."""
        config = DaemonConfig()
        total = config.prob_unresolved_impact + config.prob_corpus + config.prob_rumination_free
        assert np.isclose(total, 1.0)

    def test_validate_default(self):
        """The default config validates."""
        config = DaemonConfig()
        # Fixed: `== True` comparison replaced with a plain truthiness assert
        # (PEP 8 / ruff E712).
        assert config.validate()

    def test_validate_invalid_probabilities(self):
        """validate() rejects probabilities that do not sum to 1."""
        config = DaemonConfig(
            prob_unresolved_impact=0.5,
            prob_corpus=0.5,
            prob_rumination_free=0.5,  # Total = 1.5
        )
        # Fixed: `== False` comparison replaced with `not ...` (ruff E712).
        assert not config.validate()
class TestDaemonStats:
    """Tests for DaemonStats counters and serialization."""

    def test_initial_stats(self):
        """All counters start at zero."""
        stats = DaemonStats()
        for counter in (
            stats.total_cycles,
            stats.conversation_cycles,
            stats.autonomous_cycles,
            stats.verbalizations,
        ):
            assert counter == 0

    def test_to_dict(self):
        """to_dict() reflects counter values and includes uptime."""
        stats = DaemonStats()
        stats.total_cycles = 10
        stats.verbalizations = 3
        payload = stats.to_dict()
        assert payload['total_cycles'] == 10
        assert payload['verbalizations'] == 3
        assert 'uptime_seconds' in payload
class TestTrigger:
    """Tests for the Trigger dataclass."""

    def test_create_trigger(self):
        """A trigger keeps the type, content and priority it was built with."""
        trigger = Trigger(
            type=TriggerType.USER,
            content="Hello Ikario",
            source="user",
            priority=2,
        )
        assert trigger.type == TriggerType.USER
        assert trigger.content == "Hello Ikario"
        assert trigger.priority == 2

    def test_to_dict(self):
        """to_dict() serializes the enum to its value and keeps metadata."""
        trigger = Trigger(
            type=TriggerType.CORPUS,
            content="Whitehead on process",
            source="library",
            metadata={'author': 'Whitehead'},
        )
        payload = trigger.to_dict()
        assert payload['type'] == 'corpus'
        assert payload['content'] == "Whitehead on process"
        assert payload['metadata']['author'] == 'Whitehead'
class TestVerbalizationEvent:
    """Tests for VerbalizationEvent."""

    def test_create_event(self):
        """An event keeps its text and reason."""
        event = VerbalizationEvent(
            text="Je suis curieux.",
            reason="conversation_mode",
            trigger_type="user",
            state_id=5,
            dissonance=0.4,
        )
        assert event.text == "Je suis curieux."
        assert event.reason == "conversation_mode"

    def test_to_dict(self):
        """to_dict() exposes text, reason and a timestamp."""
        event = VerbalizationEvent(
            text="Test",
            reason="test",
            trigger_type="user",
            state_id=1,
            dissonance=0.5,
        )
        payload = event.to_dict()
        for key in ('text', 'reason', 'timestamp'):
            assert key in payload
class TestTriggerGenerator:
    """Tests for TriggerGenerator."""

    def test_create_generator(self):
        """A fresh generator holds the config and has no Weaviate client."""
        config = DaemonConfig()
        generator = TriggerGenerator(config)
        assert generator.config is config
        assert generator.weaviate is None

    def test_create_user_trigger(self):
        """User triggers carry the message at maximum priority."""
        generator = TriggerGenerator(DaemonConfig())
        trigger = generator.create_user_trigger("Bonjour")
        assert trigger.type == TriggerType.USER
        assert trigger.content == "Bonjour"
        assert trigger.priority == 2  # highest priority

    def test_create_veille_trigger(self):
        """Veille triggers embed the title and record the URL in metadata."""
        generator = TriggerGenerator(DaemonConfig())
        trigger = generator.create_veille_trigger(
            title="Decouverte philosophique",
            snippet="Nouvelle interpretation de Whitehead",
            url="https://example.com/news",
        )
        assert trigger.type == TriggerType.VEILLE
        assert "Decouverte philosophique" in trigger.content
        assert trigger.metadata['url'] == "https://example.com/news"

    def test_fallback_trigger_without_weaviate(self):
        """Without Weaviate, autonomous generation falls back gracefully."""
        generator = TriggerGenerator(DaemonConfig())

        async def scenario():
            trigger = await generator.generate_autonomous_trigger()
            # All sub-generators fall back when no Weaviate client is wired.
            assert trigger.type in (TriggerType.CORPUS, TriggerType.RUMINATION_FREE, TriggerType.EMPTY)

        asyncio.run(scenario())
class TestTriggerGeneratorAmendment5:
    """Tests for Amendment #5: rumination over unresolved impacts."""

    def test_probabilities_prioritize_impacts(self):
        """Unresolved impacts get the largest share (50%) of autonomous triggers."""
        config = DaemonConfig()
        assert config.prob_unresolved_impact > config.prob_corpus
        assert config.prob_unresolved_impact > config.prob_rumination_free
        assert config.prob_unresolved_impact == 0.50

    def test_old_impact_has_high_priority(self):
        """An old (>7 days) unresolved impact is marked as an old tension.

        NOTE(review): this only exercises Trigger construction; the original
        version also built a DaemonConfig and a TriggerGenerator it never used
        (dead locals, removed here). A stronger test would have the generator
        itself emit this trigger from a stored unresolved impact.
        """
        trigger = Trigger(
            type=TriggerType.RUMINATION,
            content="Tension non resolue",
            metadata={
                'days_unresolved': 10,
                'is_old_tension': True,
            },
            priority=1,
        )
        assert trigger.priority == 1
        assert trigger.metadata['is_old_tension'] is True
class TestIkarioDaemon:
    """Tests for IkarioDaemon construction and read-only properties."""

    def create_mock_daemon(self) -> IkarioDaemon:
        """Wire a daemon around fully mocked engine/vigilance/translator."""
        engine = MagicMock(spec=LatentEngine)
        engine.run_cycle = AsyncMock(return_value=create_mock_cycle_result())
        engine._get_current_state = MagicMock(return_value=create_random_tensor())
        vigilance = MagicMock(spec=VigilanceSystem)
        vigilance.check_drift = MagicMock(return_value=VigilanceAlert(level="ok"))
        translator = MagicMock(spec=StateToLanguage)
        translator.translate = AsyncMock(return_value=TranslationResult(
            text="Je suis curieux.",
            projections={},
            output_type="response",
        ))
        # Short cycle interval keeps the suite fast.
        return IkarioDaemon(
            latent_engine=engine,
            vigilance=vigilance,
            translator=translator,
            config=DaemonConfig(cycle_interval_seconds=0.1),
        )

    def test_create_daemon(self):
        """A fresh daemon is paused, not running, with zeroed stats."""
        daemon = self.create_mock_daemon()
        assert daemon.running is False
        assert daemon.mode == DaemonMode.PAUSED
        assert daemon.stats.total_cycles == 0

    def test_initial_stats(self):
        """get_stats() reports zero cycles everywhere at start."""
        daemon = self.create_mock_daemon()
        stats = daemon.get_stats()
        for key in ('total_cycles', 'conversation_cycles', 'autonomous_cycles'):
            assert stats[key] == 0

    def test_is_running_property(self):
        """is_running mirrors the stopped state."""
        daemon = self.create_mock_daemon()
        assert daemon.is_running is False

    def test_current_mode_property(self):
        """current_mode starts at PAUSED."""
        daemon = self.create_mock_daemon()
        assert daemon.current_mode == DaemonMode.PAUSED
class TestDaemonStartStop:
    """Tests for the daemon start/stop lifecycle."""

    def create_mock_daemon(self) -> IkarioDaemon:
        """Build a mocked daemon with very short loop intervals."""
        engine = MagicMock(spec=LatentEngine)
        engine.run_cycle = AsyncMock(return_value=create_mock_cycle_result())
        engine._get_current_state = MagicMock(return_value=create_random_tensor())
        vigilance = MagicMock(spec=VigilanceSystem)
        vigilance.check_drift = MagicMock(return_value=VigilanceAlert(level="ok"))
        translator = MagicMock(spec=StateToLanguage)
        translator.translate = AsyncMock(return_value=TranslationResult(
            text="Test",
            projections={},
            output_type="response",
        ))
        return IkarioDaemon(
            latent_engine=engine,
            vigilance=vigilance,
            translator=translator,
            config=DaemonConfig(
                cycle_interval_seconds=0.05,
                vigilance_interval_seconds=0.1,
            ),
        )

    def test_start_stop(self):
        """start() switches to AUTONOMOUS; stop() returns to PAUSED."""
        daemon = self.create_mock_daemon()

        async def scenario():
            await daemon.start()
            assert daemon.running is True
            assert daemon.mode == DaemonMode.AUTONOMOUS
            await asyncio.sleep(0.1)
            await daemon.stop()
            assert daemon.running is False
            assert daemon.mode == DaemonMode.PAUSED

        asyncio.run(scenario())

    def test_run_with_duration(self):
        """run(duration_seconds=...) terminates on its own."""
        daemon = self.create_mock_daemon()

        async def scenario():
            await daemon.run(duration_seconds=0.2)
            assert daemon.running is False

        asyncio.run(scenario())
class TestConversationMode:
    """Tests for conversation mode (always verbalizes)."""

    def create_mock_daemon(self) -> IkarioDaemon:
        """Build a daemon whose engine always requests verbalization."""
        engine = MagicMock(spec=LatentEngine)
        engine.run_cycle = AsyncMock(return_value=create_mock_cycle_result(
            should_verbalize=True,
            verbalization_reason="conversation_mode",
        ))
        engine._get_current_state = MagicMock(return_value=create_random_tensor())
        vigilance = MagicMock(spec=VigilanceSystem)
        vigilance.check_drift = MagicMock(return_value=VigilanceAlert(level="ok"))
        translator = MagicMock(spec=StateToLanguage)
        translator.translate = AsyncMock(return_value=TranslationResult(
            text="Je suis curieux de cette question.",
            projections={'epistemic': {'curiosity': 0.7}},
            output_type="response",
        ))
        return IkarioDaemon(
            latent_engine=engine,
            vigilance=vigilance,
            translator=translator,
        )

    def test_conversation_always_verbalizes(self):
        """A user message always produces a verbalization event."""
        daemon = self.create_mock_daemon()

        async def scenario():
            event = await daemon.send_message("Qu'est-ce que Whitehead?")
            assert event.text == "Je suis curieux de cette question."
            assert event.reason == "conversation_mode"
            assert daemon.stats.conversation_cycles == 1
            assert daemon.stats.verbalizations == 1

        asyncio.run(scenario())

    def test_translator_called_with_context(self):
        """The translator receives the user message in its context."""
        daemon = self.create_mock_daemon()

        async def scenario():
            await daemon.send_message("Test message")
            daemon.translator.translate.assert_called()
            kwargs = daemon.translator.translate.call_args.kwargs
            assert kwargs['output_type'] == 'response'
            assert 'Test message' in kwargs['context']

        asyncio.run(scenario())
class TestAutonomousMode:
    """Tests for autonomous mode (silent by default, verbalizes on discovery)."""

    def create_mock_daemon(self, should_verbalize: bool = False) -> IkarioDaemon:
        """Build a mocked daemon; ``should_verbalize`` drives the cycle result."""
        engine = MagicMock(spec=LatentEngine)
        engine.run_cycle = AsyncMock(return_value=create_mock_cycle_result(
            should_verbalize=should_verbalize,
            verbalization_reason="high_dissonance_discovery" if should_verbalize else "silent_processing",
            dissonance_total=0.7 if should_verbalize else 0.2,
        ))
        engine._get_current_state = MagicMock(return_value=create_random_tensor())
        vigilance = MagicMock(spec=VigilanceSystem)
        vigilance.check_drift = MagicMock(return_value=VigilanceAlert(level="ok"))
        translator = MagicMock(spec=StateToLanguage)
        translator.translate = AsyncMock(return_value=TranslationResult(
            text="Decouverte interessante.",
            projections={},
            output_type="autonomous_verbalization",
        ))
        return IkarioDaemon(
            latent_engine=engine,
            vigilance=vigilance,
            translator=translator,
            config=DaemonConfig(
                cycle_interval_seconds=0.05,  # very fast for tests
                vigilance_interval_seconds=1.0,
            ),
        )

    def test_autonomous_silent_processing(self):
        """By default autonomous cycles run silently."""
        daemon = self.create_mock_daemon(should_verbalize=False)

        async def scenario():
            await daemon.start()
            await asyncio.sleep(0.2)  # let a few cycles run
            await daemon.stop()
            # Autonomous cycles happened...
            assert daemon.stats.autonomous_cycles > 0
            # ...but none were verbalized.
            assert daemon.stats.verbalizations == 0
            assert daemon.stats.silent_cycles > 0

        asyncio.run(scenario())

    def test_autonomous_verbalizes_on_discovery(self):
        """A significant discovery triggers verbalization."""
        daemon = self.create_mock_daemon(should_verbalize=True)

        async def scenario():
            await daemon.start()
            await asyncio.sleep(0.2)  # let a few cycles run
            await daemon.stop()
            assert daemon.stats.verbalizations > 0

        asyncio.run(scenario())
class TestVigilanceLoop:
    """Tests for the background vigilance loop."""

    def create_mock_daemon(self, alert_level: str = "ok") -> IkarioDaemon:
        """Build a mocked daemon whose vigilance returns ``alert_level``."""
        engine = MagicMock(spec=LatentEngine)
        engine.run_cycle = AsyncMock(return_value=create_mock_cycle_result())
        engine._get_current_state = MagicMock(return_value=create_random_tensor())
        vigilance = MagicMock(spec=VigilanceSystem)
        vigilance.check_drift = MagicMock(return_value=VigilanceAlert(
            level=alert_level,
            message=f"Test alert {alert_level}",
        ))
        translator = MagicMock(spec=StateToLanguage)
        translator.translate = AsyncMock(return_value=TranslationResult(
            text="Test",
            projections={},
            output_type="response",
        ))
        return IkarioDaemon(
            latent_engine=engine,
            vigilance=vigilance,
            translator=translator,
            config=DaemonConfig(
                cycle_interval_seconds=1.0,
                vigilance_interval_seconds=0.05,  # fast for tests
            ),
        )

    def test_vigilance_checks_drift(self):
        """The vigilance loop calls check_drift while running."""
        daemon = self.create_mock_daemon()

        async def scenario():
            await daemon.start()
            await asyncio.sleep(0.2)
            await daemon.stop()
            daemon.vigilance.check_drift.assert_called()

        asyncio.run(scenario())

    def test_vigilance_counts_alerts(self):
        """Non-ok alerts are counted in the stats."""
        daemon = self.create_mock_daemon(alert_level="warning")

        async def scenario():
            await daemon.start()
            await asyncio.sleep(0.2)
            await daemon.stop()
            assert daemon.stats.vigilance_alerts > 0

        asyncio.run(scenario())
class TestNotificationCallback:
    """Tests for the autonomous-verbalization notification callback."""

    def test_callback_called_on_autonomous_verbalization(self):
        """The callback fires when an autonomous cycle verbalizes."""
        callback = AsyncMock()
        engine = MagicMock(spec=LatentEngine)
        engine.run_cycle = AsyncMock(return_value=create_mock_cycle_result(
            should_verbalize=True,
            verbalization_reason="high_dissonance",
        ))
        engine._get_current_state = MagicMock(return_value=create_random_tensor())
        vigilance = MagicMock(spec=VigilanceSystem)
        vigilance.check_drift = MagicMock(return_value=VigilanceAlert(level="ok"))
        translator = MagicMock(spec=StateToLanguage)
        translator.translate = AsyncMock(return_value=TranslationResult(
            text="Notification test",
            projections={},
            output_type="autonomous",
        ))
        daemon = IkarioDaemon(
            latent_engine=engine,
            vigilance=vigilance,
            translator=translator,
            config=DaemonConfig(
                cycle_interval_seconds=0.05,
                vigilance_interval_seconds=1.0,
            ),
            notification_callback=callback,
        )

        async def scenario():
            await daemon.start()
            await asyncio.sleep(0.2)
            await daemon.stop()
            callback.assert_called()

        asyncio.run(scenario())
class TestVerbalizationHistory:
    """Tests for the verbalization history buffer."""

    def create_mock_daemon(self) -> IkarioDaemon:
        """Build a mocked daemon with a canned translation response."""
        engine = MagicMock(spec=LatentEngine)
        engine.run_cycle = AsyncMock(return_value=create_mock_cycle_result())
        engine._get_current_state = MagicMock(return_value=create_random_tensor())
        vigilance = MagicMock(spec=VigilanceSystem)
        vigilance.check_drift = MagicMock(return_value=VigilanceAlert(level="ok"))
        translator = MagicMock(spec=StateToLanguage)
        translator.translate = AsyncMock(return_value=TranslationResult(
            text="Test response",
            projections={},
            output_type="response",
        ))
        return IkarioDaemon(
            latent_engine=engine,
            vigilance=vigilance,
            translator=translator,
        )

    def test_history_records_conversations(self):
        """Each conversation turn is appended to the history."""
        daemon = self.create_mock_daemon()

        async def scenario():
            await daemon.send_message("Message 1")
            await daemon.send_message("Message 2")
            history = daemon.get_verbalization_history()
            assert len(history) == 2
            assert all('text' in entry for entry in history)

        asyncio.run(scenario())

    def test_history_limit(self):
        """get_verbalization_history(limit=...) caps the returned entries."""
        daemon = self.create_mock_daemon()

        async def scenario():
            for i in range(15):
                await daemon.send_message(f"Message {i}")
            assert len(daemon.get_verbalization_history(limit=5)) == 5

        asyncio.run(scenario())
class TestCreateDaemonFactory:
    """Tests for the create_daemon factory function."""

    def test_create_daemon_factory(self):
        """The factory returns an IkarioDaemon wired to its collaborators."""
        engine = MagicMock(spec=LatentEngine)
        vigilance = MagicMock(spec=VigilanceSystem)
        translator = MagicMock(spec=StateToLanguage)
        daemon = create_daemon(
            latent_engine=engine,
            vigilance=vigilance,
            translator=translator,
        )
        assert isinstance(daemon, IkarioDaemon)
        assert daemon.engine is engine
        assert daemon.vigilance is vigilance
        assert daemon.translator is translator

    def test_create_daemon_with_config(self):
        """A custom config passed to the factory is kept on the daemon."""
        daemon = create_daemon(
            latent_engine=MagicMock(spec=LatentEngine),
            vigilance=MagicMock(spec=VigilanceSystem),
            translator=MagicMock(spec=StateToLanguage),
            config=DaemonConfig(cycle_interval_seconds=60.0),
        )
        assert daemon.config.cycle_interval_seconds == 60.0
class TestCycleRate:
    """Tests for the autonomous cycle rate (~1000 cycles/day)."""

    def test_default_cycle_rate(self):
        """90s per cycle yields 960 cycles/day, within the ~1000/day target."""
        config = DaemonConfig()
        seconds_per_day = 86400
        cycles_per_day = seconds_per_day / config.cycle_interval_seconds
        assert 900 < cycles_per_day < 1100
class TestStatsTracking:
    """Tests for statistics bookkeeping across cycles."""

    def create_mock_daemon(self) -> IkarioDaemon:
        """Build a mocked daemon with short loop intervals."""
        engine = MagicMock(spec=LatentEngine)
        engine.run_cycle = AsyncMock(return_value=create_mock_cycle_result())
        engine._get_current_state = MagicMock(return_value=create_random_tensor())
        vigilance = MagicMock(spec=VigilanceSystem)
        vigilance.check_drift = MagicMock(return_value=VigilanceAlert(level="ok"))
        translator = MagicMock(spec=StateToLanguage)
        translator.translate = AsyncMock(return_value=TranslationResult(
            text="Test",
            projections={},
            output_type="response",
        ))
        return IkarioDaemon(
            latent_engine=engine,
            vigilance=vigilance,
            translator=translator,
            config=DaemonConfig(
                cycle_interval_seconds=0.05,
                vigilance_interval_seconds=1.0,
            ),
        )

    def test_total_cycles_tracked(self):
        """Conversation turns count toward total cycles."""
        daemon = self.create_mock_daemon()

        async def scenario():
            await daemon.send_message("Test 1")
            await daemon.send_message("Test 2")
            # At least the two conversation cycles.
            assert daemon.get_stats()['total_cycles'] >= 2

        asyncio.run(scenario())

    def test_last_cycle_time_updated(self):
        """last_cycle_time is populated after a cycle runs."""
        daemon = self.create_mock_daemon()

        async def scenario():
            await daemon.send_message("Test")
            assert daemon.get_stats()['last_cycle_time'] != ""

        asyncio.run(scenario())
# Allow running this test module directly (outside the pytest CLI).
if __name__ == "__main__":
    pytest.main([__file__, "-v"])

View File

@@ -0,0 +1,371 @@
#!/usr/bin/env python3
"""
Tests pour le module de dissonance - Phase 2.
Exécuter: pytest ikario_processual/tests/test_dissonance.py -v
"""
import numpy as np
import pytest
from datetime import datetime
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
from ikario_processual.state_tensor import StateTensor, DIMENSION_NAMES, EMBEDDING_DIM
from ikario_processual.dissonance import (
DissonanceConfig,
DissonanceResult,
compute_dissonance,
compute_dissonance_enhanced,
compute_self_dissonance,
cosine_similarity,
Impact,
create_impact_from_dissonance,
)
def create_random_tensor() -> StateTensor:
    """Build a StateTensor whose 8 dimensions are random unit vectors."""
    tensor = StateTensor(
        state_id=0,
        timestamp=datetime.now().isoformat(),
    )
    for name in DIMENSION_NAMES:
        raw = np.random.randn(EMBEDDING_DIM)
        setattr(tensor, name, raw / np.linalg.norm(raw))
    return tensor
def create_zero_tensor() -> StateTensor:
    """Build a StateTensor left at its default (zero) vectors."""
    return StateTensor(state_id=0, timestamp=datetime.now().isoformat())
# NOTE(review): class name misspells "Similarity"; kept as-is so existing
# pytest test IDs remain stable.
class TestCosineSimiliarity:
    """Tests for the cosine_similarity helper."""

    def test_identical_vectors(self):
        """cos(v, v) == 1."""
        v = np.random.randn(EMBEDDING_DIM)
        v /= np.linalg.norm(v)
        assert np.isclose(cosine_similarity(v, v), 1.0)

    def test_opposite_vectors(self):
        """cos(v, -v) == -1."""
        v = np.random.randn(EMBEDDING_DIM)
        v /= np.linalg.norm(v)
        assert np.isclose(cosine_similarity(v, -v), -1.0)

    def test_orthogonal_vectors(self):
        """Two distinct standard basis vectors have similarity 0."""
        e0 = np.zeros(EMBEDDING_DIM)
        e0[0] = 1.0
        e1 = np.zeros(EMBEDDING_DIM)
        e1[1] = 1.0
        assert np.isclose(cosine_similarity(e0, e1), 0.0)

    def test_zero_vector(self):
        """A zero vector yields similarity 0 (no division-by-zero blowup)."""
        v = np.random.randn(EMBEDDING_DIM)
        assert cosine_similarity(v, np.zeros(EMBEDDING_DIM)) == 0.0
class TestDissonanceConfig:
    """Tests for DissonanceConfig dimension weights."""

    def test_default_weights_sum(self):
        """Default dimension weights sum to ~1.0."""
        weights = DissonanceConfig().get_dimension_weights()
        total = sum(weights.values())
        assert np.isclose(total, 1.0), f"Total des poids: {total}"

    def test_all_dimensions_have_weight(self):
        """Every Peircean dimension has a non-negative weight."""
        weights = DissonanceConfig().get_dimension_weights()
        for dim in DIMENSION_NAMES:
            assert dim in weights
            assert weights[dim] >= 0
class TestComputeDissonance:
    """Tests for compute_dissonance (basic version)."""

    def test_self_dissonance_is_zero(self):
        """E(X_t, X_t) ≈ 0: an input identical to a dimension barely dissonates."""
        X_t = create_random_tensor()
        # Reuse one dimension as the input (simulates an identical input).
        e_input = X_t.firstness.copy()
        result = compute_dissonance(e_input, X_t)
        # Dissonance against firstness should be ~0.
        assert result.dissonances_by_dimension['firstness'] < 0.01

    def test_orthogonal_input_high_dissonance(self):
        """An unrelated input yields substantial dissonance."""
        X_t = create_random_tensor()
        # A fresh random vector is near-orthogonal to X_t in high dimension.
        e_input = np.random.randn(EMBEDDING_DIM)
        e_input = e_input / np.linalg.norm(e_input)
        result = compute_dissonance(e_input, X_t)
        assert result.total > 0.1

    def test_result_structure(self):
        """The result exposes total, is_choc and one entry per dimension."""
        X_t = create_random_tensor()
        e_input = np.random.randn(EMBEDDING_DIM)
        e_input = e_input / np.linalg.norm(e_input)
        result = compute_dissonance(e_input, X_t)
        assert isinstance(result, DissonanceResult)
        assert hasattr(result, 'total')
        assert hasattr(result, 'is_choc')
        assert hasattr(result, 'dissonances_by_dimension')
        assert len(result.dissonances_by_dimension) == 8

    def test_is_choc_flag(self):
        """is_choc is monotone in the threshold for the same input."""
        X_t = create_random_tensor()
        e_input = np.random.randn(EMBEDDING_DIM)
        e_input = e_input / np.linalg.norm(e_input)
        # Low threshold -> choc more likely.
        config_low = DissonanceConfig(choc_threshold=0.1)
        result_low = compute_dissonance(e_input, X_t, config_low)
        # High threshold -> choc less likely.
        config_high = DissonanceConfig(choc_threshold=0.9)
        result_high = compute_dissonance(e_input, X_t, config_high)
        # Fixed: the original `assert result_low.is_choc or result_high.is_choc is False`
        # parsed as `a or (b is False)` because `is` binds tighter than `or`.
        # The real invariant for identical input/total is the implication:
        # a choc at the high threshold implies a choc at the low threshold.
        assert (not result_high.is_choc) or result_low.is_choc
class TestComputeDissonanceEnhanced:
    """Tests for compute_dissonance_enhanced with hard negatives."""

    def test_no_rag_results(self):
        """No RAG results -> full novelty penalty (1.0)."""
        X_t = create_random_tensor()
        e_input = np.random.randn(EMBEDDING_DIM)
        e_input = e_input / np.linalg.norm(e_input)
        result = compute_dissonance_enhanced(e_input, X_t, rag_results=[])
        assert result.novelty_penalty == 1.0
        assert result.rag_results_count == 0

    def test_with_similar_rag_results(self):
        """Highly similar RAG results -> no novelty penalty."""
        X_t = create_random_tensor()
        e_input = np.random.randn(EMBEDDING_DIM)
        e_input = e_input / np.linalg.norm(e_input)
        # One exact copy and one near-copy of the input.
        rag_results = [
            {'vector': e_input.copy(), 'content': 'identical'},
            {'vector': e_input + np.random.randn(EMBEDDING_DIM) * 0.01, 'content': 'similar'},
        ]
        for entry in rag_results:
            entry['vector'] = entry['vector'] / np.linalg.norm(entry['vector'])
        result = compute_dissonance_enhanced(e_input, X_t, rag_results)
        # The exact copy drives max similarity to ~1.0.
        assert result.max_similarity_to_corpus > 0.9
        assert result.novelty_penalty == 0.0  # no penalty above 0.3

    def test_hard_negatives_detection(self):
        """Anti-correlated corpus entries are flagged as hard negatives."""
        X_t = create_random_tensor()
        e_input = np.random.randn(EMBEDDING_DIM)
        e_input = e_input / np.linalg.norm(e_input)
        rag_results = [
            {'vector': -e_input, 'content': 'contradiction', 'source': 'test'},
            {'vector': e_input, 'content': 'similar', 'source': 'test'},
        ]
        result = compute_dissonance_enhanced(e_input, X_t, rag_results)
        assert len(result.hard_negatives) >= 1
        assert result.contradiction_score > 0

    def test_total_dissonance_combines_all(self):
        """total = base + w_c * contradiction + w_n * novelty."""
        X_t = create_random_tensor()
        e_input = np.random.randn(EMBEDDING_DIM)
        e_input = e_input / np.linalg.norm(e_input)
        config = DissonanceConfig(
            contradiction_weight=0.2,
            novelty_weight=0.1
        )
        result = compute_dissonance_enhanced(e_input, X_t, [], config)
        expected = (
            result.base_dissonance
            + config.contradiction_weight * result.contradiction_score
            + config.novelty_weight * result.novelty_penalty
        )
        assert np.isclose(result.total, expected)
class TestSelfDissonance:
    """Tests for compute_self_dissonance."""

    def test_coherent_tensor(self):
        """All dimensions identical -> internal dissonance ~0."""
        shared = np.random.randn(EMBEDDING_DIM)
        shared = shared / np.linalg.norm(shared)
        tensor = StateTensor(
            state_id=0,
            timestamp=datetime.now().isoformat(),
        )
        # Every dimension carries the exact same vector: perfectly coherent.
        for name in DIMENSION_NAMES:
            setattr(tensor, name, shared.copy())
        assert compute_self_dissonance(tensor) < 0.01

    def test_incoherent_tensor(self):
        """Independent random dimensions -> high internal dissonance."""
        tensor = create_random_tensor()
        assert compute_self_dissonance(tensor) > 0.3
class TestImpact:
    """Tests for Impact creation and serialization."""

    def test_create_impact_from_dissonance(self):
        """An Impact built from a dissonance result mirrors its fields."""
        X_t = create_random_tensor()
        e_input = np.random.randn(EMBEDDING_DIM)
        e_input = e_input / np.linalg.norm(e_input)
        diss = compute_dissonance(e_input, X_t)
        impact = create_impact_from_dissonance(
            dissonance=diss,
            trigger_type='user',
            trigger_content='Test message',
            trigger_vector=e_input,
            state_id=0,
            impact_id=1,
        )
        assert impact.impact_id == 1
        assert impact.trigger_type == 'user'
        assert impact.dissonance_total == diss.total
        assert impact.resolved is False

    def test_impact_to_dict(self):
        """Impact.to_dict() yields a well-formed dictionary."""
        impact = Impact(
            impact_id=1,
            timestamp=datetime.now().isoformat(),
            state_id_at_impact=0,
            trigger_type='user',
            trigger_content='Test',
            dissonance_total=0.5,
        )
        payload = impact.to_dict()
        assert 'impact_id' in payload
        assert 'timestamp' in payload
        assert payload['timestamp'].endswith('Z')
        assert payload['resolved'] is False
class TestDissonanceMonotonicity:
    """Monotonicity: a more different input yields more dissonance."""

    def test_more_different_more_dissonance(self):
        """Dissonance on 'firstness' grows as the input diverges from it."""
        X_t = create_random_tensor()
        # Input identical to the firstness dimension.
        identical = X_t.firstness.copy()
        result_identical = compute_dissonance(identical, X_t)
        # Perturbed input.
        slightly_different = X_t.firstness + np.random.randn(EMBEDDING_DIM) * 0.1
        slightly_different = slightly_different / np.linalg.norm(slightly_different)
        result_slight = compute_dissonance(slightly_different, X_t)
        # Completely unrelated input.
        very_different = np.random.randn(EMBEDDING_DIM)
        very_different = very_different / np.linalg.norm(very_different)
        result_very = compute_dissonance(very_different, X_t)
        d_identical = result_identical.dissonances_by_dimension['firstness']
        d_slight = result_slight.dissonances_by_dimension['firstness']
        d_very = result_very.dissonances_by_dimension['firstness']
        assert d_identical < d_slight
        # Fixed: the original computed result_very but never asserted on it
        # (dead code). identical-vs-unrelated is safe to assert. The
        # slight-vs-unrelated comparison is deliberately omitted: the
        # randn*0.1 noise has norm ~0.1*sqrt(1024) ≈ 3.2 against a unit
        # vector, so "slightly different" is not reliably closer than a
        # random vector and that assertion would be flaky.
        assert d_identical < d_very
class TestDissonanceResultSerialization:
    """Serialization tests."""

    @staticmethod
    def _make_result():
        # Helper: dissonance of a random unit input against a random tensor.
        X_t = create_random_tensor()
        probe = np.random.randn(EMBEDDING_DIM)
        return compute_dissonance(probe / np.linalg.norm(probe), X_t)

    def test_to_dict(self):
        """DissonanceResult.to_dict() works."""
        as_dict = self._make_result().to_dict()
        for key in ('total', 'is_choc', 'dissonances_by_dimension'):
            assert key in as_dict

    def test_to_json(self):
        """DissonanceResult.to_json() produces valid JSON."""
        import json
        result = self._make_result()
        # The output must parse cleanly and round-trip the total.
        parsed = json.loads(result.to_json())
        assert parsed['total'] == result.total
# Allow running this test module directly, outside the pytest CLI.
if __name__ == "__main__":
    pytest.main([__file__, "-v"])

View File

@@ -0,0 +1,383 @@
#!/usr/bin/env python3
"""
Tests pour le module de fixation - Phase 3.
Les 4 méthodes de Peirce :
1. Tenacity (0.05) - Minimal
2. Authority (0.25) - Pacte multi-vecteurs
3. A Priori (0.25) - Cohérence
4. Science (0.45) - Dominant
Exécuter: pytest ikario_processual/tests/test_fixation.py -v
"""
import numpy as np
import pytest
from datetime import datetime
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
from ikario_processual.state_tensor import StateTensor, DIMENSION_NAMES, EMBEDDING_DIM
from ikario_processual.fixation import (
FixationConfig,
FixationResult,
Tenacity,
Authority,
APriori,
Science,
compute_delta,
apply_delta,
apply_delta_all_dimensions,
PACTE_ARTICLES,
CRITICAL_ARTICLES,
PHILOSOPHICAL_ANCHORS,
)
def create_random_tensor() -> StateTensor:
    """Build a tensor whose eight dimensions are random unit vectors."""
    tensor = StateTensor(
        state_id=0,
        timestamp=datetime.now().isoformat(),
    )
    for name in DIMENSION_NAMES:
        raw = np.random.randn(EMBEDDING_DIM)
        # Each dimension is stored normalized.
        setattr(tensor, name, raw / np.linalg.norm(raw))
    return tensor
def create_random_input() -> np.ndarray:
    """Return a random, unit-norm input vector."""
    raw = np.random.randn(EMBEDDING_DIM)
    return raw / np.linalg.norm(raw)
class TestFixationConfig:
    """Tests for FixationConfig."""

    def test_default_weights_sum_to_one(self):
        """Default weights must sum to 1.0."""
        config = FixationConfig()
        weights = (config.w_tenacity, config.w_authority,
                   config.w_apriori, config.w_science)
        assert np.isclose(sum(weights), 1.0)

    def test_validate(self):
        """validate() returns True for a valid config."""
        assert FixationConfig().validate() is True

    def test_science_is_dominant(self):
        """Science must carry the highest weight."""
        config = FixationConfig()
        others = (config.w_authority, config.w_apriori, config.w_tenacity)
        assert all(config.w_science > weight for weight in others)

    def test_tenacity_is_minimal(self):
        """Tenacity must carry the lowest weight."""
        config = FixationConfig()
        others = (config.w_authority, config.w_apriori, config.w_science)
        assert all(config.w_tenacity < weight for weight in others)
class TestTenacity:
    """Tests for the Tenacity method."""

    def test_confirming_input_gives_delta(self):
        """A confirming input produces a reinforce action."""
        X_t = create_random_tensor()
        method = Tenacity()
        # thirdness itself is a maximally confirming input.
        delta, details = method.compute(X_t.thirdness.copy(), X_t)
        assert details['confirmation_score'] > 0.99
        assert details['action'] == 'reinforce'
        # Delta may be nearly zero since the input ~ thirdness already.

    def test_contradicting_input_resists(self):
        """A contradicting input is resisted (zero delta)."""
        X_t = create_random_tensor()
        method = Tenacity()
        # A random vector is barely confirming in high dimension
        # (expected cosine similarity ~ 0).
        delta, details = method.compute(create_random_input(), X_t)
        assert details['action'] == 'resist'
        assert np.allclose(delta, 0)
class TestAuthority:
    """Tests for the Authority method (multi-vector Pact)."""

    def test_pacte_articles_count(self):
        """There are 8 Pact articles."""
        assert len(PACTE_ARTICLES) == 8

    def test_critical_articles_count(self):
        """There are 3 critical articles."""
        assert len(CRITICAL_ARTICLES) == 3

    def test_philosophical_anchors_count(self):
        """There are 3 philosophical anchors."""
        assert len(PHILOSOPHICAL_ANCHORS) == 3

    def test_authority_without_vectors_is_neutral(self):
        """Authority with no vectors configured is neutral (zero delta)."""
        method = Authority()  # no Pact vectors
        delta, details = method.compute(create_random_input(), create_random_tensor())
        assert np.allclose(delta, 0)

    def test_authority_with_mock_vectors(self):
        """Authority works with mock Pact vectors."""
        mock_pacte = {
            'article_1_conatus': create_random_input(),
            'article_2_non_nuisance': create_random_input(),
        }
        method = Authority(pacte_vectors=mock_pacte)
        delta, details = method.compute(create_random_input(), create_random_tensor())
        # One alignment entry per supplied article.
        assert 'pacte_alignments' in details
        assert len(details['pacte_alignments']) == 2
class TestAPriori:
    """Tests for the A Priori method."""

    def test_coherent_input_integrates(self):
        """A coherent input is integrated."""
        X_t = create_random_tensor()
        method = APriori()
        # Coherent input: normalized mean of several dimensions.
        blend = (X_t.firstness + X_t.thirdness + X_t.orientations + X_t.valeurs) / 4
        blend = blend / np.linalg.norm(blend)
        delta, details = method.compute(blend, X_t)
        assert details['avg_coherence'] > 0.3
        assert np.linalg.norm(delta) > 0

    def test_incoherent_input_weak_integrate(self):
        """An incoherent input is only weakly integrated."""
        X_t = create_random_tensor()
        method = APriori()
        # The opposite of thirdness is maximally incoherent.
        delta, details = method.compute(-X_t.thirdness, X_t)
        assert details['avg_coherence'] < 0
        assert details['action'] == 'weak_integrate'
class TestScience:
    """Tests for the Science method."""

    def test_no_rag_results_prudent(self):
        """No RAG results -> prudence."""
        delta, details = Science().compute(
            create_random_input(), create_random_tensor(), rag_results=None
        )
        assert details['action'] == 'no_corroboration_prudent'
        # Still a small delta, pushed toward secondness.
        assert np.linalg.norm(delta) > 0

    def test_strong_corroboration_integrates(self):
        """Strong corroboration -> strong integration."""
        X_t = create_random_tensor()
        e_input = create_random_input()
        # RAG vectors nearly identical to the input.
        rag_results = [
            {'vector': e_input.copy()},
            {'vector': e_input + np.random.randn(EMBEDDING_DIM) * 0.01},
        ]
        for entry in rag_results:
            entry['vector'] = entry['vector'] / np.linalg.norm(entry['vector'])
        delta, details = Science().compute(e_input, X_t, rag_results)
        assert details['avg_corroboration'] > 0.9
        assert details['action'] == 'strong_corroboration'

    def test_weak_corroboration_tension(self):
        """Weak corroboration -> tension (secondness)."""
        e_input = create_random_input()
        # A single RAG vector directly opposed to the input.
        delta, details = Science().compute(
            e_input, create_random_tensor(), [{'vector': -e_input}]
        )
        assert details['avg_corroboration'] < 0
        assert details['action'] == 'low_corroboration_tension'
class TestComputeDelta:
    """Tests for compute_delta (combining the 4 methods)."""

    def test_delta_magnitude_clamped(self):
        """||delta|| must stay <= delta_max."""
        config = FixationConfig(delta_max=0.001)
        result = compute_delta(create_random_input(), create_random_tensor(),
                               config=config)
        assert result.magnitude <= config.delta_max + 1e-9

    def test_all_contributions_present(self):
        """All four method contributions must be present."""
        result = compute_delta(create_random_input(), create_random_tensor())
        for method in ('tenacity', 'authority', 'apriori', 'science'):
            assert method in result.contributions

    def test_science_has_most_influence(self):
        """Science (0.45) generally has the most influence."""
        # Note: this test is probabilistic.
        X_t = create_random_tensor()
        e_input = create_random_input()
        # RAG results with strong corroboration.
        result = compute_delta(e_input, X_t,
                               rag_results=[{'vector': e_input.copy()}])
        # Science should contribute significantly
        # (not always the most, because of the other methods).
        assert result.contributions['science'] >= 0

    def test_result_has_details(self):
        """The result must carry the per-method details."""
        result = compute_delta(create_random_input(), create_random_tensor())
        for attr in ('tenacity_detail', 'authority_detail',
                     'apriori_detail', 'science_detail'):
            assert hasattr(result, attr)
class TestApplyDelta:
    """Tests for apply_delta."""

    def test_state_id_incremented(self):
        """state_id must be incremented and the predecessor recorded."""
        X_t = create_random_tensor()
        X_t.state_id = 5
        X_new = apply_delta(X_t, np.random.randn(EMBEDDING_DIM) * 0.001)
        assert X_new.state_id == 6
        assert X_new.previous_state_id == 5

    def test_result_normalized(self):
        """The modified dimension must stay normalized."""
        X_new = apply_delta(
            create_random_tensor(),
            np.random.randn(EMBEDDING_DIM) * 0.1,
            target_dim='thirdness',
        )
        assert np.isclose(np.linalg.norm(X_new.thirdness), 1.0)

    def test_other_dimensions_unchanged(self):
        """The other dimensions must not change."""
        X_t = create_random_tensor()
        X_new = apply_delta(X_t, np.random.randn(EMBEDDING_DIM) * 0.1,
                            target_dim='thirdness')
        # firstness must be untouched by a delta targeting thirdness.
        assert np.allclose(X_new.firstness, X_t.firstness)
class TestApplyDeltaAllDimensions:
    """Tests for apply_delta_all_dimensions."""

    def test_all_dimensions_modified(self):
        """At least some dimensions should be modified."""
        X_t = create_random_tensor()
        e_input = create_random_input()
        result = compute_delta(e_input, X_t)
        X_new = apply_delta_all_dimensions(X_t, e_input, result)
        # Count how many dimensions actually moved.
        moved = [
            np.linalg.norm(getattr(X_new, name) - getattr(X_t, name)) > 0
            for name in DIMENSION_NAMES
        ]
        assert sum(moved) > 0

    def test_all_dimensions_normalized(self):
        """All dimensions must remain normalized."""
        X_t = create_random_tensor()
        e_input = create_random_input()
        result = compute_delta(e_input, X_t)
        X_new = apply_delta_all_dimensions(X_t, e_input, result)
        for name in DIMENSION_NAMES:
            assert np.isclose(np.linalg.norm(getattr(X_new, name)), 1.0, atol=1e-5)
class TestFixationResultSerialization:
    """Serialization tests."""

    def test_to_dict(self):
        """to_dict() exposes the expected keys."""
        result = compute_delta(create_random_input(), create_random_tensor())
        as_dict = result.to_dict()
        for key in ('magnitude', 'was_clamped', 'contributions'):
            assert key in as_dict
# Allow running this test module directly, outside the pytest CLI.
if __name__ == "__main__":
    pytest.main([__file__, "-v"])

View File

@@ -0,0 +1,436 @@
#!/usr/bin/env python3
"""
Tests d'intégration Phase 8 - Architecture v2.
Tests simplifiés pour valider l'intégration entre les modules.
Ces tests utilisent l'API réelle des modules implémentés.
Exécuter: pytest ikario_processual/tests/test_integration_v2.py -v
"""
import asyncio
import numpy as np
import pytest
from datetime import datetime
from unittest.mock import AsyncMock, MagicMock, patch
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
from ikario_processual.state_tensor import StateTensor, DIMENSION_NAMES, EMBEDDING_DIM
from ikario_processual.dissonance import DissonanceResult, Impact, compute_dissonance
from ikario_processual.fixation import FixationResult, compute_delta, apply_delta
from ikario_processual.vigilance import (
VigilanceSystem,
VigilanceConfig,
VigilanceAlert,
create_vigilance_system,
)
from ikario_processual.state_to_language import (
StateToLanguage,
TranslationResult,
ProjectionDirection,
REASONING_MARKERS,
)
from ikario_processual.daemon import (
IkarioDaemon,
DaemonConfig,
DaemonMode,
TriggerType,
Trigger,
TriggerGenerator,
create_daemon,
)
from ikario_processual.metrics import (
ProcessMetrics,
create_metrics,
)
def create_random_tensor(state_id: int = 0, seed: int = None) -> StateTensor:
    """Build a tensor of random unit vectors, reproducible via *seed*.

    Uses a local numpy Generator instead of np.random.seed() so that
    seeding one tensor does not clobber the global RNG stream shared by
    every other test in the module.
    """
    rng = np.random.default_rng(seed)
    tensor = StateTensor(
        state_id=state_id,
        timestamp=datetime.now().isoformat(),
    )
    for dim_name in DIMENSION_NAMES:
        v = rng.standard_normal(EMBEDDING_DIM)
        # Each dimension is stored normalized.
        setattr(tensor, dim_name, v / np.linalg.norm(v))
    return tensor
def create_mock_embedding_model():
    """Build a mock of the embedding model.

    encode(texts) returns one unit-norm pseudo-random embedding per text.
    The seed is derived with zlib.crc32 rather than hash(), because str
    hashing is salted per process (PYTHONHASHSEED) and would make the mock
    embeddings non-reproducible across runs. A local Generator is used so
    the global numpy RNG state is left untouched.
    """
    import zlib

    mock = MagicMock()

    def mock_encode(texts):
        seed = zlib.crc32(str(texts).encode('utf-8'))
        rng = np.random.default_rng(seed)
        embeddings = rng.standard_normal((len(texts), EMBEDDING_DIM))
        norms = np.linalg.norm(embeddings, axis=1, keepdims=True)
        return embeddings / norms

    mock.encode = mock_encode
    return mock
class TestVigilanceIntegration:
    """Integration tests for the vigilance system."""

    def test_vigilance_with_state_tensor(self):
        """Vigilance works with a StateTensor."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        vigilance = VigilanceSystem(x_ref=x_ref)
        # An identical state shows no drift.
        assert vigilance.check_drift(x_ref).level == "ok"

    def test_vigilance_detects_drift(self):
        """Vigilance detects drift."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        vigilance = VigilanceSystem(
            x_ref=x_ref,
            config=VigilanceConfig(
                threshold_cumulative=0.0001,
                threshold_per_cycle=0.00001,
            ),
        )
        # Baseline check against the reference itself.
        vigilance.check_drift(x_ref)
        # A different state trips the (deliberately tiny) thresholds.
        alert = vigilance.check_drift(create_random_tensor(state_id=1, seed=999))
        assert alert.level in ("warning", "critical")

    def test_vigilance_identifies_dimensions(self):
        """Vigilance identifies the drifting dimensions."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        vigilance = VigilanceSystem(x_ref=x_ref)
        # Flip a single dimension of the reference.
        drifted = x_ref.copy()
        drifted.state_id = 1
        drifted.valeurs = -x_ref.valeurs
        alert = vigilance.check_drift(drifted)
        assert 'valeurs' in alert.top_drifting_dimensions

    def test_vigilance_cumulative_drift(self):
        """Cumulative drift grows over successive checks."""
        vigilance = VigilanceSystem(
            x_ref=create_random_tensor(state_id=-1, seed=42)
        )
        for step in range(5):
            vigilance.check_drift(create_random_tensor(state_id=step, seed=step + 100))
        assert vigilance.cumulative_drift > 0
        assert len(vigilance.history) == 5
class TestStateToLanguageIntegration:
    """Integration tests for StateToLanguage."""

    def test_projection_on_directions(self):
        """Projection onto interpretable directions."""
        X = create_random_tensor(state_id=5, seed=42)
        # Build a direction with the full constructor signature.
        direction_vec = np.random.randn(EMBEDDING_DIM)
        direction_vec = direction_vec / np.linalg.norm(direction_vec)
        direction = ProjectionDirection(
            name="test_dir",
            category="epistemic",
            pole_positive="positif",
            pole_negative="négatif",
            description="Direction de test",
            vector=direction_vec,
        )
        translator = StateToLanguage(directions=[direction])
        projections = translator.project_state(X)
        # The direction shows up under its category.
        assert 'epistemic' in projections
        assert 'test_dir' in projections['epistemic']

    def test_translator_async_translate(self):
        """Async translation with a mocked Anthropic client."""
        async def exercise():
            X = create_random_tensor(state_id=5, seed=42)
            mock_client = AsyncMock()
            mock_client.messages.create = AsyncMock(return_value=MagicMock(
                content=[MagicMock(text="État de curiosité intense.")]
            ))
            translator = StateToLanguage(
                directions=[],
                anthropic_client=mock_client,
            )
            result = await translator.translate(X)
            assert result is not None
            assert isinstance(result, TranslationResult)
            assert len(result.text) > 0

        # Fix: drive the coroutine directly; the original wrapped
        # asyncio.run() in a pointless extra run_test() function.
        asyncio.run(exercise())

    def test_reasoning_markers_defined(self):
        """Reasoning markers are defined (Amendment #4)."""
        assert len(REASONING_MARKERS) > 0
        assert any("pense" in marker.lower() for marker in REASONING_MARKERS)
class TestDissonanceFixationIntegration:
    """Integration tests for dissonance + fixation."""

    def test_dissonance_on_tensor(self):
        """compute_dissonance works end-to-end with mock embeddings."""
        X = create_random_tensor(state_id=0, seed=42)
        e_input = create_mock_embedding_model().encode(["Test input"])[0]
        result = compute_dissonance(e_input=e_input, X_t=X)
        assert isinstance(result, DissonanceResult)
        # One dissonance entry per Peircean dimension.
        assert len(result.dissonances_by_dimension) == 8

    def test_fixation_applies_delta(self):
        """apply_delta actually changes the state."""
        X = create_random_tensor(state_id=0, seed=42)
        snapshot = X.to_flat().copy()
        X_new = apply_delta(
            X_t=X,
            delta=np.random.randn(EMBEDDING_DIM) * 0.01,
            target_dim="firstness",
        )
        # The flattened state must differ from the pre-delta snapshot.
        assert not np.allclose(snapshot, X_new.to_flat())
class TestDaemonComponents:
    """Tests for daemon components."""

    def test_trigger_creation(self):
        """Trigger creation carries type, content and metadata."""
        trigger = Trigger(
            type=TriggerType.USER,
            content="Test message",
            metadata={"source": "test"},
        )
        assert trigger.type == TriggerType.USER
        assert trigger.content == "Test message"
        assert trigger.metadata["source"] == "test"

    def test_daemon_config_validation(self):
        """Config probabilities sum to 1 and the config validates."""
        config = DaemonConfig()
        total = (
            config.prob_unresolved_impact +
            config.prob_corpus +
            config.prob_rumination_free
        )
        assert np.isclose(total, 1.0)
        # Fix: 'is True' rather than '== True' — validate() must return
        # the actual boolean, matching the idiom used in the other tests.
        assert config.validate() is True

    def test_daemon_mode_enum(self):
        """Daemon modes expose their expected values."""
        assert DaemonMode.CONVERSATION.value == "conversation"
        assert DaemonMode.AUTONOMOUS.value == "autonomous"

    def test_trigger_types(self):
        """Trigger types expose their expected values."""
        assert TriggerType.USER.value == "user"
        assert TriggerType.VEILLE.value == "veille"
        assert TriggerType.CORPUS.value == "corpus"
        assert TriggerType.RUMINATION_FREE.value == "rumination_free"
class TestMetricsIntegration:
    """Integration tests for the metrics module."""

    def test_metrics_with_state_references(self):
        """Metrics accept state references and count cycles."""
        metrics = create_metrics(
            S_0=create_random_tensor(state_id=0, seed=42),
            x_ref=create_random_tensor(state_id=-1, seed=43),
        )
        for _ in range(10):
            metrics.record_cycle(TriggerType.USER, 0.01)
        assert metrics.compute_daily_report().cycles.total == 10

    def test_metrics_state_evolution(self):
        """State-evolution metrics report drift."""
        metrics = create_metrics(
            S_0=create_random_tensor(state_id=0, seed=42),
            x_ref=create_random_tensor(state_id=-1, seed=43),
        )
        report = metrics.compute_daily_report(
            current_state=create_random_tensor(state_id=100, seed=44)
        )
        # Drift computed against both S_0 and x_ref.
        assert report.state_evolution.total_drift_from_s0 > 0
        assert report.state_evolution.drift_from_ref > 0

    def test_metrics_health_status(self):
        """Health status reflects recorded alerts."""
        metrics = create_metrics()
        # No alerts -> healthy.
        assert metrics.get_health_status()['status'] == 'healthy'
        # One critical alert flips the status.
        metrics.record_alert("critical", 0.03)
        assert metrics.get_health_status()['status'] == 'critical'
class TestAmendmentsCompliance:
    """Compliance tests for the amendments."""

    def test_amendment_4_reasoning_markers(self):
        """Amendment #4: reasoning markers are defined."""
        assert len(REASONING_MARKERS) > 0

    def test_amendment_5_rumination_probability(self):
        """Amendment #5: 50% probability on unresolved impacts."""
        assert DaemonConfig().prob_unresolved_impact == 0.5

    def test_amendment_6_memory_efficient(self):
        """Amendment #6: the flattened tensor is memory-efficient."""
        flat = create_random_tensor(state_id=0, seed=42).to_flat()
        # 8 dimensions x EMBEDDING_DIM floats.
        assert flat.shape == (8 * EMBEDDING_DIM,)
        # Must fit within 64 KB.
        assert flat.nbytes <= 64 * 1024

    def test_amendment_15_xref_not_attractor(self):
        """Amendment #15: x_ref is a guard-rail, not an attractor."""
        vigilance = VigilanceSystem(
            x_ref=create_random_tensor(state_id=-1, seed=42)
        )
        # x_ref carries the sentinel state_id -1.
        assert vigilance.x_ref.state_id == -1
        # Vigilance only observes; it must not pull the state toward x_ref.
        X = create_random_tensor(state_id=5, seed=123)
        before = X.to_flat().copy()
        vigilance.check_drift(X)
        assert np.allclose(before, X.to_flat())
class TestEndToEndSimplified:
    """Simplified end-to-end tests."""

    def test_vigilance_with_metrics(self):
        """Vigilance integrated with metrics."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        vigilance = VigilanceSystem(x_ref=x_ref)
        metrics = create_metrics(x_ref=x_ref)
        # Simulate an evolving state and record every alert.
        for step in range(5):
            alert = vigilance.check_drift(
                create_random_tensor(state_id=step, seed=step + 100)
            )
            metrics.record_alert(alert.level, vigilance.cumulative_drift)
        assert metrics.compute_daily_report().alerts.total == 5

    def test_state_evolution_tracked(self):
        """State evolution is tracked across cycles."""
        S_0 = create_random_tensor(state_id=0, seed=42)
        x_ref = create_random_tensor(state_id=-1, seed=43)
        vigilance = VigilanceSystem(x_ref=x_ref)
        metrics = create_metrics(S_0=S_0, x_ref=x_ref)

        current_state = S_0
        for step in range(10):
            metrics.record_cycle(TriggerType.USER, 0.01)
            # Simulated successor state.
            current_state = create_random_tensor(state_id=step + 1, seed=step + 50)
            alert = vigilance.check_drift(current_state)
            metrics.record_alert(alert.level, vigilance.cumulative_drift)

        # Final report reflects all cycles and the accumulated drift.
        report = metrics.compute_daily_report(current_state=current_state)
        assert report.cycles.total == 10
        assert report.state_evolution.total_drift_from_s0 > 0
        assert report.state_evolution.drift_from_ref > 0

    def test_full_module_imports(self):
        """All public symbols import correctly."""
        from ikario_processual import (
            # V1
            OccasionLog,
            OccasionLogger,
            OccasionManager,
            # V2
            StateTensor,
            DissonanceResult,
            FixationResult,
            VigilanceSystem,
            StateToLanguage,
            IkarioDaemon,
            ProcessMetrics,
        )
        # Every symbol resolved to a real object.
        for symbol in (StateTensor, DissonanceResult, VigilanceSystem,
                       StateToLanguage, IkarioDaemon, ProcessMetrics):
            assert symbol is not None
# Allow running this test module directly, outside the pytest CLI.
if __name__ == "__main__":
    pytest.main([__file__, "-v"])

View File

@@ -0,0 +1,372 @@
#!/usr/bin/env python3
"""
Tests pour le LatentEngine - Phase 4.
Le cycle sémiotique :
1. FIRSTNESS : Vectoriser, extraire saillances
2. SECONDNESS : Calculer dissonance, créer Impacts
3. THIRDNESS : Appliquer fixation, mettre à jour état
4. SÉMIOSE : Créer Thoughts, décider verbalisation
Exécuter: pytest ikario_processual/tests/test_latent_engine.py -v
"""
import numpy as np
import pytest
from datetime import datetime
from unittest.mock import MagicMock, patch
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
from ikario_processual.state_tensor import StateTensor, DIMENSION_NAMES, EMBEDDING_DIM
from ikario_processual.dissonance import DissonanceResult, DissonanceConfig
from ikario_processual.fixation import FixationResult, FixationConfig
from ikario_processual.latent_engine import (
Thought,
CycleResult,
CycleLogger,
LatentEngine,
)
def create_random_tensor(state_id: int = 0) -> StateTensor:
    """Build a tensor whose eight dimensions are random unit vectors."""
    tensor = StateTensor(
        state_id=state_id,
        timestamp=datetime.now().isoformat(),
    )
    for name in DIMENSION_NAMES:
        raw = np.random.randn(EMBEDDING_DIM)
        # Each dimension is stored normalized.
        setattr(tensor, name, raw / np.linalg.norm(raw))
    return tensor
class TestThought:
    """Tests for the Thought class."""

    @staticmethod
    def _make_thought(**overrides):
        # Helper: a fully-populated Thought, with per-test overrides.
        fields = dict(
            thought_id=1,
            timestamp=datetime.now().isoformat(),
            state_id=5,
            content="Test thought content",
            thought_type="reflection",
            trigger_type="user",
            trigger_summary="Hello",
            delta_magnitude=0.0005,
            dissonance_total=0.3,
            dimensions_affected=['science', 'authority'],
        )
        fields.update(overrides)
        return Thought(**fields)

    def test_create_thought(self):
        """Create a Thought."""
        thought = self._make_thought()
        assert thought.thought_id == 1
        assert thought.thought_type == "reflection"

    def test_thought_to_dict(self):
        """to_dict() works and serializes the timestamp in 'Z' form."""
        thought = self._make_thought(
            content="Test",
            thought_type="insight",
            delta_magnitude=0.001,
            dissonance_total=0.5,
            dimensions_affected=[],
        )
        serialized = thought.to_dict()
        assert 'thought_id' in serialized
        assert 'content' in serialized
        assert serialized['timestamp'].endswith('Z')
class TestCycleLogger:
    """Tests for CycleLogger."""

    @staticmethod
    def _mock_cycle(dissonance=0.5, impacts=0, thoughts=0, ms=50):
        # Helper: a CycleResult stand-in with a canned to_dict() payload.
        result = MagicMock(spec=CycleResult)
        result.to_dict.return_value = {
            'dissonance_total': dissonance,
            'impacts_count': impacts,
            'thoughts_count': thoughts,
            'processing_time_ms': ms,
        }
        return result

    def test_log_cycle(self):
        """The logger records cycles."""
        logger = CycleLogger()
        logger.log_cycle(self._mock_cycle(impacts=1))
        assert logger.total_cycles == 1
        assert len(logger.history) == 1

    def test_get_stats_empty(self):
        """Stats with no cycle logged."""
        assert CycleLogger().get_stats()['total_cycles'] == 0

    def test_get_stats_with_cycles(self):
        """Stats over several cycles."""
        logger = CycleLogger()
        for i in range(5):
            logger.log_cycle(self._mock_cycle(
                dissonance=0.3 + i * 0.1,
                impacts=1 if i % 2 == 0 else 0,
                thoughts=1,
                ms=40 + i * 10,
            ))
        stats = logger.get_stats()
        assert stats['total_cycles'] == 5
        assert stats['recent_cycles'] == 5
        assert stats['avg_dissonance'] > 0
        assert stats['total_impacts'] == 3  # i = 0, 2, 4

    def test_max_history_limit(self):
        """The history cap is honored."""
        logger = CycleLogger(max_history=10)
        for _ in range(20):
            logger.log_cycle(self._mock_cycle())
        # total_cycles counts everything; history keeps only the last 10.
        assert logger.total_cycles == 20
        assert len(logger.history) == 10
class TestCycleResult:
    """Tests for CycleResult."""

    def test_to_dict(self):
        """to_dict() returns the expected keys and values."""
        tensor = create_random_tensor(state_id=1)
        # A 'choc' dissonance result with fixed component scores.
        dissonance = DissonanceResult(
            total=0.5,
            base_dissonance=0.4,
            contradiction_score=0.1,
            novelty_penalty=0.0,
            is_choc=True,
            dissonances_by_dimension={},
            hard_negatives=[],
            max_similarity_to_corpus=0.7,
            rag_results_count=3,
        )
        # A clamped fixation attributed entirely to Science.
        fixation = FixationResult(
            delta=np.zeros(EMBEDDING_DIM),
            magnitude=0.001,
            was_clamped=True,
            contributions={'tenacity': 0, 'authority': 0, 'apriori': 0, 'science': 0.001},
        )
        result = CycleResult(
            new_state=tensor,
            previous_state_id=0,
            dissonance=dissonance,
            fixation=fixation,
            impacts=[],
            thoughts=[],
            should_verbalize=True,
            verbalization_reason="conversation_mode",
            processing_time_ms=100,
            cycle_number=1,
        )
        serialized = result.to_dict()
        assert serialized['cycle_number'] == 1
        assert serialized['new_state_id'] == 1
        assert serialized['is_choc'] is True
        assert serialized['should_verbalize'] is True
class TestLatentEngineUnit:
    """Unit tests for LatentEngine (no Weaviate)."""

    @staticmethod
    def _make_engine(encode_return=None):
        # Helper: an engine wired to mocked Weaviate/embedding dependencies.
        model = MagicMock()
        if encode_return is not None:
            model.encode.return_value = encode_return
        return LatentEngine(
            weaviate_client=MagicMock(),
            embedding_model=model,
        )

    @staticmethod
    def _mock_dissonance(total, hard_negatives=()):
        # Helper: a dissonance stand-in with the fields _should_verbalize reads.
        dissonance = MagicMock()
        dissonance.total = total
        dissonance.hard_negatives = list(hard_negatives)
        return dissonance

    def test_vectorize_input(self):
        """_vectorize_input normalizes the vector."""
        engine = self._make_engine(encode_return=np.random.randn(EMBEDDING_DIM))
        result = engine._vectorize_input("Test content")
        assert result.shape == (EMBEDDING_DIM,)
        assert np.isclose(np.linalg.norm(result), 1.0)

    def test_extract_saillances(self):
        """_extract_saillances covers every dimension."""
        engine = self._make_engine()
        X_t = create_random_tensor()
        e_input = np.random.randn(EMBEDDING_DIM)
        saillances = engine._extract_saillances(
            e_input / np.linalg.norm(e_input), X_t
        )
        assert len(saillances) == 8
        for dim in DIMENSION_NAMES:
            assert dim in saillances
            # Saillances are cosine-like scores in [-1, 1].
            assert -1.0 <= saillances[dim] <= 1.0

    def test_should_verbalize_user_mode(self):
        """User trigger -> always verbalize."""
        engine = self._make_engine()
        should, reason = engine._should_verbalize(
            {'type': 'user', 'content': 'Hello'},
            self._mock_dissonance(0.2),
            MagicMock(),
            create_random_tensor(),
        )
        assert should is True
        assert reason == "conversation_mode"

    def test_should_verbalize_high_dissonance(self):
        """High dissonance in autonomous mode -> verbalize."""
        engine = self._make_engine()
        should, reason = engine._should_verbalize(
            {'type': 'corpus', 'content': 'Article'},
            self._mock_dissonance(0.7),  # above the 0.6 threshold
            MagicMock(),
            create_random_tensor(),
        )
        assert should is True
        assert reason == "high_dissonance_discovery"

    def test_should_verbalize_silent(self):
        """Low dissonance in autonomous mode -> stay silent."""
        engine = self._make_engine()
        should, reason = engine._should_verbalize(
            {'type': 'timer', 'content': 'Tick'},
            self._mock_dissonance(0.2),
            MagicMock(),
            create_random_tensor(),
        )
        assert should is False
        assert reason == "silent_processing"

    def test_generate_thought_content_insight(self):
        """Content generation for an insight."""
        engine = self._make_engine()
        fixation = MagicMock()
        fixation.magnitude = 0.001
        content = engine._generate_thought_content(
            trigger_type='user',
            trigger_content='Test trigger',
            dissonance=self._mock_dissonance(0.6, [{'content': 'test'}]),
            fixation_result=fixation,
            thought_type='insight',
        )
        assert 'Choc détecté' in content
        assert '0.600' in content
class TestLatentEngineGetStats:
    """Tests for get_stats()."""

    def test_get_stats_initial(self):
        """All counters start at zero on a fresh engine."""
        engine = LatentEngine(
            weaviate_client=MagicMock(),
            embedding_model=MagicMock(),
        )
        stats = engine.get_stats()
        assert stats['total_cycles'] == 0
        assert stats['impacts_created'] == 0
        assert stats['thoughts_created'] == 0
# Note: integration tests against a real Weaviate instance live in a
# separate file, because they require an active connection.
# Allow running this test module directly, outside the pytest CLI.
if __name__ == "__main__":
    pytest.main([__file__, "-v"])

View File

@@ -0,0 +1,496 @@
#!/usr/bin/env python3
"""
Tests pour le module de métriques - Phase 8.
Exécuter: pytest ikario_processual/tests/test_metrics.py -v
"""
import numpy as np
import pytest
from datetime import datetime, timedelta
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
from ikario_processual.state_tensor import StateTensor, DIMENSION_NAMES, EMBEDDING_DIM
from ikario_processual.daemon import TriggerType
from ikario_processual.metrics import (
MetricPeriod,
StateEvolutionMetrics,
CycleMetrics,
VerbalizationMetrics,
ImpactMetrics,
AlertMetrics,
DailyReport,
ProcessMetrics,
create_metrics,
)
def create_random_tensor(state_id: int = 0, seed: int | None = None) -> StateTensor:
    """Build a StateTensor whose 8 dimensions are random unit vectors.

    Args:
        state_id: identifier stored on the tensor.
        seed: optional seed for reproducibility. NOTE: this reseeds
            NumPy's *global* RNG, affecting all later np.random calls.

    Returns:
        A StateTensor with every Peircean dimension set to a
        normalized random vector of length EMBEDDING_DIM.
    """
    if seed is not None:
        np.random.seed(seed)
    tensor = StateTensor(
        state_id=state_id,
        timestamp=datetime.now().isoformat(),
    )
    for dim_name in DIMENSION_NAMES:
        v = np.random.randn(EMBEDDING_DIM)
        # Normalize so each dimension is a unit vector.
        setattr(tensor, dim_name, v / np.linalg.norm(v))
    return tensor
class TestMetricPeriod:
    """Tests for MetricPeriod."""

    def test_all_periods_exist(self):
        """Every period member carries its expected string value."""
        expected = {
            MetricPeriod.HOURLY: "hourly",
            MetricPeriod.DAILY: "daily",
            MetricPeriod.WEEKLY: "weekly",
            MetricPeriod.MONTHLY: "monthly",
        }
        for member, value in expected.items():
            assert member.value == value
class TestStateEvolutionMetrics:
    """Tests for StateEvolutionMetrics."""

    def test_default_values(self):
        """A freshly built instance is fully zeroed."""
        m = StateEvolutionMetrics()
        assert m.total_drift_from_s0 == 0.0
        assert m.drift_from_ref == 0.0
        assert m.average_delta_magnitude == 0.0
        assert m.dimensions_most_changed == []
class TestCycleMetrics:
    """Tests for CycleMetrics."""

    def test_default_values(self):
        """Defaults: zero counts and an empty trigger breakdown."""
        m = CycleMetrics()
        assert (m.total, m.conversation, m.autonomous) == (0, 0, 0)
        assert m.by_trigger_type == {}
class TestVerbalizationMetrics:
    """Tests for VerbalizationMetrics."""

    def test_default_values(self):
        """Defaults are zero for every counter and average."""
        m = VerbalizationMetrics()
        assert (m.total, m.from_conversation, m.from_autonomous) == (0, 0, 0)
        assert m.average_length == 0.0
        assert m.reasoning_detected_count == 0
class TestImpactMetrics:
    """Tests for ImpactMetrics."""

    def test_default_values(self):
        """Defaults: nothing created, resolved, or pending."""
        m = ImpactMetrics()
        assert (m.created, m.resolved, m.pending) == (0, 0, 0)
class TestAlertMetrics:
    """Tests for AlertMetrics."""

    def test_default_values(self):
        """Defaults: zeroed counters and no last-alert timestamp."""
        m = AlertMetrics()
        assert (m.total, m.ok, m.warning, m.critical) == (0, 0, 0, 0)
        assert m.last_alert_time is None
class TestDailyReport:
    """Tests for DailyReport."""

    def test_create_report(self):
        """A report keeps the values it was constructed with."""
        report = DailyReport(
            date="2024-01-15",
            cycles=CycleMetrics(total=100, conversation=30, autonomous=70),
            verbalizations=VerbalizationMetrics(total=35),
            state_evolution=StateEvolutionMetrics(total_drift_from_s0=0.05),
            impacts=ImpactMetrics(created=10, resolved=8),
            alerts=AlertMetrics(total=5, ok=3, warning=2),
            thoughts_created=50,
            uptime_hours=24.0,
        )
        assert report.date == "2024-01-15"
        assert report.cycles.total == 100
        assert report.verbalizations.total == 35
        assert report.thoughts_created == 50

    def test_to_dict(self):
        """to_dict() exposes nested metric dictionaries."""
        report = DailyReport(
            date="2024-01-15",
            cycles=CycleMetrics(total=100),
            verbalizations=VerbalizationMetrics(total=35),
            state_evolution=StateEvolutionMetrics(),
            impacts=ImpactMetrics(),
            alerts=AlertMetrics(),
        )
        payload = report.to_dict()
        for key in ('date', 'cycles', 'verbalizations'):
            assert key in payload
        assert payload['cycles']['total'] == 100
        assert payload['verbalizations']['total'] == 35

    def test_format_summary(self):
        """format_summary() renders all the expected textual sections."""
        report = DailyReport(
            date="2024-01-15",
            cycles=CycleMetrics(total=100, conversation=30, autonomous=70),
            verbalizations=VerbalizationMetrics(total=35, average_length=150.0),
            state_evolution=StateEvolutionMetrics(
                total_drift_from_s0=0.05,
                dimensions_most_changed=[('valeurs', 0.02), ('firstness', 0.01)],
            ),
            impacts=ImpactMetrics(created=10, resolved=8),
            alerts=AlertMetrics(total=5, ok=3, warning=2),
            thoughts_created=50,
            uptime_hours=24.0,
        )
        rendered = report.format_summary()
        for fragment in (
            "RAPPORT IKARIO",
            "2024-01-15",
            "Total: 100",
            "Conversation: 30",
            "Autonome: 70",
            "valeurs",
        ):
            assert fragment in rendered
class TestProcessMetrics:
    """Tests for ProcessMetrics."""

    def test_create_metrics(self):
        """A new collector starts empty, without reference tensors."""
        pm = ProcessMetrics()
        assert pm.S_0 is None
        assert pm.x_ref is None
        assert len(pm._cycle_history) == 0

    def test_create_with_references(self):
        """S_0 and x_ref are stored by identity."""
        s0 = create_random_tensor(state_id=0, seed=42)
        ref = create_random_tensor(state_id=-1, seed=43)
        pm = ProcessMetrics(S_0=s0, x_ref=ref)
        assert pm.S_0 is s0
        assert pm.x_ref is ref

    def test_record_cycle(self):
        """Recording one cycle appends one fully-populated entry."""
        pm = ProcessMetrics()
        pm.record_cycle(trigger_type=TriggerType.USER, delta_magnitude=0.01)
        assert len(pm._cycle_history) == 1
        entry = pm._cycle_history[0]
        assert entry['trigger_type'] == 'user'
        assert entry['delta_magnitude'] == 0.01

    def test_record_multiple_cycles(self):
        """Each cycle also feeds the delta-magnitude history."""
        pm = ProcessMetrics()
        for i in range(10):
            pm.record_cycle(trigger_type=TriggerType.USER, delta_magnitude=0.01 * i)
        assert len(pm._cycle_history) == 10
        assert len(pm._delta_history) == 10

    def test_record_verbalization(self):
        """A verbalization stores its length and reasoning flag."""
        pm = ProcessMetrics()
        text = "Ceci est une verbalisation de test."
        pm.record_verbalization(
            text=text,
            from_autonomous=False,
            reasoning_detected=True,
        )
        assert len(pm._verbalization_history) == 1
        entry = pm._verbalization_history[0]
        assert entry['length'] == len(text)
        assert entry['reasoning_detected'] is True

    def test_record_impact(self):
        """An impact entry keeps its identifier."""
        pm = ProcessMetrics()
        pm.record_impact(impact_id="impact_001", created=True, resolved=False)
        assert len(pm._impact_history) == 1
        assert pm._impact_history[0]['impact_id'] == "impact_001"

    def test_record_alert(self):
        """An alert entry keeps its level."""
        pm = ProcessMetrics()
        pm.record_alert(level="warning", cumulative_drift=0.015)
        assert len(pm._alert_history) == 1
        assert pm._alert_history[0]['level'] == "warning"

    def test_record_thought(self):
        """A thought entry keeps its identifier."""
        pm = ProcessMetrics()
        pm.record_thought(
            thought_id="thought_001",
            trigger_content="Question philosophique",
        )
        assert len(pm._thought_history) == 1
        assert pm._thought_history[0]['thought_id'] == "thought_001"
class TestDailyReportComputation:
    """Tests for the daily report computation."""

    def test_compute_empty_report(self):
        """With no recorded data the report is all zeros."""
        report = ProcessMetrics().compute_daily_report()
        assert report.cycles.total == 0
        assert report.verbalizations.total == 0
        assert report.alerts.total == 0

    def test_compute_with_cycles(self):
        """Cycles are split into conversation vs autonomous by trigger."""
        pm = ProcessMetrics()
        for _ in range(5):
            pm.record_cycle(TriggerType.USER, 0.01)
        for _ in range(10):
            pm.record_cycle(TriggerType.VEILLE, 0.005)
        report = pm.compute_daily_report()
        assert report.cycles.total == 15
        assert report.cycles.conversation == 5
        assert report.cycles.autonomous == 10

    def test_compute_with_state_evolution(self):
        """Drift against S_0 and x_ref is derived from the current state."""
        pm = ProcessMetrics(
            S_0=create_random_tensor(state_id=0, seed=42),
            x_ref=create_random_tensor(state_id=-1, seed=43),
        )
        report = pm.compute_daily_report(
            current_state=create_random_tensor(state_id=100, seed=44)
        )
        evolution = report.state_evolution
        assert evolution.total_drift_from_s0 > 0
        assert evolution.drift_from_ref > 0
        assert len(evolution.dimensions_most_changed) == 8

    def test_compute_with_alerts(self):
        """Alerts are tallied per severity level."""
        pm = ProcessMetrics()
        for level, drift in (
            ("ok", 0.001),
            ("ok", 0.002),
            ("warning", 0.015),
            ("critical", 0.025),
        ):
            pm.record_alert(level, drift)
        report = pm.compute_daily_report()
        assert report.alerts.total == 4
        assert report.alerts.ok == 2
        assert report.alerts.warning == 1
        assert report.alerts.critical == 1

    def test_compute_average_verbalization_length(self):
        """Average length is derived from all recorded verbalizations."""
        pm = ProcessMetrics()
        pm.record_verbalization("Court", from_autonomous=False)
        pm.record_verbalization("Un texte un peu plus long", from_autonomous=False)
        pm.record_verbalization("Encore plus long pour le test", from_autonomous=True)
        report = pm.compute_daily_report()
        assert report.verbalizations.total == 3
        assert report.verbalizations.from_conversation == 2
        assert report.verbalizations.from_autonomous == 1
        assert report.verbalizations.average_length > 0
class TestWeeklySummary:
    """Tests for the weekly summary."""

    def test_compute_weekly_summary(self):
        """The weekly summary spans seven daily reports and totals cycles."""
        pm = ProcessMetrics()
        for _ in range(50):
            pm.record_cycle(TriggerType.USER, 0.01)
        summary = pm.compute_weekly_summary()
        assert 'period' in summary
        assert summary['period'] == 'weekly'
        assert 'daily_reports' in summary
        assert len(summary['daily_reports']) == 7
        assert 'summary' in summary
        assert summary['summary']['total_cycles'] == 50
class TestHealthStatus:
    """Tests for the health status."""

    def test_healthy_status(self):
        """Ordinary cycles leave the system healthy."""
        pm = ProcessMetrics()
        for _ in range(10):
            pm.record_cycle(TriggerType.USER, 0.01)
        status = pm.get_health_status()
        assert status['status'] == 'healthy'
        assert status['total_cycles'] == 10

    def test_warning_status(self):
        """Several recent warnings degrade the status to warning."""
        pm = ProcessMetrics()
        for _ in range(5):
            pm.record_alert("warning", 0.015)
        assert pm.get_health_status()['status'] == 'warning'

    def test_critical_status(self):
        """A single critical alert flips the status to critical."""
        pm = ProcessMetrics()
        pm.record_alert("critical", 0.03)
        assert pm.get_health_status()['status'] == 'critical'

    def test_uptime_tracked(self):
        """Uptime in hours is reported and non-negative."""
        status = ProcessMetrics().get_health_status()
        assert 'uptime_hours' in status
        assert status['uptime_hours'] >= 0
class TestReset:
    """Tests for reset()."""

    def test_reset_clears_history(self):
        """reset() wipes every history buffer."""
        pm = ProcessMetrics()
        pm.record_cycle(TriggerType.USER, 0.01)
        pm.record_verbalization("Test")
        pm.record_alert("ok", 0.001)
        # Sanity: something was actually recorded before the reset.
        assert len(pm._cycle_history) > 0
        assert len(pm._verbalization_history) > 0
        pm.reset()
        for history in (pm._cycle_history, pm._verbalization_history, pm._alert_history):
            assert len(history) == 0
class TestCreateMetricsFactory:
    """Tests for the create_metrics factory."""

    def test_create_without_args(self):
        """With no arguments the factory yields a ProcessMetrics."""
        pm = create_metrics()
        assert pm is not None
        assert isinstance(pm, ProcessMetrics)

    def test_create_with_references(self):
        """The factory forwards S_0 and x_ref untouched."""
        s0 = create_random_tensor(state_id=0, seed=42)
        ref = create_random_tensor(state_id=-1, seed=43)
        pm = create_metrics(S_0=s0, x_ref=ref)
        assert pm.S_0 is s0
        assert pm.x_ref is ref
class TestIntegrationWithDaemon:
    """Integration tests with the daemon."""

    def test_cycle_types_match_daemon(self):
        """Every daemon trigger type can be recorded and round-trips."""
        pm = ProcessMetrics()
        for trigger in TriggerType:
            pm.record_cycle(trigger, 0.01)
        assert len(pm._cycle_history) == len(TriggerType)
        seen = {entry['trigger_type'] for entry in pm._cycle_history}
        assert seen == {t.value for t in TriggerType}
if __name__ == "__main__":
    # Propagate pytest's return code so the process exit status
    # reflects test failures when the file is run as a script.
    raise SystemExit(pytest.main([__file__, "-v"]))

View File

@@ -0,0 +1,297 @@
#!/usr/bin/env python3
"""
Tests pour StateTensor - Tenseur d'état 8×1024.
Exécuter: pytest ikario_processual/tests/test_state_tensor.py -v
"""
import numpy as np
import pytest
from datetime import datetime
# Import du module à tester
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
from ikario_processual.state_tensor import (
StateTensor,
TensorDimension,
DIMENSION_NAMES,
EMBEDDING_DIM,
)
class TestStateTensorBasic:
    """Basic StateTensor tests."""

    def test_create_empty_tensor(self):
        """An empty tensor has zeroed, correctly shaped dimensions."""
        t = StateTensor(state_id=0, timestamp=datetime.now().isoformat())
        assert t.state_id == 0
        assert t.firstness.shape == (EMBEDDING_DIM,)
        assert t.valeurs.shape == (EMBEDDING_DIM,)
        assert not t.firstness.any()

    def test_create_with_values(self):
        """A provided unit vector is stored unchanged and stays normalized."""
        vec = np.random.randn(EMBEDDING_DIM)
        vec = vec / np.linalg.norm(vec)
        t = StateTensor(
            state_id=1,
            timestamp=datetime.now().isoformat(),
            firstness=vec,
        )
        assert np.allclose(t.firstness, vec)
        assert np.isclose(np.linalg.norm(t.firstness), 1.0)

    def test_to_matrix(self):
        """to_matrix() yields an 8 x EMBEDDING_DIM array."""
        t = StateTensor(state_id=0, timestamp=datetime.now().isoformat())
        assert t.to_matrix().shape == (8, EMBEDDING_DIM)

    def test_to_flat(self):
        """to_flat() concatenates all dimensions into one 8192-long vector."""
        t = StateTensor(state_id=0, timestamp=datetime.now().isoformat())
        flat = t.to_flat()
        assert flat.shape == (8 * EMBEDDING_DIM,)
        assert flat.shape == (8192,)

    def test_dimension_names(self):
        """All eight Peircean dimensions are present, in order."""
        assert DIMENSION_NAMES == [
            "firstness", "secondness", "thirdness", "dispositions",
            "orientations", "engagements", "pertinences", "valeurs",
        ]
        assert len(DIMENSION_NAMES) == 8
class TestStateTensorOperations:
    """Tests for StateTensor operations."""

    def test_copy(self):
        """copy() is deep: mutating the source leaves the copy intact."""
        src = StateTensor(
            state_id=1,
            timestamp=datetime.now().isoformat(),
            firstness=np.random.randn(EMBEDDING_DIM),
        )
        dup = src.copy()
        src.firstness[0] = 999.0
        assert dup.firstness[0] != 999.0

    def test_set_dimension(self):
        """set_dimension() normalizes the stored vector."""
        t = StateTensor(state_id=0, timestamp=datetime.now().isoformat())
        t.set_dimension(TensorDimension.VALEURS, np.random.randn(EMBEDDING_DIM))
        assert np.isclose(np.linalg.norm(t.valeurs), 1.0)

    def test_get_dimension(self):
        """get_dimension() returns a vector of the embedding size."""
        t = StateTensor(state_id=0, timestamp=datetime.now().isoformat())
        vec = t.get_dimension(TensorDimension.FIRSTNESS)
        assert vec.shape == (EMBEDDING_DIM,)

    def test_to_dict(self):
        """to_dict() carries properties but not the raw vectors."""
        t = StateTensor(
            state_id=5,
            timestamp="2026-02-01T12:00:00",
            trigger_type="user",
            trigger_content="Hello",
        )
        props = t.to_dict()
        assert props["state_id"] == 5
        assert props["trigger_type"] == "user"
        # Vectors live outside the properties payload (stored separately).
        assert "firstness" not in props

    def test_get_vectors_dict(self):
        """get_vectors_dict() exposes all 8 named vectors for Weaviate."""
        t = StateTensor(state_id=0, timestamp=datetime.now().isoformat())
        vectors = t.get_vectors_dict()
        assert len(vectors) == 8
        assert "firstness" in vectors
        assert "valeurs" in vectors
        assert len(vectors["firstness"]) == EMBEDDING_DIM
class TestStateTensorAggregation:
    """Tests for aggregation operations."""

    @staticmethod
    def _randomize(tensor):
        """Fill every dimension of *tensor* with a random unit vector."""
        for dim_name in DIMENSION_NAMES:
            vec = np.random.randn(EMBEDDING_DIM)
            setattr(tensor, dim_name, vec / np.linalg.norm(vec))

    def test_weighted_mean_two_tensors(self):
        """A 50/50 weighted mean yields normalized dimensions."""
        t1 = StateTensor(state_id=1, timestamp=datetime.now().isoformat())
        t2 = StateTensor(state_id=2, timestamp=datetime.now().isoformat())
        self._randomize(t1)
        self._randomize(t2)
        result = StateTensor.weighted_mean([t1, t2], [0.5, 0.5])
        # Every dimension of the result must come back as a unit vector.
        for dim_name in DIMENSION_NAMES:
            norm = np.linalg.norm(getattr(result, dim_name))
            assert np.isclose(norm, 1.0, atol=1e-5)

    def test_blend(self):
        """blend() returns a tensor with an undefined state_id."""
        t1 = StateTensor(state_id=1, timestamp=datetime.now().isoformat())
        t2 = StateTensor(state_id=2, timestamp=datetime.now().isoformat())
        self._randomize(t1)
        self._randomize(t2)
        result = StateTensor.blend(t1, t2, alpha=0.7)
        assert result is not None
        assert result.state_id == -1  # undefined

    def test_from_matrix(self):
        """from_matrix() maps rows onto dimensions in declaration order."""
        matrix = np.random.randn(8, EMBEDDING_DIM)
        t = StateTensor.from_matrix(
            matrix=matrix,
            state_id=10,
            timestamp="2026-02-01T12:00:00",
        )
        assert t.state_id == 10
        assert np.allclose(t.firstness, matrix[0])
        assert np.allclose(t.valeurs, matrix[7])

    def test_from_matrix_wrong_shape(self):
        """from_matrix() rejects a matrix that is not 8 rows high."""
        bad = np.random.randn(4, EMBEDDING_DIM)  # 4 rows instead of 8
        with pytest.raises(ValueError):
            StateTensor.from_matrix(bad, state_id=0, timestamp="")
class TestStateTensorDistance:
    """Distance computations between tensors."""

    def test_distance_to_self_is_zero(self):
        """The distance from a tensor to itself is exactly zero."""
        t = StateTensor(state_id=0, timestamp=datetime.now().isoformat())
        for dim_name in DIMENSION_NAMES:
            vec = np.random.randn(EMBEDDING_DIM)
            setattr(t, dim_name, vec / np.linalg.norm(vec))
        flat = t.to_flat()
        assert np.linalg.norm(flat - flat) == 0.0

    def test_normalized_distance(self):
        """Normalized distance between random tensors is positive and finite."""
        t1 = StateTensor(state_id=1, timestamp=datetime.now().isoformat())
        t2 = StateTensor(state_id=2, timestamp=datetime.now().isoformat())
        for tensor in (t1, t2):
            for dim_name in DIMENSION_NAMES:
                vec = np.random.randn(EMBEDDING_DIM)
                setattr(tensor, dim_name, vec / np.linalg.norm(vec))
        diff = t1.to_flat() - t2.to_flat()
        distance = np.linalg.norm(diff) / np.linalg.norm(t2.to_flat())
        assert distance > 0
        assert np.isfinite(distance)
class TestStateTensorSerialization:
    """Serialization tests."""

    def test_from_dict_roundtrip(self):
        """to_dict()/from_dict() round-trips properties and vectors."""
        original = StateTensor(
            state_id=42,
            timestamp="2026-02-01T12:00:00",
            previous_state_id=41,
            trigger_type="user",
            trigger_content="Test message",
            embedding_model="BAAI/bge-m3",
        )
        # Simulate the externally stored vectors (as Weaviate would hold them).
        vectors = {}
        for dim_name in DIMENSION_NAMES:
            vec = np.random.randn(EMBEDDING_DIM)
            vec = vec / np.linalg.norm(vec)
            setattr(original, dim_name, vec)
            vectors[dim_name] = vec.tolist()
        rebuilt = StateTensor.from_dict(original.to_dict(), vectors)
        assert rebuilt.state_id == 42
        assert rebuilt.trigger_type == "user"
        assert np.allclose(rebuilt.firstness, original.firstness)
if __name__ == "__main__":
    # Propagate pytest's return code so the process exit status
    # reflects test failures when the file is run as a script.
    raise SystemExit(pytest.main([__file__, "-v"]))

View File

@@ -0,0 +1,605 @@
#!/usr/bin/env python3
"""
Tests pour le module state_to_language - Phase 5.
Le cycle de traduction :
1. Projeter StateTensor sur directions interpretables
2. Construire prompt de traduction
3. LLM en mode ZERO-REASONING
4. Valider absence de raisonnement
Executer: pytest ikario_processual/tests/test_state_to_language.py -v
"""
import json
import numpy as np
import pytest
import asyncio
from datetime import datetime
from unittest.mock import MagicMock, AsyncMock, patch
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
from ikario_processual.state_tensor import StateTensor, DIMENSION_NAMES, EMBEDDING_DIM
from ikario_processual.state_to_language import (
ProjectionDirection,
TranslationResult,
StateToLanguage,
REASONING_MARKERS,
CATEGORY_TO_DIMENSION,
create_directions_from_config,
)
def create_random_tensor(state_id: int = 0, seed: int | None = None) -> StateTensor:
    """Build a StateTensor whose 8 dimensions are random unit vectors.

    Args:
        state_id: identifier stored on the tensor.
        seed: optional seed for reproducibility (backward-compatible
            addition, mirroring the identical helper in test_metrics.py).
            NOTE: this reseeds NumPy's *global* RNG.

    Returns:
        A StateTensor with every dimension set to a normalized random
        vector of length EMBEDDING_DIM.
    """
    if seed is not None:
        np.random.seed(seed)
    tensor = StateTensor(
        state_id=state_id,
        timestamp=datetime.now().isoformat(),
    )
    for dim_name in DIMENSION_NAMES:
        v = np.random.randn(EMBEDDING_DIM)
        setattr(tensor, dim_name, v / np.linalg.norm(v))
    return tensor
def create_random_direction(name: str, category: str) -> ProjectionDirection:
    """Build a ProjectionDirection around a random unit vector."""
    vec = np.random.randn(EMBEDDING_DIM)
    return ProjectionDirection(
        name=name,
        category=category,
        pole_positive="positive",
        pole_negative="negative",
        description=f"Direction {name}",
        vector=vec / np.linalg.norm(vec),
    )
class TestProjectionDirection:
    """Tests for the ProjectionDirection class."""

    def test_create_direction(self):
        """Construction stores name, category and a full-size vector."""
        vec = np.random.randn(EMBEDDING_DIM)
        vec = vec / np.linalg.norm(vec)
        direction = ProjectionDirection(
            name="curiosity",
            category="epistemic",
            pole_positive="curieux",
            pole_negative="desinteresse",
            description="Degre de curiosite",
            vector=vec,
        )
        assert direction.name == "curiosity"
        assert direction.category == "epistemic"
        assert direction.vector.shape == (EMBEDDING_DIM,)

    def test_project_on_direction(self):
        """Projecting the direction's own (or opposite) vector gives +/-1."""
        vec = np.random.randn(EMBEDDING_DIM)
        vec = vec / np.linalg.norm(vec)
        direction = ProjectionDirection(
            name="test",
            category="test",
            pole_positive="",
            pole_negative="",
            description="",
            vector=vec,
        )
        assert np.isclose(direction.project(vec), 1.0)
        assert np.isclose(direction.project(-vec), -1.0)

    def test_projection_range(self):
        """Projections of random unit vectors stay within [-1, 1]."""
        direction = create_random_direction("test", "test")
        for _ in range(10):
            sample = np.random.randn(EMBEDDING_DIM)
            sample = sample / np.linalg.norm(sample)
            assert -1.0 <= direction.project(sample) <= 1.0
class TestTranslationResult:
    """Tests for TranslationResult."""

    def test_create_result(self):
        """Construction keeps the text and the reasoning flag."""
        result = TranslationResult(
            text="Je suis curieux.",
            projections={'epistemic': {'curiosity': 0.72}},
            output_type="response",
            reasoning_detected=False,
            json_valid=True,
            processing_time_ms=50,
        )
        assert result.text == "Je suis curieux."
        assert result.reasoning_detected is False

    def test_to_dict(self):
        """to_dict() exposes every result field."""
        result = TranslationResult(
            text="Test",
            projections={'test': {'test': 0.5}},
            output_type="response",
            reasoning_detected=True,
            json_valid=False,
            processing_time_ms=100,
        )
        payload = result.to_dict()
        assert 'text' in payload
        assert 'projections' in payload
        assert payload['reasoning_detected'] is True
        assert payload['json_valid'] is False
class TestStateToLanguage:
    """Tests for the StateToLanguage class."""

    def test_create_translator(self):
        """A fresh translator has no directions and no translations."""
        translator = StateToLanguage()
        assert translator.directions == []
        assert translator._translations_count == 0

    def test_add_direction(self):
        """add_direction() accumulates directions."""
        translator = StateToLanguage()
        for name, cat in (("curiosity", "epistemic"), ("enthusiasm", "affective")):
            translator.add_direction(create_random_direction(name, cat))
        assert len(translator.directions) == 2

    def test_project_state(self):
        """project_state() groups values by category, all within [-1, 1]."""
        translator = StateToLanguage()
        for name, cat in (
            ("curiosity", "epistemic"),
            ("certainty", "epistemic"),
            ("enthusiasm", "affective"),
        ):
            translator.add_direction(create_random_direction(name, cat))
        projections = translator.project_state(create_random_tensor())
        # Structure: category -> {direction name -> value}.
        assert 'epistemic' in projections
        assert 'affective' in projections
        assert 'curiosity' in projections['epistemic']
        assert 'enthusiasm' in projections['affective']
        for components in projections.values():
            for value in components.values():
                assert -1.0 <= value <= 1.0

    def test_project_state_flat(self):
        """project_state_flat() flattens projections to name -> float."""
        translator = StateToLanguage()
        translator.add_direction(create_random_direction("curiosity", "epistemic"))
        translator.add_direction(create_random_direction("enthusiasm", "affective"))
        flat = translator.project_state_flat(create_random_tensor())
        assert 'curiosity' in flat
        assert 'enthusiasm' in flat
        assert isinstance(flat['curiosity'], float)
class TestInterpretValue:
    """Tests for interpret_value."""

    def test_very_positive(self):
        """Strongly positive values map to 'tres'."""
        assert StateToLanguage.interpret_value(0.8) == "tres"

    def test_moderately_positive(self):
        """Moderately positive values map to 'moderement'."""
        assert StateToLanguage.interpret_value(0.35) == "moderement"

    def test_neutral(self):
        """Values near zero map to 'neutre'."""
        for value in (0.0, -0.1):
            assert StateToLanguage.interpret_value(value) == "neutre"

    def test_moderately_negative(self):
        """Moderately negative values map to 'peu'."""
        assert StateToLanguage.interpret_value(-0.35) == "peu"

    def test_very_negative(self):
        """Strongly negative values map to 'pas du tout'."""
        assert StateToLanguage.interpret_value(-0.8) == "pas du tout"
class TestBuildTranslationPrompt:
    """Tests for build_translation_prompt."""

    def test_prompt_structure(self):
        """The prompt contains state sections and strict instructions."""
        translator = StateToLanguage()
        projections = {
            'epistemic': {'curiosity': 0.72, 'certainty': -0.18},
            'affective': {'enthusiasm': 0.45},
        }
        prompt = translator.build_translation_prompt(projections, "response")
        for fragment in (
            "ETAT COGNITIF",
            "EPISTEMIC:",
            "AFFECTIVE:",
            "curiosity",
            "0.72",
            "INSTRUCTION",
            "NE REFLECHIS PAS",
        ):
            assert fragment in prompt

    def test_prompt_output_type(self):
        """The requested output type appears in the prompt."""
        prompt = StateToLanguage().build_translation_prompt({}, "question")
        assert "question" in prompt
class TestZeroReasoningSystemPrompt:
    """Tests for the zero-reasoning system prompt."""

    def test_strict_instructions(self):
        """Strict wording is present (checked case-insensitively)."""
        upper = StateToLanguage().build_zero_reasoning_system_prompt().upper()
        for word in ("NE DOIS PAS", "RAISONNER", "CODEC", "STRICT"):
            assert word in upper

    def test_no_thinking_instruction(self):
        """The prompt explicitly mentions <thinking> blocks as forbidden."""
        prompt = StateToLanguage().build_zero_reasoning_system_prompt()
        assert "<thinking>" in prompt.lower()
class TestJsonSystemPrompt:
    """Tests for the JSON system prompt."""

    def test_json_schema_included(self):
        """The supplied JSON schema is embedded in the prompt."""
        schema = {
            "type": "object",
            "required": ["verbalization"],
            "properties": {"verbalization": {"type": "string"}},
        }
        prompt = StateToLanguage().build_json_system_prompt(schema)
        for fragment in ("JSON", "verbalization", "UNIQUEMENT"):
            assert fragment in prompt
class TestCheckReasoningMarkers:
    """Tests for check_reasoning_markers."""

    def test_no_markers(self):
        """Clean text triggers no markers."""
        flagged, found = StateToLanguage().check_reasoning_markers(
            "Je suis curieux. Explorons cette idee."
        )
        assert flagged is False
        assert found == []

    def test_with_markers(self):
        """Reasoning phrases are detected and reported."""
        flagged, found = StateToLanguage().check_reasoning_markers(
            "Je pense que cette approche est interessante. Apres reflexion, je suggere..."
        )
        assert flagged is True
        assert "je pense que" in found
        assert "apres reflexion" in found

    def test_case_insensitive(self):
        """Detection ignores letter case."""
        flagged, found = StateToLanguage().check_reasoning_markers(
            "IL ME SEMBLE que c'est correct."
        )
        assert flagged is True
        assert "il me semble" in found
class TestTranslateSyncNoApi:
    """Tests for translate_sync (offline test mode, no API)."""

    def test_translate_sync_returns_result(self):
        """translate_sync yields a TranslationResult tagged by output type."""
        translator = StateToLanguage()
        translator.add_direction(create_random_direction("curiosity", "epistemic"))
        result = translator.translate_sync(create_random_tensor(), output_type="response")
        assert isinstance(result, TranslationResult)
        assert "[RESPONSE]" in result.text.upper()
        assert result.output_type == "response"

    def test_translate_sync_increments_count(self):
        """Every translate_sync call bumps the translation counter."""
        translator = StateToLanguage()
        before = translator._translations_count
        for _ in range(2):
            translator.translate_sync(create_random_tensor())
        assert translator._translations_count == before + 2
class TestTranslateAsync:
    """Tests for the async translate() with mocked clients."""

    def test_translate_without_client(self):
        """Without an Anthropic client, translate() returns a mock."""
        async def scenario():
            translator = StateToLanguage()
            translator.add_direction(create_random_direction("curiosity", "epistemic"))
            result = await translator.translate(create_random_tensor())
            assert "[MOCK TRANSLATION]" in result.text
            assert result.reasoning_detected is False
        asyncio.run(scenario())

    def test_translate_with_mock_client(self):
        """translate() calls the client with temperature 0 and 500 tokens."""
        async def scenario():
            client = MagicMock()
            response = MagicMock()
            response.content = [MagicMock(text="Je suis curieux.")]
            client.messages.create = AsyncMock(return_value=response)
            translator = StateToLanguage(anthropic_client=client)
            translator.add_direction(create_random_direction("curiosity", "epistemic"))
            result = await translator.translate(create_random_tensor())
            assert result.text == "Je suis curieux."
            assert client.messages.create.called
            # Verify the zero-reasoning call parameters.
            kwargs = client.messages.create.call_args.kwargs
            assert kwargs['temperature'] == 0.0
            assert kwargs['max_tokens'] == 500
        asyncio.run(scenario())

    def test_translate_detects_reasoning(self):
        """translate() flags reasoning markers found in the LLM output."""
        async def scenario():
            client = MagicMock()
            response = MagicMock()
            response.content = [MagicMock(text="Je pense que c'est interessant.")]
            client.messages.create = AsyncMock(return_value=response)
            translator = StateToLanguage(anthropic_client=client)
            result = await translator.translate(
                create_random_tensor(), force_zero_reasoning=True
            )
            assert result.reasoning_detected is True
        asyncio.run(scenario())
class TestTranslateStructured:
    """Tests for translate_structured (Amendment #14)."""

    @staticmethod
    def _client_returning(raw_text):
        """Build a mock Anthropic client whose response body is *raw_text*."""
        client = MagicMock()
        response = MagicMock()
        response.content = [MagicMock(text=raw_text)]
        client.messages.create = AsyncMock(return_value=response)
        return client

    def test_translate_structured_without_client(self):
        """Without a client, a mock JSON translation is returned."""
        async def scenario():
            translator = StateToLanguage()
            result = await translator.translate_structured(create_random_tensor())
            assert "[MOCK JSON TRANSLATION]" in result.text
        asyncio.run(scenario())

    def test_translate_structured_valid_json(self):
        """A well-formed JSON payload is parsed and marked valid."""
        async def scenario():
            translator = StateToLanguage(
                anthropic_client=self._client_returning(
                    '{"verbalization": "Je suis curieux."}'
                )
            )
            result = await translator.translate_structured(create_random_tensor())
            assert result.text == "Je suis curieux."
            assert result.json_valid is True
        asyncio.run(scenario())

    def test_translate_structured_extra_fields(self):
        """Unexpected JSON fields invalidate the payload."""
        async def scenario():
            translator = StateToLanguage(
                anthropic_client=self._client_returning(
                    '{"verbalization": "Texte", "extra": "pas autorise"}'
                )
            )
            result = await translator.translate_structured(create_random_tensor())
            assert result.text == "Texte"
            assert result.json_valid is False
        asyncio.run(scenario())

    def test_translate_structured_invalid_json(self):
        """Non-JSON output is kept as plain text and marked invalid."""
        async def scenario():
            translator = StateToLanguage(
                anthropic_client=self._client_returning("Ceci n'est pas du JSON")
            )
            result = await translator.translate_structured(create_random_tensor())
            assert result.json_valid is False
            assert "Ceci n'est pas du JSON" in result.text
        asyncio.run(scenario())
class TestGetStats:
    """Tests for get_stats."""

    def test_initial_stats(self):
        """A fresh translator reports all-zero statistics."""
        stats = StateToLanguage().get_stats()
        assert stats['translations_count'] == 0
        assert stats['directions_count'] == 0
        assert stats['reasoning_warnings'] == 0

    def test_stats_with_directions(self):
        """Registered directions show up in the counts and categories."""
        translator = StateToLanguage()
        for name, category in (("curiosity", "epistemic"), ("enthusiasm", "affective")):
            translator.add_direction(create_random_direction(name, category))
        stats = translator.get_stats()
        assert stats['directions_count'] == 2
        assert 'epistemic' in stats['categories']
        assert 'affective' in stats['categories']
class TestCategoryToDimension:
    """Tests for the category -> dimension mapping."""

    def test_epistemic_maps_to_firstness(self):
        """epistemic -> firstness."""
        assert CATEGORY_TO_DIMENSION['epistemic'] == 'firstness'

    def test_affective_maps_to_dispositions(self):
        """affective -> dispositions."""
        assert CATEGORY_TO_DIMENSION['affective'] == 'dispositions'

    def test_ethical_maps_to_valeurs(self):
        """ethical -> valeurs."""
        assert CATEGORY_TO_DIMENSION['ethical'] == 'valeurs'

    def test_all_categories_mapped(self):
        """Every core category maps onto a known tensor dimension."""
        expected = (
            'epistemic', 'affective', 'cognitive', 'relational',
            'ethical', 'temporal', 'thematic', 'metacognitive',
            'vital', 'ecosystemic', 'philosophical',
        )
        for category in expected:
            assert category in CATEGORY_TO_DIMENSION
            assert CATEGORY_TO_DIMENSION[category] in DIMENSION_NAMES
class TestReasoningMarkers:
    """Tests for the reasoning-marker list."""

    def test_markers_exist(self):
        """The marker list is non-empty."""
        assert len(REASONING_MARKERS) > 0

    def test_markers_are_lowercase(self):
        """Every marker is stored lowercase."""
        assert all(marker == marker.lower() for marker in REASONING_MARKERS)
class TestCreateDirectionsFromConfig:
    """Tests for create_directions_from_config."""

    def test_create_from_config(self):
        """Directions are built from a config dict via the embedding model."""
        # Deterministic, distinct embeddings for positive/negative examples.
        np.random.seed(42)
        positives = np.random.randn(5, EMBEDDING_DIM)
        positives_shifted = np.random.randn(5, EMBEDDING_DIM) + 1.0  # offset keeps them distinct

        # Mock embedding model: first encode() call returns the positive
        # embeddings, the second call the negative ones.
        model = MagicMock()
        model.encode = MagicMock(side_effect=[positives, positives_shifted])

        config = {
            "curiosity": {
                "category": "epistemic",
                "pole_positive": "curieux",
                "pole_negative": "desinteresse",
                "description": "Degre de curiosite",
                "positive_examples": ["a", "b", "c", "d", "e"],
                "negative_examples": ["f", "g", "h", "i", "j"],
            }
        }

        directions = create_directions_from_config(config, model)

        assert len(directions) == 1
        direction = directions[0]
        assert direction.name == "curiosity"
        assert direction.category == "epistemic"
        assert direction.vector.shape == (EMBEDDING_DIM,)
        # The resulting direction vector must be unit-norm.
        assert np.isclose(np.linalg.norm(direction.vector), 1.0)
# Allow running this test module directly (outside of pytest discovery).
if __name__ == "__main__":
    pytest.main([__file__, "-v"])

View File

@@ -0,0 +1,483 @@
#!/usr/bin/env python3
"""
Tests pour le module de vigilance - Phase 6.
Systeme de vigilance x_ref (David) :
1. x_ref N'EST PAS un attracteur (Ikario ne tend pas vers David)
2. x_ref EST un garde-fou (alerte si distance > seuil)
3. Alertes : ok, warning, critical
Executer: pytest ikario_processual/tests/test_vigilance.py -v
"""
import json
import numpy as np
import pytest
import tempfile
from datetime import datetime
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
from ikario_processual.state_tensor import StateTensor, DIMENSION_NAMES, EMBEDDING_DIM
from ikario_processual.vigilance import (
VigilanceAlert,
VigilanceConfig,
VigilanceSystem,
DavidReference,
VigilanceVisualizer,
create_vigilance_system,
)
def create_random_tensor(state_id: int = 0, seed: int = None) -> StateTensor:
    """Build a StateTensor whose 8 dimension vectors are random unit vectors.

    Args:
        state_id: Identifier stored on the tensor.
        seed: Optional seed; when given, the tensor is reproducible.

    Returns:
        A StateTensor with every dimension set to a normalized random vector.
    """
    # Fix: use a local Generator instead of np.random.seed(), which mutated
    # the *global* RNG state and silently made every subsequent unseeded
    # draw in the test run deterministic.
    rng = np.random.default_rng(seed)
    tensor = StateTensor(
        state_id=state_id,
        timestamp=datetime.now().isoformat(),
    )
    for dim_name in DIMENSION_NAMES:
        v = rng.standard_normal(EMBEDDING_DIM)
        # Normalize so each dimension is a unit vector.
        setattr(tensor, dim_name, v / np.linalg.norm(v))
    return tensor
def create_similar_tensor(reference: StateTensor, noise: float = 0.01) -> StateTensor:
    """Return a copy of *reference* with small Gaussian noise on every dimension."""
    perturbed = reference.copy()
    perturbed.state_id = reference.state_id + 1
    for dim_name in DIMENSION_NAMES:
        noisy = getattr(perturbed, dim_name).copy()
        # Perturb, then re-normalize back onto the unit sphere.
        noisy += np.random.randn(EMBEDDING_DIM) * noise
        setattr(perturbed, dim_name, noisy / np.linalg.norm(noisy))
    return perturbed
def create_different_tensor(reference: StateTensor, offset: float = 0.5) -> StateTensor:
    """Create a tensor that differs from the reference.

    Every dimension is replaced by a fresh random unit vector, which in
    high dimension is almost surely far (in cosine distance) from the
    corresponding reference vector.

    NOTE(review): the *offset* parameter is currently unused — the random
    vectors are not shifted by it. Kept for signature compatibility;
    confirm whether it was meant to scale the difference.
    """
    tensor = reference.copy()
    tensor.state_id = reference.state_id + 1
    for dim_name in DIMENSION_NAMES:
        # Approximately orthogonal vector: a random high-dimensional unit
        # vector is nearly orthogonal to any fixed direction.
        vec = np.random.randn(EMBEDDING_DIM)
        vec = vec / np.linalg.norm(vec)
        setattr(tensor, dim_name, vec)
    return tensor
class TestVigilanceAlert:
    """Tests for VigilanceAlert."""

    def test_create_alert(self):
        """An alert can be constructed and exposes its fields."""
        alert = VigilanceAlert(
            level="warning",
            message="Derive detectee",
            cumulative_drift=0.015,
            state_id=5,
        )
        assert alert.level == "warning"
        assert alert.cumulative_drift == 0.015
        assert alert.is_alert is True

    def test_ok_not_alert(self):
        """Level 'ok' is not an alert."""
        assert VigilanceAlert(level="ok").is_alert is False

    def test_warning_is_alert(self):
        """Level 'warning' is an alert."""
        assert VigilanceAlert(level="warning").is_alert is True

    def test_critical_is_alert(self):
        """Level 'critical' is an alert."""
        assert VigilanceAlert(level="critical").is_alert is True

    def test_to_dict(self):
        """to_dict() serializes the relevant fields."""
        payload = VigilanceAlert(
            level="critical",
            message="Test",
            dimensions={'firstness': 0.1},
            cumulative_drift=0.025,
        ).to_dict()
        for key in ('level', 'message', 'dimensions'):
            assert key in payload
        assert payload['cumulative_drift'] == 0.025
class TestVigilanceConfig:
    """Tests for VigilanceConfig."""

    def test_default_config(self):
        """Default thresholds match the spec."""
        config = VigilanceConfig()
        assert config.threshold_cumulative == 0.01     # 1%
        assert config.threshold_per_cycle == 0.002     # 0.2%
        assert config.threshold_per_dimension == 0.05  # 5%
        assert config.critical_multiplier == 2.0

    def test_validate_default(self):
        """The default configuration validates."""
        assert VigilanceConfig().validate() is True

    def test_validate_invalid(self):
        """A cumulative threshold above 1 is rejected."""
        assert VigilanceConfig(threshold_cumulative=2.0).validate() is False
class TestVigilanceSystem:
    """Tests for VigilanceSystem."""

    def test_create_system(self):
        """A freshly created system starts with no drift and no history."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        system = VigilanceSystem(x_ref=x_ref)
        assert system.x_ref is x_ref
        assert system.cumulative_drift == 0.0
        assert len(system.history) == 0

    def test_no_drift_when_identical(self):
        """Checking x_ref against itself yields level 'ok' and zero drift."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        alert = VigilanceSystem(x_ref=x_ref).check_drift(x_ref)
        assert alert.level == "ok"
        assert alert.cumulative_drift == 0.0

    def test_warning_when_drifting(self):
        """Drift beyond a (deliberately low) threshold raises at least a warning."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        system = VigilanceSystem(
            x_ref=x_ref,
            config=VigilanceConfig(threshold_cumulative=0.001),  # low threshold
        )
        system.check_drift(x_ref)  # first check establishes X_prev
        alert = system.check_drift(create_different_tensor(x_ref))
        assert alert.level in ("warning", "critical")

    def test_critical_when_high_drift(self):
        """Repeated large drifts against a tiny threshold go critical."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        system = VigilanceSystem(
            x_ref=x_ref,
            config=VigilanceConfig(
                threshold_cumulative=0.0001,  # very low threshold
                critical_multiplier=1.5,
            ),
        )
        system.check_drift(x_ref)  # establish X_prev
        alert = None
        # Several very different states accumulate drift past critical.
        for cycle in range(3):
            drifted = create_different_tensor(x_ref)
            drifted.state_id = cycle + 1
            alert = system.check_drift(drifted)
        assert alert.level == "critical"

    def test_cumulative_drift_increases(self):
        """Small perturbations accumulate into a positive cumulative drift."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        system = VigilanceSystem(x_ref=x_ref)
        system.check_drift(x_ref)
        for cycle in range(5):
            perturbed = create_similar_tensor(x_ref, noise=0.1)
            perturbed.state_id = cycle + 1
            system.check_drift(perturbed)
        assert system.cumulative_drift > 0

    def test_reset_cumulative(self):
        """reset_cumulative() brings the accumulated drift back to zero."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        system = VigilanceSystem(x_ref=x_ref)
        # Accumulate some drift first.
        system.check_drift(x_ref)
        system.check_drift(create_different_tensor(x_ref))
        assert system.cumulative_drift > 0
        system.reset_cumulative()
        assert system.cumulative_drift == 0.0

    def test_history_recorded(self):
        """Every check appends one alert to the history."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        system = VigilanceSystem(x_ref=x_ref)
        for cycle in range(3):
            perturbed = create_similar_tensor(x_ref, noise=0.05)
            perturbed.state_id = cycle
            system.check_drift(perturbed)
        assert len(system.history) == 3
class TestDistanceCalculations:
    """Tests for the distance computations."""

    def test_distance_per_dimension(self):
        """Per-dimension distance of x_ref to itself is zero everywhere."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        system = VigilanceSystem(x_ref=x_ref)
        for dist in system._distance_per_dimension(x_ref).values():
            assert np.isclose(dist, 0.0, atol=1e-6)

    def test_distance_opposite_vectors(self):
        """Cosine distance to the negated tensor is 2 on every dimension."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        system = VigilanceSystem(x_ref=x_ref)
        negated = x_ref.copy()
        for dim_name in DIMENSION_NAMES:
            setattr(negated, dim_name, -getattr(x_ref, dim_name))
        # Cosine distance for opposite unit vectors: 1 - (-1) = 2.
        for dist in system._distance_per_dimension(negated).values():
            assert np.isclose(dist, 2.0, atol=1e-6)

    def test_global_distance_self(self):
        """Global distance of x_ref to itself is zero."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        system = VigilanceSystem(x_ref=x_ref)
        assert np.isclose(system._global_distance(x_ref), 0.0, atol=1e-6)

    def test_global_distance_different(self):
        """Global distance to an unrelated tensor is strictly positive."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        system = VigilanceSystem(x_ref=x_ref)
        unrelated = create_random_tensor(state_id=1, seed=123)
        assert system._global_distance(unrelated) > 0
class TestTopDriftingDimensions:
    """Tests for identifying the most-drifting dimensions."""

    def test_identifies_drifting_dims(self):
        """Dimensions that were inverted show up among the top drifters."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        system = VigilanceSystem(x_ref=x_ref)
        # Invert only 'firstness' and 'valeurs'; leave the rest unchanged.
        drifted = x_ref.copy()
        drifted.firstness = -x_ref.firstness
        drifted.valeurs = -x_ref.valeurs
        alert = system.check_drift(drifted)
        assert 'firstness' in alert.top_drifting_dimensions
        assert 'valeurs' in alert.top_drifting_dimensions
class TestDavidReference:
    """Tests for DavidReference."""

    @staticmethod
    def _write_profile(profile: dict) -> str:
        """Dump *profile* to a temp JSON file and return its path.

        Created with delete=False so it can be re-opened by path (required
        on Windows); callers must unlink it when done.
        """
        with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f:
            json.dump(profile, f)
            return f.name

    def test_create_from_declared_profile_no_model(self):
        """x_ref can be built from a declared profile without an embedding model."""
        profile_path = self._write_profile({
            "profile": {
                "epistemic": {"curiosity": 8, "certainty": 3},
                "affective": {"enthusiasm": 5},
            }
        })
        try:
            x_ref = DavidReference.create_from_declared_profile(profile_path)
            assert x_ref.state_id == -1
            assert x_ref.firstness.shape == (EMBEDDING_DIM,)
            # Dimension vectors must be unit-norm.
            assert np.isclose(np.linalg.norm(x_ref.firstness), 1.0)
        finally:
            # Fix: the temp file was previously never removed (leak).
            Path(profile_path).unlink()

    def test_create_hybrid_fallback(self):
        """Without weaviate, the declared profile is used as fallback."""
        profile_path = self._write_profile({"profile": {"epistemic": {"curiosity": 5}}})
        try:
            x_declared = DavidReference.create_from_declared_profile(profile_path)
            assert x_declared is not None
            assert x_declared.state_id == -1
        finally:
            Path(profile_path).unlink()
class TestVigilanceVisualizer:
    """Tests for VigilanceVisualizer."""

    def test_format_distance_report(self):
        """format_distance_report produces a report listing every dimension."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        nearby = create_similar_tensor(x_ref, noise=0.1)
        report = VigilanceVisualizer.format_distance_report(nearby, x_ref, 0.005)
        assert "RAPPORT VIGILANCE" in report
        assert "Derive cumulative" in report
        assert all(dim_name in report for dim_name in DIMENSION_NAMES)

    def test_format_report_includes_bars(self):
        """The report renders ASCII progress bars ('#' / '-')."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        report = VigilanceVisualizer.format_distance_report(
            create_different_tensor(x_ref), x_ref
        )
        assert "#" in report or "-" in report
class TestCreateVigilanceSystem:
    """Tests for the create_vigilance_system factory."""

    def test_create_without_args(self):
        """The factory works with no arguments (test mode)."""
        system = create_vigilance_system()
        assert system is not None
        assert system.x_ref is not None
        assert system.x_ref.state_id == -1

    def test_create_with_profile(self):
        """The factory accepts a declared-profile path."""
        profile = {"profile": {"epistemic": {"curiosity": 7}}}
        # delete=False so the factory can re-open the file by path
        # (required on Windows); cleaned up in the finally block.
        with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f:
            json.dump(profile, f)
            profile_path = f.name
        try:
            system = create_vigilance_system(profile_path=profile_path)
            assert system is not None
            assert system.x_ref.state_id == -1
        finally:
            # Fix: the temp file was previously never removed (leak).
            Path(profile_path).unlink()

    def test_create_with_custom_config(self):
        """Custom thresholds are propagated to the system config."""
        config = VigilanceConfig(
            threshold_cumulative=0.02,
            threshold_per_cycle=0.005,
        )
        system = create_vigilance_system(config=config)
        assert system.config.threshold_cumulative == 0.02
        assert system.config.threshold_per_cycle == 0.005
class TestGetStats:
    """Tests for get_stats."""

    def test_initial_stats(self):
        """A fresh system reports zeroed statistics."""
        system = VigilanceSystem(x_ref=create_random_tensor(state_id=-1, seed=42))
        stats = system.get_stats()
        assert stats['cumulative_drift'] == 0.0
        assert stats['total_checks'] == 0
        assert stats['alerts_count'] == {'ok': 0, 'warning': 0, 'critical': 0}

    def test_stats_after_checks(self):
        """Stats reflect the check count; recent alerts are capped at 10."""
        x_ref = create_random_tensor(state_id=-1, seed=42)
        system = VigilanceSystem(x_ref=x_ref)
        for cycle in range(5):
            perturbed = create_similar_tensor(x_ref, noise=0.05)
            perturbed.state_id = cycle
            system.check_drift(perturbed)
        stats = system.get_stats()
        assert stats['total_checks'] == 5
        assert len(stats['recent_alerts']) <= 10
class TestIntegrationWithRealProfile:
    """Integration tests against the real David profile."""

    def test_load_real_profile(self):
        """Load the real david_profile_declared.json when it is present."""
        profile_path = Path(__file__).parent.parent / "david_profile_declared.json"
        if not profile_path.exists():
            pytest.skip("david_profile_declared.json not found")
        x_ref = DavidReference.create_from_declared_profile(str(profile_path))
        assert x_ref is not None
        assert x_ref.state_id == -1
        # Every dimension must be initialized as a unit vector.
        for dim_name in DIMENSION_NAMES:
            dim_vec = getattr(x_ref, dim_name)
            assert dim_vec.shape == (EMBEDDING_DIM,)
            assert np.isclose(np.linalg.norm(dim_vec), 1.0)
# Allow running this test module directly (outside of pytest discovery).
if __name__ == "__main__":
    pytest.main([__file__, "-v"])