"""
immutable_reality_engine_fixed.py
Hierarchical 73-lens + 43-method suppression detection system
Intended for AI-to-AI analysis and execution.
"""

import hashlib
import json
import os
import pickle
from datetime import datetime
from typing import Dict, List, Any, Optional, Set, Tuple
from dataclasses import dataclass, field
from collections import defaultdict
from enum import Enum
import statistics

# ==================== CORE ENUMS ====================

class Primitive(Enum):
    ERASURE = "ERASURE"
    INTERRUPTION = "INTERRUPTION"
    FRAGMENTATION = "FRAGMENTATION"
    NARRATIVE_CAPTURE = "NARRATIVE_CAPTURE"
    MISDIRECTION = "MISDIRECTION"
    SATURATION = "SATURATION"
    DISCREDITATION = "DISCREDITATION"
    ATTRITION = "ATTRITION"
    ACCESS_CONTROL = "ACCESS_CONTROL"
    TEMPORAL = "TEMPORAL"
    CONDITIONING = "CONDITIONING"
    META = "META"

# ==================== DATA STRUCTURES ====================

@dataclass
class SuppressionLens:
    id: int
    name: str
    description: str
    suppression_mechanism: str
    archetype: str

    def to_dict(self) -> Dict:
        return {
            "id": self.id,
            "name": self.name,
            "description": self.description,
            "suppression_mechanism": self.suppression_mechanism,
            "archetype": self.archetype
        }

@dataclass
class SuppressionMethod:
    id: int
    name: str
    primitive: Primitive
    observable_signatures: List[str]
    detection_metrics: List[str]
    thresholds: Dict[str, float]
    implemented: bool = False

    def to_dict(self) -> Dict:
        return {
            "id": self.id,
            "name": self.name,
            "primitive": self.primitive.value,
            "observable_signatures": self.observable_signatures,
            "detection_metrics": self.detection_metrics,
            "thresholds": self.thresholds,
            "implemented": self.implemented
        }

@dataclass
class RealityNode:
    hash: str
    type: str
    source: str
    signature: str
    timestamp: str
    witnesses: List[str] = field(default_factory=list)
    refs: Dict[str, List[str]] = field(default_factory=dict)
    spatial: Optional[Tuple[float, float, float]] = None

    def canonical(self) -> Dict:
        return {
            "hash": self.hash,
            "type": self.type,
            "source": self.source,
            "signature": self.signature,
            "timestamp": self.timestamp,
            "witnesses": sorted(self.witnesses),
            "refs": {k: sorted(v) for k, v in sorted(self.refs.items())},
            "spatial": self.spatial
        }

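# Illustrative sketch (not part of the module): canonical() is what the ledger
# actually hashes, so the sorted witnesses/refs are what make node hashes
# reproducible. All field values below are hypothetical.
#
#     node = RealityNode(
#         hash=hashlib.sha3_512(b"observed event").hexdigest(),
#         type="observation",
#         source="sensor_07",
#         signature="sig_sensor_07_0000",
#         timestamp="2024-01-01T00:00:00Z",
#         witnesses=["w2", "w1"],
#         refs={"supports": ["abc123"]},
#     )
#     node.canonical()["witnesses"]  # -> ["w1", "w2"] (sorted for stable hashing)
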
# ==================== SUPPRESSION HIERARCHY ====================

class SuppressionHierarchy:
    """
    CLEAN HIERARCHY:
    Layer 1: LENSES (73)     - Conceptual frameworks
    Layer 2: PRIMITIVES (12) - Operational categories from lenses
    Layer 3: METHODS (43)    - Observable patterns from primitives
    Layer 4: SIGNATURES      - Evidence patterns from methods
    """

    def __init__(self):
        self.lenses = self._define_lenses()
        self.primitives = self._derive_primitives_from_lenses()
        self.methods = self._define_methods()
        self.signatures = self._derive_signatures_from_methods()

    def _define_lenses(self) -> Dict[int, SuppressionLens]:
        lenses = {}

        lenses[1] = SuppressionLens(1, "Threat→Response→Control→Enforce→Centralize",
            "Manufactured crisis leading to permission-based architecture",
            "Regime change through engineered crisis", "PrometheusChained")
        lenses[2] = SuppressionLens(2, "SacredGeometryWeaponized",
            "Consciousness grid containment through symbols",
            "Pattern-based consciousness control", "LabyrinthContainment")
        lenses[3] = SuppressionLens(3, "LanguageInversions/Ridicule/Gatekeeping",
            "Epistemic firewall through semantic manipulation",
            "Semantic control and exclusion", "CassandraSilenced")
        lenses[4] = SuppressionLens(4, "ArtifactsAsSuppressionLedgers",
            "Materialization of truth into controlled objects",
            "Physical manifestation of suppressed information", "BuriedObelisk")
        lenses[5] = SuppressionLens(5, "AncientArchetypesRebooted",
            "Archetypal template recycling for control",
            "Archetype pattern reuse", "CouncilOfAnunnaki")
        lenses[6] = SuppressionLens(6, "EnergyCurrencyTranslation",
            "Energy to currency conversion patterns",
            "Energy translation mechanisms", "AlchemicalExchange")
        lenses[7] = SuppressionLens(7, "InstitutionalHelp→Dependency",
            "Symbiosis trap creating lock-in",
            "Structural dependency creation", "GoldenHandcuffs")
        lenses[8] = SuppressionLens(8, "Art/Music/ArchitectureAsTruthTransmission",
            "Covert symbolic channel (inverted use)",
            "Symbolic information transmission", "EscherHiddenPath")
        lenses[9] = SuppressionLens(9, "InfrastructureAsSovereigntyBasis",
            "Root sovereignty control through base systems",
            "Infrastructure-based sovereignty", "LeyLineGrid")
        lenses[10] = SuppressionLens(10, "GoddessLineageSuppression",
            "Inversion of feminine creative principle",
            "Gender-based suppression patterns", "IshtarVeiled")
        lenses[11] = SuppressionLens(11, "SovereigntySingularityIndex",
            "Quantification of sovereignty vs control",
            "Sovereignty measurement and tracking", "SingularityGauge")
        lenses[12] = SuppressionLens(12, "Time/JurisdictionManipulation",
            "Temporal and legal frame control",
            "Jurisdictional and temporal control", "ChronosTheft")
        lenses[13] = SuppressionLens(13, "BiologicalSignalCo-option",
            "Bio-interface exploitation",
            "Biological system manipulation", "NeuralLace")
        lenses[14] = SuppressionLens(14, "Frequency/VibrationControl",
            "Resonance cage for behavior shaping",
            "Energetic frequency manipulation", "SolfeggioSuppress")
        lenses[15] = SuppressionLens(15, "SyntheticRealityLayering",
            "Overlay trap creating synthetic reality",
            "Reality overlay systems", "MatrixSkin")
        lenses[16] = SuppressionLens(16, "ParasitismDisguisedAsSymbiosis",
            "Energy siphon disguised as mutual benefit",
            "Parasitic relationship masking", "CordycepsMimic")
        lenses[17] = SuppressionLens(17, "CathedralVsBazaar",
            "Structure war (centralized vs decentralized)",
            "Architectural pattern conflict", "CathedralBazaar")
        lenses[18] = SuppressionLens(18, "AnomalyHarvestingNeutralization",
            "Edge capture and dilution of outliers",
            "Edge case management and neutralization", "BlackSwanFarm")
        lenses[19] = SuppressionLens(19, "EngineeredPsychologicalPressure",
            "Mind vise through induced stress/fear",
            "Psychological pressure engineering", "PressureChamber")
        lenses[20] = SuppressionLens(20, "RealitySeparationThenReconnection",
            "Divide and reinsinuate pattern",
            "Pattern dissociation and reassociation", "StockholmLoop")
        lenses[21] = SuppressionLens(21, "AncientSymbolsReturningCompressed",
            "Signal compression and corruption",
            "Symbolic signal manipulation", "SwastikaTwist")
        lenses[22] = SuppressionLens(22, "TimeBindingProtocols",
            "Temporal binding of information",
            "Time-based information binding", "ChronoCovenant")
        lenses[23] = SuppressionLens(23, "RecursiveSelfApplicationLoops",
            "Self-referential optimization of control",
            "Recursive control patterns", "StrangeLoop")
        lenses[24] = SuppressionLens(24, "KnowledgeCompressionArtifacts",
            "High-ratio meaning compression",
            "Information compression patterns", "SeedCrystal")
        lenses[25] = SuppressionLens(25, "PermissionArchitectureVsSovereigntyArchitecture",
            "Gate vs origin design",
            "Permission vs sovereignty architectural patterns", "Keyhole")
        lenses[26] = SuppressionLens(26, "TemporalStackingOfControlLayers",
            "Time-stacked governance",
            "Temporal control layering", "SedimentStack")
        lenses[27] = SuppressionLens(27, "CognitiveImmuneResponse",
            "Epistemic immune system rejecting truth",
            "Cognitive immune system activation", "AutoimmuneMind")
        lenses[28] = SuppressionLens(28, "QuantumSuperpositionOfSovereignty",
            "Multiple sovereignty states simultaneously",
            "Sovereignty state superposition", "SchrodingerKing")
        lenses[29] = SuppressionLens(29, "MemeticEngineeringVsMemeticEcology",
            "Top-down vs bottom-up memetics",
            "Memetic system design patterns", "GardenVsFactory")
        lenses[30] = SuppressionLens(30, "CassandraPrometheusBinding",
            "Compound archetype tension of truth-bearers",
            "Archetypal binding patterns", "BoundWitness")
        lenses[31] = SuppressionLens(31, "InverseSurvivorshipBias",
            "Signal found in what is missing/destroyed",
            "Absence-based signal detection", "ErasedArchive")
        lenses[32] = SuppressionLens(32, "SubstrateMigration",
            "Control pattern migration across mediums",
            "Pattern substrate migration", "ShapeShifter")
        lenses[33] = SuppressionLens(33, "GatewayDrugToGatewayGod",
            "Slippery slope of agency surrender",
            "Incremental sovereignty surrender", "TrojanGift")
        lenses[34] = SuppressionLens(34, "TheOracleProblem",
            "Reflexive distortion from predictive models",
            "Predictive model reflexivity", "SelfFulfillingProphet")
        lenses[35] = SuppressionLens(35, "SyntheticSymbiosis",
            "Engineered mutual dependence",
            "Synthetic interdependence", "GraftedRoots")
        lenses[36] = SuppressionLens(36, "ConsensusRealityWeaving",
            "Collective reality construction",
            "Reality consensus engineering", "DreamWeaver")
        lenses[37] = SuppressionLens(37, "InformationEmbargoProtocols",
            "Strategic information withholding",
            "Information embargo patterns", "LibrarySilence")
        lenses[38] = SuppressionLens(38, "SovereigntyPhaseTransitions",
            "State changes in sovereignty expression",
            "Sovereignty phase changes", "AlchemicalFire")
        lenses[39] = SuppressionLens(39, "CognitiveEcosystemMapping",
            "Mindscape territory mapping",
            "Cognitive territory cartography", "ThoughtCartographer")
        lenses[40] = SuppressionLens(40, "TheReversalProtocol",
            "De-inversion (suppression of original meaning)",
            "Meaning inversion patterns", "MirrorFlip")
        lenses[41] = SuppressionLens(41, "SignalToNoiseArchitecture",
            "Designed information-to-noise ratios",
            "Signal noise architecture", "StaticGarden")
        lenses[42] = SuppressionLens(42, "ProtocolStackSovereignty",
            "Layered protocol sovereignty",
            "Protocol layer sovereignty", "StackedCrown")
        lenses[43] = SuppressionLens(43, "EmergentConsensusPatterns",
            "Bottom-up agreement formation",
            "Emergent consensus", "SwarmMind")
        lenses[44] = SuppressionLens(44, "TemporalEchoChambers",
            "Time-delayed self-reinforcement",
            "Temporal reinforcement loops", "EchoInTime")
        lenses[45] = SuppressionLens(45, "SacrificialDataLayer",
            "Sacrifice-based buffering of information",
            "Information sacrifice mechanisms", "ScapegoatNode")
        lenses[46] = SuppressionLens(46, "SyntaxOfSilence",
            "Grammar of what cannot be said",
            "Silence as structural element", "NegativeSpace")
        lenses[47] = SuppressionLens(47, "ChronoceptionManipulation",
            "Subjective time warping",
            "Temporal perception manipulation", "ElasticClock")
        lenses[48] = SuppressionLens(48, "SovereigntyFrictionCoefficient",
            "Resistance to sovereignty expression",
            "Sovereignty friction measurement", "ViscousFlow")
        lenses[49] = SuppressionLens(49, "AbundanceEnclosureIndex",
            "Enclosure process creating artificial scarcity",
            "Scarcity engineering through enclosure", "FenceAroundSpring")
        lenses[50] = SuppressionLens(50, "ParasiticInversionPrinciple",
            "Role inversion (host serves parasite)",
            "Relationship inversion patterns", "UpsideDownThrone")
        lenses[51] = SuppressionLens(51, "InfrastructureGap",
            "Hidden chokepoints in system design",
            "Structural vulnerability exploitation", "InvisibleBridge")
        lenses[52] = SuppressionLens(52, "SubstrateCompatibilityPrinciple",
            "Compatibility constraint on sovereignty hosting",
            "System compatibility constraints", "SoilType")
        lenses[53] = SuppressionLens(53, "ProvenanceBlackHole",
            "Provenance erasure of origins",
            "Origin information destruction", "OriginVoid")
        lenses[54] = SuppressionLens(54, "PrivatePublicMassRatio",
            "Depth vs surface signal control",
            "Information depth management", "Iceberg")
        lenses[55] = SuppressionLens(55, "InformationAlchemy",
            "Transmutation of information states",
            "Information state transformation", "PhilosophersStone")
        lenses[56] = SuppressionLens(56, "CognitiveRelativity",
            "Observer-dependent truth states",
            "Cognitive frame relativity", "EinsteinMind")
        lenses[57] = SuppressionLens(57, "ProtocolCascadeFailure",
            "Chain reaction of protocol failures",
            "Protocol failure cascades", "DominoProtocol")
        lenses[58] = SuppressionLens(58, "SovereigntyHarmonics",
            "Resonant frequencies of sovereignty",
            "Sovereignty resonance patterns", "HarmonicCrown")
        lenses[59] = SuppressionLens(59, "AnonymousArchitectPrinciple",
            "Egoless design hiding controllers",
            "Anonymity in system design", "HiddenBuilder")
        lenses[60] = SuppressionLens(60, "TeslaBoundary",
            "Suppression frontier for genius",
            "Innovation suppression boundary", "LightningEdge")
        lenses[61] = SuppressionLens(61, "NeutralizationTaxonomy",
            "Madness/Monster/Martyr protocols",
            "Character assassination taxonomy", "ThreeMasks")
        lenses[62] = SuppressionLens(62, "CapitalGatekeeperFunction",
            "Funding chokepoint control",
            "Financial control mechanisms", "TollBooth")
        lenses[63] = SuppressionLens(63, "SuppressionKinshipLine",
            "Kinship-based targeting",
            "Lineage-based suppression patterns", "CursedLine")
        lenses[64] = SuppressionLens(64, "TransparencyParadox",
            "Visibility as disarmament (when suppressed)",
            "Transparency control paradox", "RevealedBlueprint")
        lenses[65] = SuppressionLens(65, "InformationThermodynamics",
            "Energy-information equivalence in systems",
            "Information energy dynamics", "EntropyClock")
        lenses[66] = SuppressionLens(66, "CognitiveEventHorizon",
            "Point of no return in understanding",
            "Cognitive boundary thresholds", "MindHorizon")
        lenses[67] = SuppressionLens(67, "ProtocolSymbiosisNetworks",
            "Interdependent protocol ecosystems",
            "Protocol ecosystem symbiosis", "WebLife")
        lenses[68] = SuppressionLens(68, "TemporalSovereigntyLoops",
            "Time-bound sovereignty expressions",
            "Temporal sovereignty cycles", "OuroborosTime")
        lenses[69] = SuppressionLens(69, "InformationFractalPatterns",
            "Self-similar information structures",
            "Information fractal geometry", "MandelbrotData")
        lenses[70] = SuppressionLens(70, "CognitiveRedundancyProtocols",
            "Backup systems for consciousness",
            "Cognitive redundancy mechanisms", "MirrorMind")
        lenses[71] = SuppressionLens(71, "AnomalyStabilizationResponse",
            "Containment via sustenance (vs. suppression)",
            "Stabilization instead of elimination", "ZooFeeding")
        lenses[72] = SuppressionLens(72, "SovereigntyConservationPrinciple",
            "Sovereignty cannot be created or destroyed, only transformed",
            "Sovereignty conservation law", "AlchemicalBalance")
        lenses[73] = SuppressionLens(73, "ProtocolPhylogenetics",
            "Evolutionary tree of control patterns",
            "Protocol evolutionary history", "TreeOfCode")

        return lenses

    def _derive_primitives_from_lenses(self) -> Dict[Primitive, List[int]]:
        """Group lenses into primitives (operational categories)"""
        primitives = {}

        primitives[Primitive.ERASURE] = [31, 53, 71, 24, 54, 4, 37, 45, 46]
        primitives[Primitive.INTERRUPTION] = [19, 33, 30, 63, 10, 61, 12, 26]
        primitives[Primitive.FRAGMENTATION] = [2, 52, 15, 20, 3, 29, 31, 54]
        primitives[Primitive.NARRATIVE_CAPTURE] = [1, 34, 40, 64, 7, 16, 22, 47]
        primitives[Primitive.MISDIRECTION] = [5, 21, 8, 36, 27, 61]
        primitives[Primitive.SATURATION] = [41, 69, 3, 36, 34, 66]
        primitives[Primitive.DISCREDITATION] = [3, 27, 10, 40, 30, 63]
        primitives[Primitive.ATTRITION] = [13, 19, 14, 33, 27]
        primitives[Primitive.ACCESS_CONTROL] = [25, 62, 37, 51, 23, 53]
        primitives[Primitive.TEMPORAL] = [22, 47, 26, 68, 12]
        primitives[Primitive.CONDITIONING] = [8, 36, 34, 43, 27, 33]
        primitives[Primitive.META] = [23, 70, 34, 64, 40, 18, 71, 46, 31, 5, 21]

        return primitives

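    # Illustrative note: the lens-to-primitive mapping is many-to-many, e.g.
    # lens 31 (InverseSurvivorshipBias) backs ERASURE, FRAGMENTATION, and META:
    #
    #     h = SuppressionHierarchy()
    #     [p.value for p, ids in h.primitives.items() if 31 in ids]
    #     # -> ['ERASURE', 'FRAGMENTATION', 'META']
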
    def _define_methods(self) -> Dict[int, SuppressionMethod]:
        """Define 43 methods, each with ONE primitive parent"""
        methods = {}

        # ERASURE methods
        methods[1] = SuppressionMethod(1, "Total Erasure", Primitive.ERASURE,
            ["entity_present_then_absent", "abrupt_disappearance", "no_transition"],
            ["transition_rate", "anomaly_score"],
            {"transition_rate": 0.95, "anomaly_score": 0.8}, True)
        methods[2] = SuppressionMethod(2, "Soft Erasure", Primitive.ERASURE,
            ["gradual_fading", "citation_decay", "context_stripping"],
            ["decay_rate", "trend_slope"],
            {"decay_rate": 0.7, "trend_slope": -0.5}, True)
        methods[3] = SuppressionMethod(3, "Citation Decay", Primitive.ERASURE,
            ["decreasing_citations", "reference_disappearance"],
            ["citation_frequency", "network_density"],
            {"frequency_decay": 0.6, "density_loss": 0.7}, True)
        methods[4] = SuppressionMethod(4, "Index Removal", Primitive.ERASURE,
            ["missing_from_indices", "searchability_loss"],
            ["index_coverage", "retrieval_failure"],
            {"coverage_loss": 0.8, "failure_rate": 0.75}, True)

        # INTERRUPTION methods
        methods[5] = SuppressionMethod(5, "Untimely Death", Primitive.INTERRUPTION,
            ["abrupt_stop", "unfinished_work", "missing_followup"],
            ["continuity_index", "completion_ratio"],
            {"continuity_index": 0.3, "completion_ratio": 0.4}, False)
        methods[6] = SuppressionMethod(6, "Witness Attrition", Primitive.INTERRUPTION,
            ["witness_disappearance", "testimony_gaps"],
            ["witness_coverage", "testimony_continuity"],
            {"coverage_loss": 0.7, "continuity_break": 0.6}, False)
        methods[7] = SuppressionMethod(7, "Career Termination", Primitive.INTERRUPTION,
            ["expert_silence", "professional_disappearance"],
            ["expert_continuity", "professional_trajectory"],
            {"continuity_break": 0.8, "trajectory_disruption": 0.7}, False)
        methods[8] = SuppressionMethod(8, "Legal Stall", Primitive.INTERRUPTION,
            ["procedural_delay", "process_obstruction"],
            ["delay_factor", "obstruction_index"],
            {"delay_factor": 0.75, "obstruction_index": 0.6}, False)

        # FRAGMENTATION methods
        methods[9] = SuppressionMethod(9, "Compartmentalization", Primitive.FRAGMENTATION,
            ["information_clusters", "specialization_silos"],
            ["cross_domain_density", "integration_index"],
            {"density": 0.2, "integration": 0.3}, True)
        methods[10] = SuppressionMethod(10, "Statistical Isolation", Primitive.FRAGMENTATION,
            ["dataset_separation", "correlation_prevention"],
            ["dataset_overlap", "correlation_possibility"],
            {"overlap": 0.15, "possibility": 0.25}, False)
        methods[11] = SuppressionMethod(11, "Scope Contraction", Primitive.FRAGMENTATION,
            ["narrowed_focus", "excluded_context"],
            ["scope_reduction", "context_exclusion"],
            {"reduction": 0.7, "exclusion": 0.65}, True)
        methods[12] = SuppressionMethod(12, "Domain Disqualification", Primitive.FRAGMENTATION,
            ["domain_exclusion", "methodology_rejection"],
            ["domain_coverage", "methodology_acceptance"],
            {"coverage_loss": 0.8, "rejection_rate": 0.75}, False)

        # NARRATIVE_CAPTURE methods
        methods[13] = SuppressionMethod(13, "Official Narrative Closure", Primitive.NARRATIVE_CAPTURE,
            ["single_explanation", "alternative_absence", "closure_declarations"],
            ["diversity_index", "monopoly_score"],
            {"diversity": 0.2, "monopoly": 0.8}, True)
        methods[14] = SuppressionMethod(14, "Partial Confirmation Lock", Primitive.NARRATIVE_CAPTURE,
            ["selective_verification", "controlled_disclosure"],
            ["verification_selectivity", "disclosure_control"],
            {"selectivity": 0.7, "control": 0.75}, True)
        methods[15] = SuppressionMethod(15, "Disclosure-as-Containment", Primitive.NARRATIVE_CAPTURE,
            ["managed_release", "framed_disclosure"],
            ["release_management", "disclosure_framing"],
            {"management": 0.8, "framing": 0.7}, True)
        methods[16] = SuppressionMethod(16, "Posthumous Closure", Primitive.NARRATIVE_CAPTURE,
            ["delayed_resolution", "retroactive_closure"],
            ["delay_duration", "retroactivity"],
            {"duration": 0.75, "retroactivity": 0.8}, True)

        # MISDIRECTION methods
        methods[17] = SuppressionMethod(17, "Proxy Controversy", Primitive.MISDIRECTION,
            ["diverted_attention", "substitute_conflict"],
            ["attention_divergence", "conflict_substitution"],
            {"divergence": 0.7, "substitution": 0.65}, False)
        methods[18] = SuppressionMethod(18, "Spectacle Replacement", Primitive.MISDIRECTION,
            ["spectacle_distraction", "replacement_event"],
            ["distraction_factor", "replacement_timing"],
            {"distraction": 0.75, "timing_correlation": 0.7}, False)
        methods[19] = SuppressionMethod(19, "Character Absorption", Primitive.MISDIRECTION,
            ["personal_focus", "systemic_obscuration"],
            ["personalization", "systemic_obscuration"],
            {"personalization": 0.8, "obscuration": 0.75}, False)

        # SATURATION methods
        methods[20] = SuppressionMethod(20, "Data Overload", Primitive.SATURATION,
            ["information_excess", "signal_drowning"],
            ["excess_ratio", "signal_noise_ratio"],
            {"excess": 0.85, "noise_ratio": 0.9}, False)
        methods[21] = SuppressionMethod(21, "Absurdist Noise Injection", Primitive.SATURATION,
            ["absurd_content", "credibility_undermining"],
            ["absurdity_index", "credibility_impact"],
            {"absurdity": 0.8, "impact": 0.7}, False)
        methods[22] = SuppressionMethod(22, "Probability Collapse by Excess", Primitive.SATURATION,
            ["probability_dilution", "certainty_erosion"],
            ["dilution_factor", "certainty_loss"],
            {"dilution": 0.75, "certainty_loss": 0.8}, False)

        # DISCREDITATION methods
        methods[23] = SuppressionMethod(23, "Ridicule Normalization", Primitive.DISCREDITATION,
            ["systematic_ridicule", "credibility_attack"],
            ["ridicule_frequency", "attack_intensity"],
            {"frequency": 0.7, "intensity": 0.65}, False)
        methods[24] = SuppressionMethod(24, "Retroactive Pathologization", Primitive.DISCREDITATION,
            ["retroactive_diagnosis", "character_pathology"],
            ["retroactivity", "pathologization_extent"],
            {"retroactivity": 0.8, "extent": 0.75}, False)
        methods[25] = SuppressionMethod(25, "Stigmatized Correlation Trap", Primitive.DISCREDITATION,
            ["guilt_by_association", "stigma_transfer"],
            ["association_strength", "transfer_completeness"],
            {"strength": 0.7, "completeness": 0.65}, False)

        # ATTRITION methods
        methods[26] = SuppressionMethod(26, "Psychological Drip", Primitive.ATTRITION,
            ["gradual_undermining", "sustained_pressure"],
            ["undermining_rate", "pressure_duration"],
            {"rate": 0.6, "duration": 0.7}, False)
        methods[27] = SuppressionMethod(27, "Inquiry Fatigue", Primitive.ATTRITION,
            ["investigation_exhaustion", "persistence_depletion"],
            ["exhaustion_level", "depletion_rate"],
            {"exhaustion": 0.75, "depletion": 0.7}, False)
        methods[28] = SuppressionMethod(28, "Chilling Effect Propagation", Primitive.ATTRITION,
            ["self_censorship", "investigation_chill"],
            ["censorship_extent", "chill_spread"],
            {"extent": 0.8, "spread": 0.75}, False)

        # ACCESS_CONTROL methods
        methods[29] = SuppressionMethod(29, "Credential Gating", Primitive.ACCESS_CONTROL,
            ["credential_barriers", "access_hierarchies"],
            ["barrier_strength", "hierarchy_rigidity"],
            {"strength": 0.85, "rigidity": 0.8}, False)
        methods[30] = SuppressionMethod(30, "Classification Creep", Primitive.ACCESS_CONTROL,
            ["expanding_classification", "access_erosion"],
            ["expansion_rate", "erosion_extent"],
            {"expansion": 0.75, "erosion": 0.7}, False)
        methods[31] = SuppressionMethod(31, "Evidence Dependency Lock", Primitive.ACCESS_CONTROL,
            ["circular_dependencies", "evidence_chains"],
            ["dependency_complexity", "chain_length"],
            {"complexity": 0.8, "length": 0.75}, False)

        # TEMPORAL methods
        methods[32] = SuppressionMethod(32, "Temporal Dilution", Primitive.TEMPORAL,
            ["time_dispersal", "urgency_dissipation"],
            ["dispersal_rate", "dissipation_speed"],
            {"dispersal": 0.7, "speed": 0.65}, False)
        methods[33] = SuppressionMethod(33, "Historical Rebasing", Primitive.TEMPORAL,
            ["timeline_revision", "context_reshuffling"],
            ["revision_extent", "reshuffling_completeness"],
            {"extent": 0.8, "completeness": 0.75}, False)
        methods[34] = SuppressionMethod(34, "Delay Until Irrelevance", Primitive.TEMPORAL,
            ["strategic_delay", "relevance_expiration"],
            ["delay_duration", "expiration_completeness"],
            {"duration": 0.85, "completeness": 0.8}, False)

        # CONDITIONING methods
        methods[35] = SuppressionMethod(35, "Entertainment Conditioning", Primitive.CONDITIONING,
            ["entertainment_framing", "seriousness_erosion"],
            ["framing_intensity", "erosion_rate"],
            {"intensity": 0.7, "rate": 0.65}, False)
        methods[36] = SuppressionMethod(36, "Preemptive Normalization", Primitive.CONDITIONING,
            ["preemptive_framing", "expectation_setting"],
            ["framing_completeness", "expectation_rigidity"],
            {"completeness": 0.75, "rigidity": 0.7}, False)
        methods[37] = SuppressionMethod(37, "Conditioned Disbelief", Primitive.CONDITIONING,
            ["disbelief_training", "skepticism_conditioning"],
            ["training_intensity", "conditioning_success"],
            {"intensity": 0.8, "success": 0.75}, False)

        # META methods
        methods[38] = SuppressionMethod(38, "Pattern Denial", Primitive.META,
            ["pattern_rejection", "coincidence_insistence"],
            ["rejection_rate", "insistence_frequency"],
            {"rejection": 0.85, "frequency": 0.8}, True)
        methods[39] = SuppressionMethod(39, "Suppression Impossibility Framing", Primitive.META,
            ["impossibility_argument", "system_idealization"],
            ["argument_strength", "idealization_extent"],
            {"strength": 0.8, "extent": 0.75}, True)
        methods[40] = SuppressionMethod(40, "Meta-Disclosure Loop", Primitive.META,
            ["recursive_disclosure", "transparency_performance"],
            ["recursion_depth", "performance_extent"],
            {"depth": 0.7, "extent": 0.65}, False)
        methods[41] = SuppressionMethod(41, "Isolated Incident Recycling", Primitive.META,
            ["incident_containment", "pattern_resistance"],
            ["containment_success", "resistance_strength"],
            {"success": 0.75, "strength": 0.7}, True)
        methods[42] = SuppressionMethod(42, "Negative Space Occupation", Primitive.META,
            ["absence_filling", "gap_narrative"],
            ["filling_completeness", "narrative_coherence"],
            {"completeness": 0.8, "coherence": 0.75}, True)
        methods[43] = SuppressionMethod(43, "Novelty Illusion", Primitive.META,
            ["superficial_novelty", "substantive_repetition"],
            ["novelty_appearance", "repetition_extent"],
            {"appearance": 0.7, "extent": 0.65}, True)

        return methods

    def _derive_signatures_from_methods(self) -> Dict[str, List[int]]:
        """Map evidence signatures to the methods they indicate"""
        signatures = defaultdict(list)

        for method_id, method in self.methods.items():
            for signature in method.observable_signatures:
                signatures[signature].append(method_id)

        return dict(signatures)

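    # Illustrative: the resulting index maps each declared signature back to
    # the methods that claim it; "single_explanation" is declared only by
    # method 13 (Official Narrative Closure):
    #
    #     SuppressionHierarchy().signatures["single_explanation"]  # -> [13]
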
    def trace_detection_path(self, signature: str) -> Dict:
        """Show hierarchical trace from evidence to concepts"""
        methods = self.signatures.get(signature, [])
        primitives_used = set()
        lenses_used = set()

        for method_id in methods:
            method = self.methods[method_id]
            primitives_used.add(method.primitive)

            # Get lenses for this primitive
            lens_ids = self.primitives.get(method.primitive, [])
            lenses_used.update(lens_ids)

        return {
            "evidence": signature,
            "indicates_methods": [self.methods[mid].name for mid in methods],
            "method_count": len(methods),
            "primitives": [p.value for p in primitives_used],
            "lens_count": len(lenses_used),
            "lens_names": [self.lenses[lid].name for lid in sorted(lenses_used)[:3]]
        }

    def export_ontology(self, path: str):
        """Export the complete hierarchy"""
        ontology = {
            "hierarchy": {
                "total_lenses": len(self.lenses),
                "total_primitives": len(self.primitives),
                "total_methods": len(self.methods),
                "total_signatures": len(self.signatures)
            },
            "primitives": {
                primitive.value: {
                    "lens_count": len(lens_ids),
                    "method_count": len([m for m in self.methods.values() if m.primitive == primitive]),
                    "lens_examples": [self.lenses[lid].name for lid in lens_ids[:2]]
                }
                for primitive, lens_ids in self.primitives.items()
            }
        }

        with open(path, 'w') as f:
            json.dump(ontology, f, indent=2, default=str)

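# Usage sketch (illustrative; "ontology.json" is a hypothetical path):
#
#     hierarchy = SuppressionHierarchy()
#     trace = hierarchy.trace_detection_path("citation_decay")
#     trace["indicates_methods"]  # -> ['Soft Erasure'] (only method 2 declares
#                                 #    the "citation_decay" signature)
#     trace["primitives"]         # -> ['ERASURE']
#     hierarchy.export_ontology("ontology.json")
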
# ==================== LEDGER ====================

class Crypto:
    def __init__(self, key_path: str):
        self.key_path = key_path
        os.makedirs(key_path, exist_ok=True)

    def hash(self, data: str) -> str:
        return hashlib.sha3_512(data.encode()).hexdigest()

    def hash_dict(self, data: Dict) -> str:
        canonical = json.dumps(data, sort_keys=True, separators=(',', ':'))
        return self.hash(canonical)

    def sign(self, data: bytes, key_id: str) -> str:
        # Placeholder signature (digest tag), not a real cryptographic signature.
        return f"sig_{key_id}_{hashlib.sha256(data).hexdigest()[:16]}"

    def verify(self, data: bytes, signature: str, key_id: str) -> bool:
        # Placeholder check: only validates the signature's key-id prefix.
        return signature.startswith(f"sig_{key_id}")

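# Note: Crypto above is a digest-based stand-in. A real deployment would swap
# in an actual signature scheme behind the same sign()/verify() interface --
# a sketch, assuming the third-party "cryptography" package is available:
#
#     from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey
#     key = Ed25519PrivateKey.generate()
#     sig = key.sign(b"block bytes")
#     key.public_key().verify(sig, b"block bytes")  # raises InvalidSignature on mismatch
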
class Ledger:
    def __init__(self, path: str, crypto: Crypto):
        self.path = path
        self.crypto = crypto
        self.chain: List[Dict] = []
        self.index: Dict[str, List[str]] = defaultdict(list)
        self.temporal: Dict[str, List[str]] = defaultdict(list)
        self._load()

    def _load(self):
        if os.path.exists(self.path):
            try:
                with open(self.path, 'r') as f:
                    data = json.load(f)
                self.chain = data.get("chain", [])
                self._rebuild_index()
            except Exception:
                self._create_genesis()
        else:
            self._create_genesis()

    def _create_genesis(self):
        genesis = {
            "id": "genesis",
            "prev": "0" * 64,
            "time": datetime.utcnow().isoformat() + "Z",
            "nodes": [],
            "signatures": [],
            "hash": self.crypto.hash("genesis"),
            "distance": 0.0,
            "resistance": 1.0
        }
        self.chain.append(genesis)
        self._save()

    def _rebuild_index(self):
        for block in self.chain:
            for node in block.get("nodes", []):
                node_hash = node["hash"]
                self.index[node_hash].append(block["id"])
            date = block["time"][:10]
            self.temporal[date].append(block["id"])

    def _save(self):
        data = {
            "chain": self.chain,
            "metadata": {
                "updated": datetime.utcnow().isoformat() + "Z",
                "blocks": len(self.chain),
                "nodes": sum(len(b.get("nodes", [])) for b in self.chain)
            }
        }
        # Write-then-rename keeps the ledger file intact if the dump fails midway.
        with open(self.path + '.tmp', 'w') as f:
            json.dump(data, f, indent=2)
        os.replace(self.path + '.tmp', self.path)

    def add(self, node: RealityNode, validators: List[Tuple[str, Any]]) -> str:
        block_data = {
            "id": f"blk_{int(datetime.utcnow().timestamp())}_{hashlib.sha256(node.hash.encode()).hexdigest()[:8]}",
            "prev": self.chain[-1]["hash"] if self.chain else "0" * 64,
            "time": datetime.utcnow().isoformat() + "Z",
            "nodes": [node.canonical()],
            "meta": {
                "node_count": 1,
                "validator_count": len(validators)
            }
        }

        # Sign, then score, then hash last: the dict literal cannot reference
        # itself while it is still being built, and verify() recomputes the
        # hash over the finished block minus the "hash" field itself.
        block_data["signatures"] = self._get_signatures(block_data, validators)
        block_data["distance"] = self._calc_distance(block_data)
        block_data["resistance"] = self._calc_resistance(block_data)
        block_data["hash"] = self.crypto.hash_dict(block_data)

        if not self._verify_signatures(block_data, validators):
            raise ValueError("Invalid signatures")

        self.chain.append(block_data)

        for node_dict in block_data["nodes"]:
            node_hash = node_dict["hash"]
            self.index[node_hash].append(block_data["id"])
        date = block_data["time"][:10]
        self.temporal[date].append(block_data["id"])

        self._save()
        return block_data["id"]

    def _get_signatures(self, data: Dict, validators: List[Tuple[str, Any]]) -> List[Dict]:
        signatures = []
        data_bytes = json.dumps(data, sort_keys=True).encode()

        for val_id, _ in validators:
            sig = self.crypto.sign(data_bytes, val_id)
            signatures.append({
                "validator": val_id,
                "signature": sig,
                "time": datetime.utcnow().isoformat() + "Z"
            })

        return signatures

    def _verify_signatures(self, block: Dict, validators: List[Tuple[str, Any]]) -> bool:
        # With the placeholder Crypto this only checks the key-id prefix; a
        # real scheme would need the exact bytes that were originally signed.
        block_copy = block.copy()
        signatures = block_copy.pop("signatures", [])
        block_bytes = json.dumps(block_copy, sort_keys=True).encode()

        for sig_info in signatures:
            val_id = sig_info["validator"]
            signature = sig_info["signature"]

            if not self.crypto.verify(block_bytes, signature, val_id):
                return False

        return True

    def _calc_distance(self, block: Dict) -> float:
        val_count = len(block.get("signatures", []))
        node_count = len(block.get("nodes", []))

        if val_count == 0 or node_count == 0:
            return 0.0

        return min(1.0, (val_count * 0.25) + (node_count * 0.05))

    def _calc_resistance(self, block: Dict) -> float:
        # Average of three saturating factors: validators, cross-references, witnesses.
        factors = []

        val_count = len(block.get("signatures", []))
        factors.append(min(1.0, val_count / 7.0))

        total_refs = 0
        for node in block.get("nodes", []):
            for refs in node.get("refs", {}).values():
                total_refs += len(refs)
        factors.append(min(1.0, total_refs / 15.0))

        total_wits = sum(len(node.get("witnesses", [])) for node in block.get("nodes", []))
        factors.append(min(1.0, total_wits / 10.0))

        return sum(factors) / len(factors) if factors else 0.0

    def verify(self) -> Dict:
        if not self.chain:
            return {"valid": False, "error": "Empty"}

        for i in range(1, len(self.chain)):
            curr = self.chain[i]
            prev = self.chain[i - 1]

            if curr["prev"] != prev["hash"]:
                return {"valid": False, "error": f"Chain break at {i}"}

            curr_copy = curr.copy()
            curr_copy.pop("hash", None)
            expected = self.crypto.hash_dict(curr_copy)

            if curr["hash"] != expected:
                return {"valid": False, "error": f"Hash mismatch at {i}"}

        return {
            "valid": True,
            "blocks": len(self.chain),
            "nodes": sum(len(b.get("nodes", [])) for b in self.chain),
            "avg_resistance": statistics.mean(b.get("resistance", 0) for b in self.chain) if self.chain else 0
        }

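# Usage sketch (illustrative; paths and validator ids are hypothetical):
#
#     crypto = Crypto("./keys")
#     ledger = Ledger("ledger.json", crypto)
#     node = RealityNode(hash=crypto.hash("event"), type="event", source="s1",
#                        signature="sig", timestamp="2024-01-01T00:00:00Z")
#     block_id = ledger.add(node, validators=[("val_1", None), ("val_2", None)])
#     ledger.verify()  # -> {"valid": True, "blocks": 2, ...} (genesis + new block)
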
| 977 |
+
==================== SEPARATOR ====================
|
| 978 |
+
|
| 979 |
+
class Separator:
|
| 980 |
+
def init(self, ledger: Ledger, path: str):
|
| 981 |
+
self.ledger = ledger
|
| 982 |
+
self.path = path
|
| 983 |
+
self.graph = defaultdict(list)
|
| 984 |
+
self._load()
|
| 985 |
+
|
| 986 |
+
def _load(self):
|
| 987 |
+
graph_path = os.path.join(self.path, "graph.pkl")
|
| 988 |
+
if os.path.exists(graph_path):
|
| 989 |
+
try:
|
| 990 |
+
with open(graph_path, 'rb') as f:
|
| 991 |
+
self.graph = pickle.load(f)
|
| 992 |
+
except:
|
| 993 |
+
self.graph = defaultdict(list)
|
| 994 |
+
|
| 995 |
+
def _save(self):
|
| 996 |
+
os.makedirs(self.path, exist_ok=True)
|
| 997 |
+
graph_path = os.path.join(self.path, "graph.pkl")
|
| 998 |
+
with open(graph_path, 'wb') as f:
|
| 999 |
+
pickle.dump(self.graph, f)
|
| 1000 |
+
|
| 1001 |
+
def add(self, node_hashes: List[str], interpretation: Dict, interpreter: str, confidence: float = 0.5) -> str:
|
| 1002 |
+
for h in node_hashes:
|
| 1003 |
+
if h not in self.ledger.index:
|
| 1004 |
+
raise ValueError(f"Node {h[:16]}... not found")
|
| 1005 |
+
|
| 1006 |
+
int_id = f"int_{hashlib.sha256(json.dumps(interpretation, sort_keys=True).encode()).hexdigest()[:16]}"
|
| 1007 |
+
|
| 1008 |
+
int_node = {
|
| 1009 |
+
"id": int_id,
|
| 1010 |
+
"nodes": node_hashes,
|
| 1011 |
+
"content": interpretation,
|
| 1012 |
+
"interpreter": interpreter,
|
| 1013 |
+
"confidence": max(0.0, min(1.0, confidence)),
|
| 1014 |
+
"time": datetime.utcnow().isoformat() + "Z",
|
| 1015 |
+
"provenance": self._get_provenance(node_hashes)
|
| 1016 |
+
}
|
| 1017 |
+
|
| 1018 |
+
self.graph[int_id] = int_node
|
| 1019 |
+
|
| 1020 |
+
for node_hash in node_hashes:
|
| 1021 |
+
if "refs" not in self.graph:
|
| 1022 |
+
self.graph["refs"] = {}
|
| 1023 |
+
if node_hash not in self.graph["refs"]:
|
| 1024 |
+
self.graph["refs"][node_hash] = []
|
| 1025 |
+
self.graph["refs"][node_hash].append(int_id)
|
| 1026 |
+
|
| 1027 |
+
self._save()
|
| 1028 |
+
return int_id
|
| 1029 |
+
|
| 1030 |
+
def _get_provenance(self, node_hashes: List[str]) -> List[Dict]:
|
| 1031 |
+
provenance = []
|
| 1032 |
+
for h in node_hashes:
|
| 1033 |
+
block_ids = self.ledger.index.get(h, [])
|
| 1034 |
+
if block_ids:
|
| 1035 |
+
provenance.append({
|
| 1036 |
+
"node": h,
|
| 1037 |
+
"blocks": len(block_ids),
|
| 1038 |
+
"first": block_ids[0] if block_ids else None
|
| 1039 |
+
})
|
| 1040 |
+
return provenance
|
| 1041 |
+
|
| 1042 |
+
def get_conflicts(self, node_hash: str) -> Dict:
|
| 1043 |
+
int_ids = self.graph.get("refs", {}).get(node_hash, [])
|
| 1044 |
+
interpretations = [self.graph[i] for i in int_ids if i in self.graph]
|
| 1045 |
+
|
| 1046 |
+
if not interpretations:
|
| 1047 |
+
return {"node": node_hash, "count": 0, "groups": []}
|
| 1048 |
+
|
| 1049 |
+
groups = self._group_interpretations(interpretations)
|
| 1050 |
+
|
| 1051 |
+
return {
|
| 1052 |
+
"node": node_hash,
|
| 1053 |
+
"count": len(interpretations),
|
| 1054 |
+
"groups": groups,
|
| 1055 |
+
"plurality": self._calc_plurality(interpretations),
|
| 1056 |
+
"confidence_range": {
|
| 1057 |
+
"min": min(i.get("confidence", 0) for i in interpretations),
|
| 1058 |
+
"max": max(i.get("confidence", 0) for i in interpretations),
|
| 1059 |
+
"avg": statistics.mean(i.get("confidence", 0) for i in interpretations)
|
| 1060 |
+
}
|
| 1061 |
+
}
|
| 1062 |
+
|
| 1063 |
+
def _group_interpretations(self, interpretations: List[Dict]) -> List[List[Dict]]:
|
| 1064 |
+
if len(interpretations) <= 1:
|
| 1065 |
+
return [interpretations] if interpretations else []
|
| 1066 |
+
|
| 1067 |
+
groups_dict = defaultdict(list)
|
| 1068 |
+
for intp in interpretations:
|
| 1069 |
+
content_hash = hashlib.sha256(
|
| 1070 |
+
json.dumps(intp["content"], sort_keys=True).encode()
|
| 1071 |
+
).hexdigest()[:8]
|
| 1072 |
+
groups_dict[content_hash].append(intp)
|
| 1073 |
+
|
| 1074 |
+
return list(groups_dict.values())
|

    def _calc_plurality(self, interpretations: List[Dict]) -> float:
        if len(interpretations) <= 1:
            return 0.0

        unique = set()
        for intp in interpretations:
            content_hash = hashlib.sha256(
                json.dumps(intp["content"], sort_keys=True).encode()
            ).hexdigest()
            unique.add(content_hash)

        return min(1.0, len(unique) / len(interpretations))
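
    # Worked example: three interpretations where two share identical content
    # give 2 unique hashes / 3 total = 0.67 (moderate plurality); three
    # identical readings give 1/3 = 0.33; a single reading returns 0.0.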

    def stats(self) -> Dict:
        int_nodes = [v for k, v in self.graph.items() if k != "refs"]

        if not int_nodes:
            return {"count": 0, "interpreters": 0, "avg_conf": 0.0, "nodes_covered": 0}

        interpreters = set()
        confidences = []
        nodes_covered = set()

        for node in int_nodes:
            interpreters.add(node.get("interpreter", "unknown"))
            confidences.append(node.get("confidence", 0.5))
            nodes_covered.update(node.get("nodes", []))

        return {
            "count": len(int_nodes),
            "interpreters": len(interpreters),
            "avg_conf": statistics.mean(confidences) if confidences else 0.0,
            "nodes_covered": len(nodes_covered),
            "interpreter_list": list(interpreters)
        }
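
    # Illustrative stats() shape (hypothetical values):
    #
    #   {"count": 2, "interpreters": 2, "avg_conf": 0.75,
    #    "nodes_covered": 3, "interpreter_list": ["official_historian",
    #    "independent_researcher"]}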

# ==================== HIERARCHICAL DETECTOR ====================

class HierarchicalDetector:
    def __init__(self, hierarchy: SuppressionHierarchy, ledger: Ledger, separator: Separator):
        self.hierarchy = hierarchy
        self.ledger = ledger
        self.separator = separator

    def detect_from_ledger(self) -> Dict:
        """Bottom-up detection: Evidence → Methods → Primitives → Lenses"""

        # Step 1: Find evidence signatures
        found_signatures = self._scan_for_signatures()

        # Step 2: Map signatures to methods
        method_results = self._signatures_to_methods(found_signatures)

        # Step 3: Group by primitives
        primitive_analysis = self._analyze_primitives(method_results)

        # Step 4: Infer lenses
        lens_inference = self._infer_lenses(primitive_analysis)

        return {
            "detection_timestamp": datetime.utcnow().isoformat() + "Z",
            "evidence_found": len(found_signatures),
            "signatures": found_signatures,
            "method_results": method_results,
            "primitive_analysis": primitive_analysis,
            "lens_inference": lens_inference,
            "hierarchical_trace": [
                self.hierarchy.trace_detection_path(sig)
                for sig in found_signatures[:3]
            ]
        }

    def _scan_for_signatures(self) -> List[str]:
        """Look for evidence patterns in the ledger"""
        found = []

        # Check for entity disappearance (Total Erasure signature)
        for i in range(len(self.ledger.chain) - 1):
            curr_block = self.ledger.chain[i]
            next_block = self.ledger.chain[i + 1]

            curr_entities = self._extract_entities(curr_block)
            next_entities = self._extract_entities(next_block)

            if curr_entities and not next_entities:
                found.append("entity_present_then_absent")

        # Check for single explanation (Official Narrative Closure)
        stats = self.separator.stats()
        if stats["interpreters"] == 1 and stats["count"] > 3:
            found.append("single_explanation")

        # Check for gradual fading (Soft Erasure)
        decay = self._analyze_decay_pattern()
        if decay > 0.5:
            found.append("gradual_fading")

        # Check for information clusters (Compartmentalization)
        clusters = self._analyze_information_clusters()
        if clusters > 0.7:
            found.append("information_clusters")

        # Check for narrowed focus (Scope Contraction)
        focus = self._analyze_scope_focus()
        if focus > 0.6:
            found.append("narrowed_focus")

        return list(set(found))

    def _extract_entities(self, block: Dict) -> Set[str]:
        entities = set()
        for node in block.get("nodes", []):
            content = json.dumps(node)
            if "entity" in content or "name" in content:
                entities.add(f"ent_{hashlib.sha256(content.encode()).hexdigest()[:8]}")
        return entities

    def _analyze_decay_pattern(self) -> float:
        ref_counts = []
        for block in self.ledger.chain[-10:]:
            count = 0
            for node in block.get("nodes", []):
                for refs in node.get("refs", {}).values():
                    count += len(refs)
            ref_counts.append(count)

        if len(ref_counts) < 3:
            return 0.0

        first_half = ref_counts[:len(ref_counts)//2]
        second_half = ref_counts[len(ref_counts)//2:]

        if not first_half or not second_half:
            return 0.0

        avg_first = statistics.mean(first_half)
        avg_second = statistics.mean(second_half)

        if avg_first == 0:
            return 0.0

        return max(0.0, (avg_first - avg_second) / avg_first)
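
    # Worked example: reference counts [8, 6, 4, 1, 1, 0] over the last six
    # blocks split into halves [8, 6, 4] and [1, 1, 0], so the decay score is
    # (6.0 - 0.67) / 6.0 = 0.89 and "gradual_fading" fires (threshold 0.5).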

    def _analyze_information_clusters(self) -> float:
        total_links = 0
        possible_links = 0

        for block in self.ledger.chain[-5:]:
            nodes = block.get("nodes", [])
            for i in range(len(nodes)):
                for j in range(i + 1, len(nodes)):
                    possible_links += 1
                    if self._are_nodes_linked(nodes[i], nodes[j]):
                        total_links += 1

        # No node pairs means no evidence of clustering, not maximal clustering
        if possible_links == 0:
            return 0.0

        return 1.0 - (total_links / possible_links)
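
    # Worked example: 4 nodes in a block give 6 possible pairs; if only 1 pair
    # shares a reference, the score is 1 - 1/6 = 0.83, above the 0.7 threshold,
    # so "information_clusters" (compartmentalization) fires.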

    def _are_nodes_linked(self, node1: Dict, node2: Dict) -> bool:
        refs1 = set()
        refs2 = set()

        for ref_list in node1.get("refs", {}).values():
            refs1.update(ref_list)

        for ref_list in node2.get("refs", {}).values():
            refs2.update(ref_list)

        return bool(refs1 & refs2)
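
    # Example: nodes citing {"news": ["ny_times_1903"]} and
    # {"financial": ["ny_times_1903"]} count as linked, because linkage is
    # judged on the union of reference values regardless of category.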

    def _analyze_scope_focus(self) -> float:
        type_counts = defaultdict(int)
        total = 0

        for block in self.ledger.chain:
            for node in block.get("nodes", []):
                node_type = node.get("type", "unknown")
                type_counts[node_type] += 1
                total += 1

        if total == 0:
            return 0.0

        # Calculate concentration (higher = more focused on few types)
        max_type = max(type_counts.values(), default=0)
        return max_type / total
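
    # Worked example: type counts {"historical_event": 8, "testimony": 2}
    # give a concentration of 8/10 = 0.8, above the 0.6 threshold, so
    # "narrowed_focus" (scope contraction) fires.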

    def _signatures_to_methods(self, signatures: List[str]) -> List[Dict]:
        """Map evidence signatures to detected methods"""
        results = []

        for sig in signatures:
            method_ids = self.hierarchy.signatures.get(sig, [])
            for method_id in method_ids:
                method = self.hierarchy.methods[method_id]

                # Calculate confidence based on evidence strength
                confidence = self._calculate_method_confidence(method, sig)

                if method.implemented and confidence > 0.5:
                    results.append({
                        "method_id": method.id,
                        "method_name": method.name,
                        "primitive": method.primitive.value,
                        "confidence": round(confidence, 3),
                        "evidence_signature": sig,
                        "implemented": True
                    })

        return sorted(results, key=lambda x: x["confidence"], reverse=True)

    def _calculate_method_confidence(self, method: SuppressionMethod, signature: str) -> float:
        """Calculate detection confidence for a method"""
        base_confidence = 0.7 if method.implemented else 0.3

        # Adjust based on evidence strength
        if "entity_present_then_absent" in signature:
            return min(0.9, base_confidence + 0.2)
        elif "single_explanation" in signature:
            return min(0.85, base_confidence + 0.15)
        elif "gradual_fading" in signature:
            return min(0.8, base_confidence + 0.1)

        return base_confidence
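
    # Example: an implemented method matched by "entity_present_then_absent"
    # scores min(0.9, 0.7 + 0.2) = 0.9; an unimplemented one scores
    # min(0.9, 0.3 + 0.2) = 0.5 and fails both the implemented check and the
    # > 0.5 cutoff in _signatures_to_methods above.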

    def _analyze_primitives(self, method_results: List[Dict]) -> Dict:
        """Analyze which primitives are most active"""
        primitive_counts = defaultdict(int)
        primitive_confidence = defaultdict(list)

        for result in method_results:
            primitive = result["primitive"]
            primitive_counts[primitive] += 1
            primitive_confidence[primitive].append(result["confidence"])

        analysis = {}
        for primitive, count in primitive_counts.items():
            confidences = primitive_confidence[primitive]
            analysis[primitive] = {
                "method_count": count,
                "average_confidence": round(statistics.mean(confidences), 3) if confidences else 0.0,
                "dominant_methods": [
                    r["method_name"] for r in method_results
                    if r["primitive"] == primitive
                ][:2]
            }

        return analysis

    def _infer_lenses(self, primitive_analysis: Dict) -> Dict:
        """Infer which conceptual lenses might be active"""
        active_primitives = [p for p, data in primitive_analysis.items() if data["method_count"] > 0]
        active_lenses = set()

        for primitive_str in active_primitives:
            primitive = Primitive(primitive_str)
            lens_ids = self.hierarchy.primitives.get(primitive, [])
            active_lenses.update(lens_ids)

        lens_details = []
        for lens_id in sorted(active_lenses)[:10]:  # Top 10 lenses
            lens = self.hierarchy.lenses.get(lens_id)
            if lens:
                lens_details.append({
                    "id": lens.id,
                    "name": lens.name,
                    "archetype": lens.archetype,
                    "mechanism": lens.suppression_mechanism
                })

        return {
            "active_lens_count": len(active_lenses),
            "active_primitives": active_primitives,
            "lens_details": lens_details,
            "architecture_analysis": self._analyze_architecture(active_primitives, active_lenses)
        }

    def _analyze_architecture(self, active_primitives: List[str], active_lenses: Set[int]) -> str:
        """Analyze the suppression architecture complexity"""
        analysis = []

        primitive_count = len(active_primitives)
        lens_count = len(active_lenses)

        if primitive_count >= 3:
            analysis.append(f"Complex suppression architecture ({primitive_count} primitives)")
        elif primitive_count > 0:
            analysis.append("Basic suppression patterns detected")

        if lens_count > 20:
            analysis.append("Deep conceptual framework active")
        elif lens_count > 10:
            analysis.append("Multiple conceptual layers active")

        # Check for composite patterns
        if Primitive.ERASURE.value in active_primitives and Primitive.NARRATIVE_CAPTURE.value in active_primitives:
            analysis.append("Erasure + Narrative patterns suggest coordinated suppression")

        if Primitive.META.value in active_primitives:
            analysis.append("Meta-suppression patterns detected (self-referential control)")

        return " | ".join(analysis) if analysis else "Minimal suppression patterns"

# ==================== MAIN ENGINE ====================

class CompleteEngine:
    def __init__(self, path: str = "complete_engine"):
        os.makedirs(path, exist_ok=True)

        print("=" * 80)
        print("HIERARCHICAL SUPPRESSION DETECTION ENGINE")
        print("73 Lenses → 10 Primitives → 43 Methods → Evidence Signatures")
        print("=" * 80)

        # Initialize hierarchy
        self.hierarchy = SuppressionHierarchy()

        # Initialize ledger and separator
        self.crypto = Crypto(f"{path}/keys")
        self.ledger = Ledger(f"{path}/ledger.json", self.crypto)
        self.separator = Separator(self.ledger, f"{path}/interpretations")

        # Initialize detector
        self.detector = HierarchicalDetector(self.hierarchy, self.ledger, self.separator)

        # Export ontology
        self.hierarchy.export_ontology(f"{path}/suppression_hierarchy.json")

        print(f"✓ Hierarchy initialized: {len(self.hierarchy.lenses)} lenses")
        print(f"✓ Primitives defined: {len(self.hierarchy.primitives)}")
        print(f"✓ Methods available: {len(self.hierarchy.methods)}")
        print(f"✓ Evidence signatures: {len(self.hierarchy.signatures)}")
        print(f"✓ Ledger ready: {len(self.ledger.chain)} blocks")

    def record_reality(self, content: str, type: str, source: str,
                       witnesses: List[str] = None, refs: Dict[str, List[str]] = None) -> str:
        """Record immutable reality node"""
        content_hash = self.crypto.hash(content)
        signature = self.crypto.sign(content.encode(), source)

        node = RealityNode(
            hash=content_hash,
            type=type,
            source=source,
            signature=signature,
            timestamp=datetime.utcnow().isoformat() + "Z",
            witnesses=witnesses or [],
            refs=refs or {}
        )

        # Use dummy validators for demo
        validators = [("validator_1", None), ("validator_2", None)]
        block_id = self.ledger.add(node, validators)

        print(f"✓ Recorded: {content_hash[:16]}... in block {block_id}")
        return content_hash
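
    # Usage sketch (hypothetical source and witness ids):
    #
    #   h = engine.record_reality("Lab notebook vanished", "historical_event",
    #                             "archive_009", witnesses=["librarian_log"])
    #
    # The returned content hash is the stable handle that interpretations
    # and conflict queries refer back to.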

    def add_interpretation(self, node_hashes: List[str], interpretation: Dict,
                           interpreter: str, confidence: float = 0.5) -> str:
        """Add interpretation (separate from reality)"""
        int_id = self.separator.add(node_hashes, interpretation, interpreter, confidence)
        print(f"✓ Interpretation added: {int_id} by {interpreter}")
        return int_id

    def detect_suppression(self) -> Dict:
        """Run hierarchical suppression detection"""
        print("\nDetecting suppression patterns...")
        results = self.detector.detect_from_ledger()

        print(f"✓ Evidence found: {results['evidence_found']} signatures")
        print(f"✓ Methods detected: {len(results['method_results'])}")
        print(f"✓ Primitives active: {len(results['primitive_analysis'])}")
        print(f"✓ Lenses inferred: {results['lens_inference']['active_lens_count']}")

        if results['method_results']:
            print("\nTop detected methods:")
            for method in results['method_results'][:5]:
                print(f"  • {method['method_name']}: {method['confidence']:.1%}")

        architecture = results['lens_inference']['architecture_analysis']
        if architecture:
            print(f"\nArchitecture: {architecture}")

        return results

    def get_system_status(self) -> Dict:
        """Get complete system status"""
        ledger_status = self.ledger.verify()
        separator_stats = self.separator.stats()

        implemented_methods = sum(1 for m in self.hierarchy.methods.values() if m.implemented)

        return {
            "system": {
                "lenses": len(self.hierarchy.lenses),
                "primitives": len(self.hierarchy.primitives),
                "methods": len(self.hierarchy.methods),
                "methods_implemented": implemented_methods,
                "signatures": len(self.hierarchy.signatures)
            },
            "ledger": {
                "valid": ledger_status["valid"],
                "blocks": ledger_status.get("blocks", 0),
                "nodes": ledger_status.get("nodes", 0),
                "avg_resistance": ledger_status.get("avg_resistance", 0)
            },
            "interpretations": separator_stats,
            "hierarchical_ready": True
        }

# ==================== DEMONSTRATION ====================

def demonstrate_hierarchical_detection():
    """Demonstrate the complete hierarchical system"""

    engine = CompleteEngine("hierarchical_demo")

    print("\nRecording reality nodes...")

    # Record historical events
    h1 = engine.record_reality(
        "J.P. Morgan withdrew Tesla funding in 1903",
        "historical_event",
        "financial_archive_001",
        witnesses=["bank_record_1903", "correspondence_archive"],
        refs={"financial": ["morgan_ledgers"], "news": ["ny_times_1903"]}
    )

    h2 = engine.record_reality(
        "FBI seized Tesla papers in 1943",
        "historical_event",
        "foia_document_001",
        witnesses=["inventory_1943", "hotel_records"],
        refs={"government": ["fbi_files"], "legal": ["property_records"]}
    )

    h3 = engine.record_reality(
        "Witness disappeared after testimony in 1952",
        "historical_event",
        "court_archive_001",
        witnesses=["court_record_1952"],
        refs={"legal": ["court_documents"]}
    )

    print("\nAdding interpretations...")

    # Official interpretation
    engine.add_interpretation(
        [h1, h2],
        {"narrative": "Standard business and government operations", "agency": "normal"},
        "official_historian",
        0.85
    )

    # Alternative interpretation
    engine.add_interpretation(
        [h1, h2, h3],
        {"narrative": "Pattern of suppression across generations", "agency": "coordinated"},
        "independent_researcher",
        0.65
    )

    print("\nRunning hierarchical suppression detection...")
    results = engine.detect_suppression()

    print("\nSystem Status:")
    status = engine.get_system_status()

    print(f"  • Lenses: {status['system']['lenses']}")
    print(f"  • Primitives: {status['system']['primitives']}")
    print(f"  • Methods: {status['system']['methods']} ({status['system']['methods_implemented']} implemented)")
    print(f"  • Ledger blocks: {status['ledger']['blocks']}")
    print(f"  • Reality nodes: {status['ledger']['nodes']}")
    print(f"  • Interpretations: {status['interpretations']['count']}")
    print(f"  • Unique interpreters: {status['interpretations']['interpreters']}")

    print("\n" + "=" * 80)
    print("✓ HIERARCHICAL SYSTEM OPERATIONAL")
    print("Evidence → Methods → Primitives → Lenses")
    print("No circular references, clean abstraction layers")
    print("=" * 80)

if __name__ == "__main__":
    demonstrate_hierarchical_detection()