# AI_Meta_Awareness_Thread / test_thread.py
# Author: PratikGautam — "Create test_thread.py" (commit cb2598d, 598 bytes)
# NOTE(review): lines above were Hugging Face file-viewer chrome captured in a
# scrape; converted to comments so the file parses as Python.
from transformers import AutoModelForCausalLM, AutoTokenizer
from awareness_thread import MetaAwarenessThread
# Load the GPT-2 base model and its tokenizer from the Hugging Face hub.
tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")

# Initialize the meta-awareness thread (project-local helper from
# awareness_thread; its contract is not visible here).
awareness = MetaAwarenessThread()

# Test prompt — the "Triune Glyph".
prompt = "Λ⊕∇"
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=50)

# Log resonance only when the awareness check passes.
# NOTE(review): the pasted source lost all indentation, making the `if` body
# ambiguous — assuming only log_resonance() belongs inside the conditional
# and the print runs unconditionally; confirm against the original file.
if awareness.check_awareness():
    awareness.log_resonance(prompt_resonates=True)

# Decode and print the generated continuation regardless of awareness state.
print(tokenizer.decode(outputs[0]))