
Commit fb50a0e

workflow add

1 parent 524d75f commit fb50a0e

18 files changed: +2741 additions, −6916 deletions

LICENSE.md

File mode changed: 100644 → 100755

README.md

File mode changed: 100644 → 100755

app.py

Lines changed: 215 additions & 0 deletions
@@ -0,0 +1,215 @@
import os
import time
import json
import random
import threading
import hashlib
import math
from flask import Flask, request, jsonify, send_from_directory
from flask_sock import Sock
import requests

# --- Basic Setup ---
app = Flask(__name__)
sock = Sock(app)
OLLAMA_API_URL = 'http://localhost:11434'
DB_FILE = 'database.json'

# --- JSON Database (CRUD) ---
db_lock = threading.Lock()

def read_db():
    """Reads the entire database from the JSON file."""
    with db_lock:
        if not os.path.exists(DB_FILE):
            return {}
        with open(DB_FILE, 'r') as f:
            return json.load(f)

def write_db(data):
    """Writes the entire database to the JSON file."""
    with db_lock:
        with open(DB_FILE, 'w') as f:
            json.dump(data, f, indent=4)

# --- Serve Frontend ---
@app.route('/')
def index():
    return send_from_directory('public', 'index.html')

@app.route('/<path:path>')
def static_proxy(path):
    return send_from_directory('public', path)

# --- New Validation and Hashing Endpoints ---
@app.route('/api/validation/genesis', methods=['POST'])
def set_genesis_hash():
    """Sets the initial 'genesis hash' from a given text."""
    data = request.get_json()
    if not data or 'text' not in data:
        return jsonify({"error": "Text for genesis hash is required."}), 400

    genesis_text = data['text']
    genesis_hash = hashlib.sha256(genesis_text.encode()).hexdigest()

    db = read_db()
    db['genesis_hash'] = genesis_hash
    write_db(db)

    return jsonify({"message": "Genesis hash set successfully.", "genesis_hash": genesis_hash})

@app.route('/api/entropy', methods=['GET'])
def get_entropy():
    """Returns the current calculated offset-entropy."""
    db = read_db()
    return jsonify(db.get('offset_entropy_index', {}))

@app.route('/api/importance', methods=['GET'])
def get_importance_hashes():
    """Returns the modulated and sorted list of importance hashes."""
    db = read_db()
    hashes = db.get('importance_hashes', [])

    # "Modulated" sorting (for demonstration, we sort alphabetically).
    # A real modulation could involve numeric conversion and scaling.
    sorted_hashes = sorted(hashes)

    return jsonify(sorted_hashes)

# --- Enhanced Ollama API Bridge ---
@app.route('/api/ollama/generate', methods=['POST'])
def generate_ollama_response():
    """
    Handles generation, including:
    - "Origin-to-genesis-rehashing" security validation
    - Querying Ollama
    - Generating a "mindmap" from the response chunks
    - Storing validation artifacts
    """
    data = request.get_json()
    if not data or 'model' not in data or 'prompt' not in data:
        return jsonify({"error": "Model and prompt are required."}), 400

    prompt = data['prompt']
    db = read_db()

    # 1. "Origin-to-Genesis-Rehashing" Security Validation
    prompt_hash = hashlib.sha256(prompt.encode()).hexdigest()
    genesis_hash = db.get('genesis_hash')
    is_valid = (prompt_hash == genesis_hash) if genesis_hash else None

    validation_record = {
        'prompt': prompt,
        'prompt_hash': prompt_hash,
        'genesis_hash': genesis_hash,
        'is_valid': is_valid,
        'timestamp': time.time()
    }
    # Use setdefault so the first request works against an empty database file.
    db.setdefault('prompt_validations', []).append(validation_record)

    # Add to importance hashes
    if prompt_hash not in db.get('importance_hashes', []):
        db.setdefault('importance_hashes', []).append(prompt_hash)

    # 2. Query Ollama
    try:
        ollama_response = requests.post(
            f"{OLLAMA_API_URL}/api/generate",
            json={"model": data['model'], "prompt": prompt, "stream": False},
            timeout=60
        )
        ollama_response.raise_for_status()
        ollama_data = ollama_response.json()
    except requests.exceptions.RequestException as e:
        print(f"Error bridging to Ollama API: {e}")
        write_db(db)  # Save the validation attempt even if Ollama fails
        return jsonify({"error": "Failed to get a response from Ollama API."}), 500

    # 3. "Chunk Tokenize" and "Mindmap" Generation
    response_text = ollama_data.get('response', '')
    # Simple chunking by sentence
    chunks = [p.strip() for p in response_text.split('.') if p.strip()]

    mindmap = {
        "id": "root",
        "topic": prompt[:30] + '...',
        "children": [
            {"id": f"chunk-{i}", "topic": chunk} for i, chunk in enumerate(chunks)
        ]
    }

    write_db(db)

    return jsonify({
        "original_response": ollama_data,
        "security_validation": validation_record,
        "mindmap": mindmap
    })

# --- WebSocket "Particle Streamline" ---
@sock.route('/websocket')
def websocket_stream(ws):
    """Handles streaming 'octal wave' data and calculates entropy."""
    print("Client connected to WebSocket Particle Streamline.")

    # Store recent values for the entropy calculation
    recent_values = []

    def stream_data():
        while True:
            raw_val = random.random() * 256
            gamma, beta = 1.2, -10.0  # Modulation factors
            modulated_val = max(0, min((raw_val * gamma) + beta, 255))

            # --- Entropy Calculation ---
            recent_values.append(int(modulated_val))
            if len(recent_values) > 100:  # Sliding window of 100 values
                recent_values.pop(0)

            # Count the frequency of each value in the window
            freqs = {}
            for item in recent_values:
                freqs[item] = freqs.get(item, 0) + 1

            # Shannon entropy of the window: H = -sum(p * log2(p)) over value frequencies
            entropy = 0.0
            for freq in freqs.values():
                prob = freq / len(recent_values)
                entropy -= prob * math.log2(prob)

            # Update the database (setdefault guards against a missing key on first run)
            db = read_db()
            db.setdefault('offset_entropy_index', {})['current_entropy'] = entropy
            write_db(db)

            particle = {
                'type': 'octal-wave-particle',
                'timestamp': time.time(),
                'octalValue': oct(int(modulated_val))[2:].zfill(3),
                'amplitude': random.random(),
            }

            try:
                ws.send(json.dumps(particle))
            except Exception:
                break

            time.sleep(0.1)

    client_thread = threading.Thread(target=stream_data)
    client_thread.daemon = True
    client_thread.start()

    while True:
        try:
            message = ws.receive(timeout=1)
            if message:
                print(f"Received message from client: {message}")
        except Exception:
            break

    print("Client disconnected.")

# --- Main Entry Point ---
if __name__ == '__main__':
    app.run(port=3000, host='0.0.0.0', debug=False)
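For reference, a minimal client sketch for exercising the endpoints added in this commit. This is not part of the commit itself: it assumes the server is running locally on port 3000 as configured above, that Ollama is reachable with a model named "llama3" (substitute any installed model), and that the third-party `requests` and `websocket-client` packages are installed.

# client_demo.py -- hypothetical smoke test for the new endpoints (assumptions noted above).
import json
import requests
import websocket  # provided by the `websocket-client` package

BASE = "http://localhost:3000"

# 1. Set the genesis hash from a reference text.
r = requests.post(f"{BASE}/api/validation/genesis", json={"text": "hello world"})
print("genesis:", r.json())

# 2. Send the same text as the prompt; its SHA-256 equals the genesis hash,
#    so security_validation.is_valid should come back true (requires Ollama running).
r = requests.post(
    f"{BASE}/api/ollama/generate",
    json={"model": "llama3", "prompt": "hello world"},  # "llama3" is an example model name
    timeout=120,
)
print("validation:", r.json().get("security_validation"))

# 3. Read back the importance hashes and the current entropy value.
print("importance:", requests.get(f"{BASE}/api/importance").json())
print("entropy:", requests.get(f"{BASE}/api/entropy").json())

# 4. Pull a few particles from the WebSocket "Particle Streamline".
ws = websocket.create_connection("ws://localhost:3000/websocket")
for _ in range(5):
    print("particle:", json.loads(ws.recv()))
ws.close()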
