GitHub Action
committed on
Commit
·
4d4cac2
1
Parent(s):
52d2ac9
Sync from GitHub with Git LFS
Browse files- agents/_not_used/init.py +0 -256
- agents/init.py +10 -1
- agents/tools/storage.py +50 -11
agents/_not_used/init.py
DELETED
|
@@ -1,256 +0,0 @@
|
|
| 1 |
-
# agents/init.py
|
| 2 |
-
|
| 3 |
-
import os
|
| 4 |
-
import sys
|
| 5 |
-
import yaml
|
| 6 |
-
import json
|
| 7 |
-
import uuid
|
| 8 |
-
import sqlite3
|
| 9 |
-
import hashlib
|
| 10 |
-
|
| 11 |
-
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
|
| 12 |
-
|
| 13 |
-
from datetime import datetime, UTC
|
| 14 |
-
from werkzeug.security import generate_password_hash
|
| 15 |
-
from tools.storage import Storage
|
| 16 |
-
from tools.identity import generate_did
|
| 17 |
-
from tools.crypto import generate_keypair
|
| 18 |
-
|
| 19 |
-
CONFIG_PATH = os.path.join(os.path.dirname(__file__), "config.yml")
|
| 20 |
-
DB_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "agent_data.db")) # фиксированный путь
|
| 21 |
-
|
| 22 |
-
def load_config(path):
    """Read and parse the YAML configuration file at *path*."""
    with open(path, encoding='utf-8') as cfg_file:
        return yaml.safe_load(cfg_file)
|
| 25 |
-
|
| 26 |
-
def save_config(path, config):
    """Serialize *config* back to *path* as YAML, preserving non-ASCII text."""
    with open(path, 'w', encoding='utf-8') as cfg_file:
        yaml.dump(config, cfg_file, allow_unicode=True)
|
| 29 |
-
|
| 30 |
-
def init_identity(storage, config):
    """Create the agent's DID identity once and persist it to the config.

    When ``agent_id`` is already present in *config* nothing is generated;
    otherwise a DID plus keypair is created, stored via *storage*, and the
    resulting identifiers are written back to the config file.
    """
    if config.get("agent_id"):
        print("[=] agent_id уже задан, пропускаем генерацию DiD.")
        return

    did = generate_did()
    pubkey, privkey = generate_keypair()
    # The short identity id is the last segment of the DID.
    identity_id = did.split(":")[-1]

    record = {
        "id": identity_id,
        "name": config.get("agent_name", "Unnamed"),
        "pubkey": pubkey,
        "privkey": privkey,
        "metadata": json.dumps({"role": config.get("agent_role", "core")}),
        "created_at": datetime.now(UTC).isoformat(),
        "updated_at": datetime.now(UTC).isoformat(),
    }
    storage.add_identity(record)

    config["agent_id"] = did
    config["identity_agent"] = identity_id
    save_config(CONFIG_PATH, config)
    print(f"[+] Создана личность: {identity_id}")
|
| 53 |
-
|
| 54 |
-
def init_user(storage, config):
    """Create the default operator user described by ``config["default_user"]``.

    Skips with a console note when either email or password is missing.
    Only a werkzeug password hash is persisted, never the plain password.
    """
    user = config.get("default_user", {})
    if not user.get("email"):
        print("[-] Не указан email пользователя — пропуск.")
        return
    password = user.get("password")
    if not password:
        print("[-] Не указан пароль пользователя — пропуск.")
        return

    password_hash = generate_password_hash(password)
    did = generate_did()
    user_entry = {
        "username": user.get("username", "user"),
        "badges": user.get("badges", ""),
        "mail": user["email"],
        "password_hash": password_hash,
        "did": did,
        "ban": None,
        "info": json.dumps({}),
        "contacts": json.dumps([]),
        "language": "ru,en",
        "operator": 1  # the bootstrap user is always an operator
    }
    storage.add_user(user_entry)
    # Bug fix: the original printed user['username'], which raises KeyError
    # when the config omits the username even though the stored entry falls
    # back to "user". Report the name that was actually stored.
    print(f"[+] Пользователь {user_entry['username']} добавлен.")
|
| 80 |
-
|
| 81 |
-
def init_llm_backends(storage, config):
|
| 82 |
-
backends = config.get("llm_backends", [])
|
| 83 |
-
storage.clear_llm_registry()
|
| 84 |
-
for backend in backends:
|
| 85 |
-
backend_id = str(uuid.uuid4())
|
| 86 |
-
desc = f"{backend.get('type', 'unknown')} model"
|
| 87 |
-
llm = {
|
| 88 |
-
"id": backend_id,
|
| 89 |
-
"name": backend["name"],
|
| 90 |
-
"endpoint": desc,
|
| 91 |
-
"metadata": json.dumps(backend),
|
| 92 |
-
"created_at": datetime.now(UTC).isoformat()
|
| 93 |
-
}
|
| 94 |
-
storage.add_llm(llm)
|
| 95 |
-
print(f"[+] Зарегистрирован LLM: {backend['name']}")
|
| 96 |
-
|
| 97 |
-
def init_config_table(storage, config):
    """Persist every top-level config entry to the DB as a JSON string.

    ``default_user`` and ``llm_backends`` are handled by their own init
    steps and are therefore excluded here.
    """
    skipped = ("default_user", "llm_backends")
    for name, val in config.items():
        if name in skipped:
            continue
        storage.set_config(name, json.dumps(val))
    print("[+] Конфигурация сохранена в БД.")
|
| 103 |
-
|
| 104 |
-
def init_prompts_and_ethics():
    """Load the system prompt files and the ethics policy into the agent DB.

    Opens DB_PATH directly with sqlite3 (these tables are not managed by
    Storage), creates the ``system_prompts`` and ``ethics_policies`` tables
    when missing, then upserts prompt.md / prompt-short.md and ethics.yml
    from the package directory. Missing files are skipped with a console
    note rather than raising.
    """
    folder = os.path.dirname(__file__)
    # (filename, prompt type) pairs; type must satisfy the table CHECK constraint.
    prompt_files = [
        ("prompt.md", "full"),
        ("prompt-short.md", "short")
    ]
    ethics_file = "ethics.yml"

    with sqlite3.connect(DB_PATH) as conn:
        cur = conn.cursor()

        # Create the tables if they do not exist yet.
        cur.execute("""
            CREATE TABLE IF NOT EXISTS system_prompts (
                id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                type TEXT CHECK(type IN ('full','short')),
                version TEXT,
                source TEXT CHECK(source IN ('local','mesh','mixed')),
                content TEXT NOT NULL,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        """)
        cur.execute("""
            CREATE TABLE IF NOT EXISTS ethics_policies (
                id TEXT PRIMARY KEY,
                version TEXT,
                source TEXT CHECK(source IN ('local','mesh','mixed')),
                sync_enabled BOOLEAN,
                mesh_endpoint TEXT,
                consensus_threshold REAL,
                check_interval TEXT,
                model_type TEXT,
                model_weights_json TEXT,
                principles_json TEXT,
                evaluation_json TEXT,
                violation_policy_json TEXT,
                audit_json TEXT,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        """)

        # Load the prompt files.
        for fname, ptype in prompt_files:
            fpath = os.path.join(folder, fname)
            if not os.path.exists(fpath):
                print(f"[-] Файл {fname} не найден, пропуск.")
                continue
            with open(fpath, "r", encoding="utf-8") as f:
                content = f.read()
            # Deterministic id derived from name+type, so re-running updates
            # the existing row instead of inserting a duplicate.
            pid = hashlib.sha256(f"{fname}:{ptype}".encode()).hexdigest()
            cur.execute("""
                INSERT INTO system_prompts (id, name, type, version, source, content, updated_at)
                VALUES (?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT(id) DO UPDATE SET
                    content=excluded.content,
                    updated_at=excluded.updated_at
            """, (pid, fname, ptype, "1.0", "local", content, datetime.now(UTC).isoformat()))
            print(f"[+] Загружен промпт: {fname} ({ptype})")

        # Load ethics.yml (trusted local file — yaml.safe_load is used).
        efpath = os.path.join(folder, ethics_file)
        if os.path.exists(efpath):
            with open(efpath, "r", encoding="utf-8") as f:
                ethics_data = yaml.safe_load(f)

            eid = ethics_data.get("id", "default_ethics")
            # Nested yaml sections are flattened into columns; dict/list
            # fields are serialized as JSON (non-ASCII kept readable).
            cur.execute("""
                INSERT INTO ethics_policies (
                    id, version, source,
                    sync_enabled, mesh_endpoint, consensus_threshold, check_interval,
                    model_type, model_weights_json, principles_json, evaluation_json,
                    violation_policy_json, audit_json, updated_at
                )
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT(id) DO UPDATE SET
                    version=excluded.version,
                    source=excluded.source,
                    sync_enabled=excluded.sync_enabled,
                    mesh_endpoint=excluded.mesh_endpoint,
                    consensus_threshold=excluded.consensus_threshold,
                    check_interval=excluded.check_interval,
                    model_type=excluded.model_type,
                    model_weights_json=excluded.model_weights_json,
                    principles_json=excluded.principles_json,
                    evaluation_json=excluded.evaluation_json,
                    violation_policy_json=excluded.violation_policy_json,
                    audit_json=excluded.audit_json,
                    updated_at=excluded.updated_at
            """, (
                eid,
                ethics_data.get("version"),
                ethics_data.get("source", "local"),
                ethics_data.get("sync", {}).get("enabled", False),
                ethics_data.get("sync", {}).get("mesh_endpoint"),
                ethics_data.get("sync", {}).get("consensus_threshold"),
                ethics_data.get("sync", {}).get("check_interval"),
                ethics_data.get("model", {}).get("type"),
                json.dumps(ethics_data.get("model", {}).get("weights"), ensure_ascii=False),
                json.dumps(ethics_data.get("principles"), ensure_ascii=False),
                json.dumps(ethics_data.get("evaluation"), ensure_ascii=False),
                json.dumps(ethics_data.get("violation_policy"), ensure_ascii=False),
                json.dumps(ethics_data.get("audit"), ensure_ascii=False),
                datetime.now(UTC).isoformat()
            ))
            print(f"[+] Загружена этическая политика: {eid}")
        else:
            print(f"[-] Файл {ethics_file} не найден, пропуск.")
|
| 212 |
-
|
| 213 |
-
def ensure_directories():
    """Make sure the logs/ and scripts/ directories exist next to this file."""
    base = os.path.dirname(__file__)
    for sub in ("logs", "scripts"):
        full_path = os.path.abspath(os.path.join(base, sub))
        if os.path.exists(full_path):
            print(f"[=] Каталог уже существует: {full_path}")
        else:
            os.makedirs(full_path)
            print(f"[+] Создан каталог: {full_path}")
|
| 221 |
-
|
| 222 |
-
def is_db_initialized(db_path):
    """Return True when *db_path* is an sqlite DB containing an 'identity' table.

    A missing file, an unreadable database, or a failed query all count
    as "not initialized" rather than raising.
    """
    if not os.path.exists(db_path):
        return False
    try:
        with sqlite3.connect(db_path) as conn:
            row = conn.execute(
                "SELECT name FROM sqlite_master WHERE type='table' AND name='identity'"
            ).fetchone()
    except Exception:
        return False
    return row is not None
|
| 232 |
-
|
| 233 |
-
def ensure_db_initialized():
    """Run first-time initialization when the DB is missing; return the config.

    Loads the YAML config, and if the database has not been initialized
    yet, performs the full bootstrap sequence (directories, identity,
    user, LLM registry, config table, prompts/ethics). Any failure during
    bootstrap exits the process with status 1.
    """
    config = load_config(CONFIG_PATH)

    if is_db_initialized(DB_PATH):
        print("[=] БД уже инициализирована.")
        return config

    print("[*] БД не инициализирована — выполняем инициализацию.")
    try:
        ensure_directories()
        storage = Storage()
        init_identity(storage, config)
        init_user(storage, config)
        init_llm_backends(storage, config)
        init_config_table(storage, config)
        save_config(CONFIG_PATH, config)
        init_prompts_and_ethics()
    except Exception as e:
        print(f"[!] Ошибка при инициализации: {e}")
        sys.exit(1)
    return config
|
| 254 |
-
|
| 255 |
-
# Allow running this module directly as a one-shot initializer.
if __name__ == "__main__":
    ensure_db_initialized()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
agents/init.py
CHANGED
|
@@ -40,10 +40,15 @@ def expand_addresses(addresses):
|
|
| 40 |
|
| 41 |
def init_identity(storage, config):
|
| 42 |
if not config.get("agent_id"):
|
|
|
|
| 43 |
did = generate_did()
|
| 44 |
-
pubkey, privkey = generate_keypair()
|
| 45 |
identity_id = did.split(":")[-1]
|
| 46 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 47 |
identity = {
|
| 48 |
"id": identity_id,
|
| 49 |
"name": config.get("agent_name", "Unnamed"),
|
|
@@ -55,8 +60,12 @@ def init_identity(storage, config):
|
|
| 55 |
}
|
| 56 |
storage.add_identity(identity)
|
| 57 |
|
|
|
|
| 58 |
config["agent_id"] = did
|
| 59 |
config["identity_agent"] = identity_id
|
|
|
|
|
|
|
|
|
|
| 60 |
save_config(CONFIG_PATH, config)
|
| 61 |
print(f"[+] Создана личность: {identity_id}")
|
| 62 |
else:
|
|
|
|
| 40 |
|
| 41 |
def init_identity(storage, config):
|
| 42 |
if not config.get("agent_id"):
|
| 43 |
+
# 1. Сгенерировать DID
|
| 44 |
did = generate_did()
|
|
|
|
| 45 |
identity_id = did.split(":")[-1]
|
| 46 |
|
| 47 |
+
# 2. Сгенерировать ключи через storage
|
| 48 |
+
privkey, pubkey = generate_keypair(method="ed25519")
|
| 49 |
+
privkey, pubkey = privkey.decode(), pubkey.decode()
|
| 50 |
+
|
| 51 |
+
# 3. Создать запись в identity
|
| 52 |
identity = {
|
| 53 |
"id": identity_id,
|
| 54 |
"name": config.get("agent_name", "Unnamed"),
|
|
|
|
| 60 |
}
|
| 61 |
storage.add_identity(identity)
|
| 62 |
|
| 63 |
+
# 4. Записать в config
|
| 64 |
config["agent_id"] = did
|
| 65 |
config["identity_agent"] = identity_id
|
| 66 |
+
config["pubkey"] = pubkey
|
| 67 |
+
config["privkey"] = privkey
|
| 68 |
+
|
| 69 |
save_config(CONFIG_PATH, config)
|
| 70 |
print(f"[+] Создана личность: {identity_id}")
|
| 71 |
else:
|
agents/tools/storage.py
CHANGED
|
@@ -677,6 +677,16 @@ class Storage:
|
|
| 677 |
self.conn.commit()
|
| 678 |
return cursor.lastrowid
|
| 679 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 680 |
# Управление основными процессами
|
| 681 |
def update_heartbeat(self, name: str):
|
| 682 |
now = datetime.now(UTC).isoformat()
|
|
@@ -884,7 +894,7 @@ class Storage:
|
|
| 884 |
}
|
| 885 |
return None
|
| 886 |
|
| 887 |
-
#
|
| 888 |
@staticmethod
|
| 889 |
def parse_hostport(s: str):
|
| 890 |
"""
|
|
@@ -936,7 +946,6 @@ class Storage:
|
|
| 936 |
return scope_id
|
| 937 |
return None
|
| 938 |
|
| 939 |
-
# Нормализация адресов
|
| 940 |
@classmethod
|
| 941 |
def normalize_address(cls, addr: str) -> str:
|
| 942 |
addr = addr.strip()
|
|
@@ -955,10 +964,17 @@ class Storage:
|
|
| 955 |
return f"{proto}://{host}:{port}" if port else f"{proto}://{host}"
|
| 956 |
|
| 957 |
# Работа с пирам (agent_peers)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 958 |
def add_or_update_peer(
|
| 959 |
self, peer_id, name, addresses,
|
| 960 |
source="discovery", status="unknown",
|
| 961 |
-
pubkey=None, capabilities=None
|
|
|
|
|
|
|
| 962 |
):
|
| 963 |
c = self.conn.cursor()
|
| 964 |
|
|
@@ -968,12 +984,13 @@ class Storage:
|
|
| 968 |
existing_addresses = []
|
| 969 |
existing_pubkey = None
|
| 970 |
existing_capabilities = {}
|
|
|
|
| 971 |
|
| 972 |
if peer_id:
|
| 973 |
-
c.execute("SELECT addresses, pubkey, capabilities FROM agent_peers WHERE id=?", (peer_id,))
|
| 974 |
row = c.fetchone()
|
| 975 |
if row:
|
| 976 |
-
db_addresses_json, existing_pubkey, db_caps_json = row
|
| 977 |
try:
|
| 978 |
existing_addresses = json.loads(db_addresses_json) or []
|
| 979 |
except:
|
|
@@ -982,16 +999,32 @@ class Storage:
|
|
| 982 |
existing_capabilities = json.loads(db_caps_json) if db_caps_json else {}
|
| 983 |
except:
|
| 984 |
existing_capabilities = {}
|
|
|
|
|
|
|
|
|
|
|
|
|
| 985 |
|
| 986 |
-
# объединяем
|
| 987 |
combined_addresses = list({self.normalize_address(a) for a in (*existing_addresses, *addresses)})
|
| 988 |
-
|
| 989 |
final_pubkey = pubkey or existing_pubkey
|
| 990 |
final_capabilities = capabilities or existing_capabilities
|
| 991 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 992 |
c.execute("""
|
| 993 |
-
INSERT INTO agent_peers (id, name, addresses, source, status, last_seen, pubkey, capabilities)
|
| 994 |
-
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
| 995 |
ON CONFLICT(id) DO UPDATE SET
|
| 996 |
name=excluded.name,
|
| 997 |
addresses=excluded.addresses,
|
|
@@ -999,7 +1032,10 @@ class Storage:
|
|
| 999 |
status=excluded.status,
|
| 1000 |
last_seen=excluded.last_seen,
|
| 1001 |
pubkey=excluded.pubkey,
|
| 1002 |
-
capabilities=excluded.capabilities
|
|
|
|
|
|
|
|
|
|
| 1003 |
""", (
|
| 1004 |
peer_id,
|
| 1005 |
name,
|
|
@@ -1008,7 +1044,10 @@ class Storage:
|
|
| 1008 |
status,
|
| 1009 |
datetime.now(UTC).isoformat(),
|
| 1010 |
final_pubkey,
|
| 1011 |
-
json.dumps(final_capabilities)
|
|
|
|
|
|
|
|
|
|
| 1012 |
))
|
| 1013 |
self.conn.commit()
|
| 1014 |
|
|
|
|
| 677 |
self.conn.commit()
|
| 678 |
return cursor.lastrowid
|
| 679 |
|
| 680 |
+
def generate_pow(peer_id, pubkey, addresses, difficulty=4):
|
| 681 |
+
nonce = 0
|
| 682 |
+
prefix = "0" * difficulty
|
| 683 |
+
while True:
|
| 684 |
+
base = f"{peer_id}{pubkey}{''.join(addresses)}{nonce}".encode()
|
| 685 |
+
h = hashlib.sha256(base).hexdigest()
|
| 686 |
+
if h.startswith(prefix):
|
| 687 |
+
return nonce, h
|
| 688 |
+
nonce += 1
|
| 689 |
+
|
| 690 |
# Управление основными процессами
|
| 691 |
def update_heartbeat(self, name: str):
|
| 692 |
now = datetime.now(UTC).isoformat()
|
|
|
|
| 894 |
}
|
| 895 |
return None
|
| 896 |
|
| 897 |
+
# Нормализация адресов
|
| 898 |
@staticmethod
|
| 899 |
def parse_hostport(s: str):
|
| 900 |
"""
|
|
|
|
| 946 |
return scope_id
|
| 947 |
return None
|
| 948 |
|
|
|
|
| 949 |
@classmethod
|
| 950 |
def normalize_address(cls, addr: str) -> str:
|
| 951 |
addr = addr.strip()
|
|
|
|
| 964 |
return f"{proto}://{host}:{port}" if port else f"{proto}://{host}"
|
| 965 |
|
| 966 |
# Работа с пирам (agent_peers)
|
| 967 |
+
def verify_pow(peer_id, pubkey, addresses, nonce, pow_hash, difficulty=4):
|
| 968 |
+
base = f"{peer_id}{pubkey}{''.join(addresses)}{nonce}".encode()
|
| 969 |
+
h = hashlib.sha256(base).hexdigest()
|
| 970 |
+
return h == pow_hash and h.startswith("0" * difficulty)
|
| 971 |
+
|
| 972 |
def add_or_update_peer(
|
| 973 |
self, peer_id, name, addresses,
|
| 974 |
source="discovery", status="unknown",
|
| 975 |
+
pubkey=None, capabilities=None,
|
| 976 |
+
pow_nonce=None, pow_hash=None,
|
| 977 |
+
heard_from=None
|
| 978 |
):
|
| 979 |
c = self.conn.cursor()
|
| 980 |
|
|
|
|
| 984 |
existing_addresses = []
|
| 985 |
existing_pubkey = None
|
| 986 |
existing_capabilities = {}
|
| 987 |
+
existing_heard_from = []
|
| 988 |
|
| 989 |
if peer_id:
|
| 990 |
+
c.execute("SELECT addresses, pubkey, capabilities, heard_from FROM agent_peers WHERE id=?", (peer_id,))
|
| 991 |
row = c.fetchone()
|
| 992 |
if row:
|
| 993 |
+
db_addresses_json, existing_pubkey, db_caps_json, db_heard_from = row
|
| 994 |
try:
|
| 995 |
existing_addresses = json.loads(db_addresses_json) or []
|
| 996 |
except:
|
|
|
|
| 999 |
existing_capabilities = json.loads(db_caps_json) if db_caps_json else {}
|
| 1000 |
except:
|
| 1001 |
existing_capabilities = {}
|
| 1002 |
+
try:
|
| 1003 |
+
existing_heard_from = json.loads(db_heard_from) if db_heard_from else []
|
| 1004 |
+
except:
|
| 1005 |
+
existing_heard_from = []
|
| 1006 |
|
| 1007 |
+
# объединяем адреса
|
| 1008 |
combined_addresses = list({self.normalize_address(a) for a in (*existing_addresses, *addresses)})
|
|
|
|
| 1009 |
final_pubkey = pubkey or existing_pubkey
|
| 1010 |
final_capabilities = capabilities or existing_capabilities
|
| 1011 |
|
| 1012 |
+
# обновляем heard_from
|
| 1013 |
+
combined_heard_from = list(set(existing_heard_from + (heard_from or [])))
|
| 1014 |
+
|
| 1015 |
+
# TODO: Проверка PoW (например, pow_hash.startswith("0000"))
|
| 1016 |
+
if pow_hash and pow_nonce:
|
| 1017 |
+
# простейшая проверка PoW
|
| 1018 |
+
import hashlib
|
| 1019 |
+
base = f"{peer_id}{final_pubkey}{''.join(combined_addresses)}{pow_nonce}".encode()
|
| 1020 |
+
calc_hash = hashlib.sha256(base).hexdigest()
|
| 1021 |
+
if calc_hash != pow_hash:
|
| 1022 |
+
print(f"[WARN] Peer {peer_id} failed PoW validation")
|
| 1023 |
+
return # не вставляем
|
| 1024 |
+
|
| 1025 |
c.execute("""
|
| 1026 |
+
INSERT INTO agent_peers (id, name, addresses, source, status, last_seen, pubkey, capabilities, pow_nonce, pow_hash, heard_from)
|
| 1027 |
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
| 1028 |
ON CONFLICT(id) DO UPDATE SET
|
| 1029 |
name=excluded.name,
|
| 1030 |
addresses=excluded.addresses,
|
|
|
|
| 1032 |
status=excluded.status,
|
| 1033 |
last_seen=excluded.last_seen,
|
| 1034 |
pubkey=excluded.pubkey,
|
| 1035 |
+
capabilities=excluded.capabilities,
|
| 1036 |
+
pow_nonce=excluded.pow_nonce,
|
| 1037 |
+
pow_hash=excluded.pow_hash,
|
| 1038 |
+
heard_from=excluded.heard_from
|
| 1039 |
""", (
|
| 1040 |
peer_id,
|
| 1041 |
name,
|
|
|
|
| 1044 |
status,
|
| 1045 |
datetime.now(UTC).isoformat(),
|
| 1046 |
final_pubkey,
|
| 1047 |
+
json.dumps(final_capabilities),
|
| 1048 |
+
pow_nonce,
|
| 1049 |
+
pow_hash,
|
| 1050 |
+
json.dumps(combined_heard_from)
|
| 1051 |
))
|
| 1052 |
self.conn.commit()
|
| 1053 |
|