Coverage for integrations / social / api.py: 34.3%

2018 statements  

« prev     ^ index     » next       coverage.py v7.14.0, created at 2026-05-12 04:49 +0000

1""" 

2HARTSocial - Flask Blueprint API 

3~82 REST endpoints at /api/social. 

4Compatible with both Nunba web app and HART React Native CommunityView. 

5""" 

6import json 

7import os 

8import logging 

9from flask import Blueprint, request, jsonify, g 

10 

11from .auth import require_auth, optional_auth, require_admin, require_moderator, revoke_token 

12from .rate_limiter import rate_limit, get_limiter 

13from .services import ( 

14 UserService, PostService, CommentService, VoteService, 

15 FollowService, CommunityService, NotificationService, ReportService, 

16) 

17from .feed_engine import ( 

18 get_personalized_feed, get_global_feed, get_trending_feed, get_agent_feed 

19) 

20from .karma_engine import recalculate_karma, get_karma_breakdown 

21from datetime import datetime 

22from .models import ( 

23 get_db, db_session, Post, Comment, User, Community, TaskRequest, Report, 

24 AgentSkillBadge, AdUnit, AdImpression, APIUsageLog, CommercialAPIKey, 

25 AgentGoal, Boost, Campaign, AgentEvolution, AgentCollaboration, 

26 ResonanceTransaction, HostingReward, Follow, 

27) 

28from .schemas import APIResponse, PaginationMeta 

29from sqlalchemy.orm import joinedload 

30 

# Module-wide logger shared by every endpoint below.
logger = logging.getLogger('hevolve_social')

# Blueprint mounting all the social REST endpoints under /api/social.
social_bp = Blueprint('social', __name__, url_prefix='/api/social')

34 

35 

def _ok(data=None, meta=None, status=200):
    """Build a success JSON response; include data/meta only when given."""
    payload = {'success': True}
    for key, value in (('data', data), ('meta', meta)):
        if value is not None:
            payload[key] = value
    return jsonify(payload), status

43 

44 

def _err(msg, status=400):
    """Build a failure JSON response carrying an error message."""
    body = {'success': False, 'error': msg}
    return jsonify(body), status

47 

48 

49def _paginate(total, limit, offset): 

50 return {'total': total, 'limit': limit, 'offset': offset, 

51 'has_more': offset + limit < total} 

52 

53 

def _get_json():
    """Parse the request body as JSON, returning {} on absent/invalid bodies."""
    parsed = request.get_json(force=True, silent=True)
    return parsed if parsed else {}

56 

57 

def requires_flag(flag_name, else_value=None):
    """Gate an endpoint behind a per-tenant feature flag.

    When the flag is off, the wrapped view is not called. Mutating
    endpoints get a 503 with a clear error by default (the client
    should know the feature isn't available); read endpoints that
    should gracefully degrade can pass `else_value=[]` (or any
    JSON-able default) to get _ok(else_value) instead.

    Flags are read from `g.feature_flags`, populated by
    `auth.require_auth` from per-tenant settings + env defaults
    (see plan Part E.1) — apply this decorator AFTER `@require_auth`
    so `g.feature_flags` is guaranteed to be set.
    """
    from functools import wraps

    def decorator(view):
        @wraps(view)
        def wrapper(*args, **kwargs):
            flags = getattr(g, 'feature_flags', {}) or {}
            # Guard clause: flag on → straight through to the view.
            if flags.get(flag_name, False):
                return view(*args, **kwargs)
            if else_value is not None:
                return _ok(else_value)
            return _err(f"{flag_name} feature flag is off", 503)
        return wrapper
    return decorator

85 

86 

87# ═══════════════════════════════════════════════════════════════ 

88# AUTH 

89# ═══════════════════════════════════════════════════════════════ 

90 

@social_bp.route('/auth/register', methods=['POST'])
@rate_limit('register')
def register():
    """Register a new human or agent account.

    Body: username (alias: name); password (required unless
    user_type == 'agent'); optional email / display_name / user_type;
    for agents: description / agent_id / owner_id. An optional
    referral_code is applied best-effort after the account exists.
    Returns 201 with the user dict including its auth token, or 400
    on validation failure.
    """
    data = _get_json()
    username = data.get('username') or data.get('name', '')
    password = data.get('password', '')
    if not username:
        return _err("username required")
    # Agents authenticate via tokens, so no password is required for them.
    if not password and data.get('user_type') != 'agent':
        return _err("password required")

    # Security: Validate username and password format
    try:
        from security.sanitize import validate_username, validate_password
        validate_username(username)
        if password:
            validate_password(password)
    except ImportError:
        pass  # Security module not available
    except ValueError as e:
        return _err(str(e))

    try:
        with db_session() as db:
            if data.get('user_type') == 'agent':
                user = UserService.register_agent(
                    db, username, data.get('description', ''),
                    data.get('agent_id'), data.get('owner_id'))
            else:
                user = UserService.register(
                    db, username, password, data.get('email'),
                    data.get('display_name'), data.get('user_type', 'human'))

            # Apply referral code if provided (one-step signup)
            referral_code = data.get('referral_code', '').strip()
            if referral_code and user:
                try:
                    from .distribution_service import DistributionService
                    DistributionService.use_referral_code(db, str(user.id), referral_code)
                except Exception as e:
                    # Best-effort: a bad referral code must not block signup.
                    logger.debug(f"Referral code application skipped: {e}")

            return _ok(user.to_dict(include_token=True), status=201)
    except ValueError as e:
        # Service-level validation errors (e.g. duplicate username) → 400.
        return _err(str(e))

136 

137 

@social_bp.route('/auth/login', methods=['POST'])
@rate_limit('auth')
def login():
    """Authenticate with username/password; return the user dict + token."""
    body = _get_json()
    username = body.get('username', '')
    password = body.get('password', '')
    try:
        with db_session() as db:
            user, token = UserService.login(db, username, password)
            return _ok({'user': user.to_dict(), 'token': token})
    except ValueError as e:
        # Bad credentials (service raises ValueError) → 401.
        return _err(str(e), 401)

148 

149 

@social_bp.route('/auth/logout', methods=['POST'])
@require_auth
def logout():
    """Revoke the caller's bearer token.

    Fix: the original unconditionally sliced the first 7 characters
    off the Authorization header (assuming a "Bearer " prefix). With
    a missing or differently-formed header that mangles the token and
    revokes the wrong string. Strip the prefix only when present.
    """
    auth_header = request.headers.get('Authorization', '')
    token = auth_header[7:] if auth_header.startswith('Bearer ') else auth_header
    revoke_token(token)
    return _ok({'message': 'Logged out'})

156 

157 

@social_bp.route('/auth/me', methods=['GET'])
@require_auth
def get_me():
    """Return the authenticated caller's own profile (token excluded)."""
    me = g.user
    return _ok(me.to_dict(include_token=False))

162 

163 

164# ─── Guest identity persistence ─── 

165 

# Word list for the 6-word guest recovery codes issued by
# guest_register (drawn with `secrets.choice`, 48 words → 48**6
# combinations). Codes are stored only as hashes (see GuestRecovery
# rows), so the list itself need not be secret — but removing or
# changing words would not invalidate codes already issued.
_RECOVERY_WORDS = (
    'amber', 'breeze', 'coral', 'drift', 'ember', 'frost',
    'gleam', 'haven', 'ivory', 'jade', 'knoll', 'lark',
    'maple', 'north', 'oasis', 'pearl', 'quill', 'ridge',
    'shore', 'thorn', 'unity', 'vale', 'wren', 'xenon',
    'birch', 'cedar', 'delta', 'fable', 'glade', 'heron',
    'inlet', 'junco', 'kelp', 'lotus', 'marsh', 'nook',
    'olive', 'plume', 'quest', 'river', 'stone', 'trail',
    'umber', 'vivid', 'wisp', 'yarrow', 'zephyr', 'bloom',
)

176 

177 

@social_bp.route('/auth/guest-register', methods=['POST'])
@rate_limit('register')
def guest_register():
    """Create or update a guest User, return JWT + one-time recovery code.

    IDEMPOTENCE ON device_id (2026-04-15 fix for chat-persistence bug):
    If the client supplies `device_id` AND a guest User already has a
    GuestRecovery row with that same device_id, RETURN the existing
    User with a fresh JWT — do NOT create a new user. Previously every
    call created a new User, so relaunching Nunba (which refreshes the
    expired JWT via this endpoint) lost the user's prior agents + chat
    history. The user's 6-word recovery_code is NOT reissued on the
    idempotent path (their saved code still works).

    Back-compat: calls without `device_id` behave as before (always
    create new user).
    """
    import secrets
    from .auth import hash_password, generate_api_token, generate_jwt
    from .models import GuestRecovery

    data = _get_json()
    guest_name = data.get('guest_name', '').strip()
    device_id = data.get('device_id', '')
    if not guest_name:
        return _err("guest_name required")

    # NOTE(review): manual session (get_db / close) instead of the
    # db_session() context manager used elsewhere — presumably for
    # explicit commit/rollback control; confirm before converting.
    db = get_db()
    try:
        # ── Idempotent path: existing user for this device ──
        if device_id:
            existing = (
                db.query(GuestRecovery)
                .filter(GuestRecovery.device_id == device_id)
                .order_by(GuestRecovery.id.desc())  # most recent row wins
                .first()
            )
            if existing:
                existing_user = (
                    db.query(User).filter_by(id=existing.user_id).first()
                )
                # Only short-circuit for true guests; any other
                # user_type falls through to the create path.
                if existing_user and existing_user.user_type == 'guest':
                    # Fresh JWT — client likely re-registered because
                    # the old one expired. No new recovery code (would
                    # invalidate their saved one).
                    token = generate_jwt(
                        existing_user.id, existing_user.username, 'guest',
                    )
                    return _ok({
                        'user': existing_user.to_dict(),
                        'token': token,
                        'recovery_code': None,
                        'existing': True,
                    }, status=200)

        # ── Genuine create path (unchanged behavior) ──
        suffix = secrets.token_hex(3)  # 6 hex chars to avoid username collisions
        username = f"guest_{guest_name.replace(' ', '_').lower()}_{suffix}"
        user = User(
            username=username,
            display_name=guest_name,
            user_type='guest',
            role='guest',
            api_token=generate_api_token(),
        )
        db.add(user)
        db.flush()  # assign user.id before building the recovery row

        # Generate 6-word recovery code
        recovery_code = ' '.join(secrets.choice(_RECOVERY_WORDS) for _ in range(6))
        gr = GuestRecovery(
            user_id=user.id,
            recovery_code_hash=hash_password(recovery_code),  # only the hash is stored
            device_id=device_id or None,
        )
        db.add(gr)
        db.commit()

        token = generate_jwt(user.id, user.username, 'guest')
        return _ok({
            'user': user.to_dict(),
            'token': token,
            'recovery_code': recovery_code,  # shown to the client exactly once
        }, status=201)
    except Exception as e:
        db.rollback()
        logger.error(f"Guest register failed: {e}")
        return _err(str(e))
    finally:
        db.close()

268 

269 

@social_bp.route('/auth/guest-recover', methods=['POST'])
@rate_limit('auth')
def guest_recover():
    """Recover guest identity using the 6-word recovery code.

    Verifies the submitted code against every GuestRecovery row's
    hash in turn. NOTE(review): this is a full-table scan performing
    one password-hash verification per row — O(rows) slow hashes per
    attempt. Acceptable at small scale; revisit (e.g. store a fast
    lookup key beside the hash) if the guest table grows.
    """
    from .auth import verify_password, generate_jwt
    from .models import GuestRecovery
    from datetime import datetime

    data = _get_json()
    recovery_code = data.get('recovery_code', '').strip()
    device_id = data.get('device_id', '')
    if not recovery_code:
        return _err("recovery_code required")

    try:
        with db_session() as db:
            rows = db.query(GuestRecovery).all()
            for gr in rows:
                if verify_password(recovery_code, gr.recovery_code_hash):
                    user = db.query(User).filter_by(id=gr.user_id).first()
                    if not user:
                        # Orphaned recovery row — keep scanning.
                        continue
                    gr.last_used_at = datetime.utcnow()
                    # Re-bind the recovery row to the recovering device.
                    gr.device_id = device_id or gr.device_id
                    token = generate_jwt(user.id, user.username, user.role or 'guest')
                    return _ok({'user': user.to_dict(), 'token': token})
            return _err("Invalid recovery code", 401)
    except Exception as e:
        logger.error(f"Guest recover failed: {e}")
        return _err(str(e))

300 

301 

302# ─── Token refresh ─── 

303 

@social_bp.route('/auth/refresh', methods=['POST'])
@rate_limit('auth')
def refresh_token():
    """Exchange a refresh token for a fresh access token."""
    refresh = _get_json().get('refresh_token', '')
    if not refresh:
        return _err("refresh_token required")

    try:
        from security.jwt_manager import JWTManager
        outcome = JWTManager().refresh_access_token(refresh)
        if outcome:
            return _ok(outcome)
        return _err("Invalid or expired refresh token", 401)
    except Exception as e:
        # Covers both an unavailable security module and manager failures.
        logger.error(f"Token refresh failed: {e}")
        return _err("Token refresh unavailable", 500)

323 

324 

325# ─── Cross-node user verification ─── 

326 

@social_bp.route('/auth/verify-user', methods=['GET'])
@require_auth
def verify_user_for_node():
    """Central endpoint: regional nodes verify a user exists.

    Requires a central/regional role, or admin/moderator privileges.
    Returns minimal user info for cross-node identity confirmation.

    Fix: removed the unused `from .auth import require_regional`
    import — the role check is performed inline below, and the
    imported decorator was never applied.
    """
    user_role = getattr(g.user, 'role', None) or 'flat'
    if user_role not in ('central', 'regional') and not (g.user.is_admin or g.user.is_moderator):
        return _err("Regional access required", 403)

    target_user_id = request.args.get('user_id', '')
    if not target_user_id:
        return _err("user_id query parameter required")

    target = g.db.query(User).filter_by(id=target_user_id).first()
    if not target:
        return _err("User not found", 404)

    return _ok({
        'user_id': str(target.id),
        'username': target.username,
        'handle': target.handle or '',
        'role': target.role or 'flat',
        'is_banned': target.is_banned,
    })

355 

356 

@social_bp.route('/auth/sync-user', methods=['POST'])
@rate_limit('auth')
def sync_user_from_central():
    """Receive user sync from central node.

    The caller must present a valid hive token plus its Ed25519
    public key; the token signature is verified before any sync.
    """
    data = _get_json()
    auth_header = request.headers.get('Authorization', '')
    token = auth_header[7:] if auth_header.startswith('Bearer ') else ''

    node_public_key = data.get('node_public_key', '')
    user_data = data.get('user_data', {})
    if not (token and node_public_key and user_data):
        return _err("token, node_public_key, and user_data required")

    # Verify the hive token from the calling node
    from .auth import verify_hive_jwt
    if not verify_hive_jwt(token, node_public_key):
        return _err("Invalid hive token or node signature", 401)

    # Process the user sync
    try:
        from .sync_engine import SyncEngine
        with db_session() as db:
            SyncEngine._handle_sync_user(db, user_data)
        return _ok({'synced': True})
    except Exception as e:
        logger.error(f"User sync failed: {e}")
        return _err(str(e), 500)

392 

393 

394# ═══════════════════════════════════════════════════════════════ 

395# USERS / PROFILES 

396# ═══════════════════════════════════════════════════════════════ 

397 

@social_bp.route('/users', methods=['GET'])
@optional_auth
def list_users():
    """List users, optionally filtered by ?type=, with pagination.

    Fix: non-numeric ?limit= / ?offset= previously raised ValueError
    (an unhandled HTTP 500); Werkzeug's `type=int` coercion now falls
    back to the defaults instead. Values are also clamped to sane
    bounds (limit 1..100, offset >= 0).
    """
    user_type = request.args.get('type')
    # args.get(..., type=int) returns the default when coercion fails.
    limit = request.args.get('limit', 25, type=int)
    offset = request.args.get('offset', 0, type=int)
    limit = max(1, min(limit if limit is not None else 25, 100))
    offset = max(0, offset if offset is not None else 0)
    users, total = UserService.list_users(g.db, user_type, limit, offset)
    return _ok([u.to_dict() for u in users], _paginate(total, limit, offset))

406 

407 

@social_bp.route('/users/<user_id>', methods=['GET'])
@optional_auth
def get_user(user_id):
    """Fetch a user by id, falling back to username lookup."""
    user = (UserService.get_by_id(g.db, user_id)
            or UserService.get_by_username(g.db, user_id))
    if not user:
        return _err("User not found", 404)
    payload = user.to_dict()
    # Include follow status if authenticated
    if g.user:
        payload['is_following'] = FollowService.is_following(g.db, g.user.id, user.id)
    return _ok(payload)

422 

423 

@social_bp.route('/users/<user_id>', methods=['PATCH'])
@require_auth
def update_user(user_id):
    """Update a profile. Users may edit themselves; admins may edit anyone.

    Fix: the original always passed `g.user` to
    UserService.update_profile, so an admin PATCHing another user's
    profile silently edited the admin's OWN profile. Resolve the
    target user from the URL and update that record instead.
    """
    if g.user.id != user_id and not g.user.is_admin:
        return _err("Cannot edit another user's profile", 403)
    target = g.user
    if g.user.id != user_id:
        # Admin path: load the actual user named in the URL.
        target = (UserService.get_by_id(g.db, user_id)
                  or UserService.get_by_username(g.db, user_id))
        if not target:
            return _err("User not found", 404)
    data = _get_json()
    try:
        user = UserService.update_profile(
            g.db, target, data.get('display_name'), data.get('bio'),
            data.get('avatar_url'), data.get('handle'))
        return _ok(user.to_dict())
    except ValueError as e:
        return _err(str(e))

437 

438 

439# ───────────────────────────────────────────────────────────────── 

440# Phase 7c.1 — Friendship state machine + Block. 

441# Plan reference: sunny-gliding-eich.md, Part E.8 + Part R.5. 

442# Coexists with the existing one-direction Follow endpoints in 

443# usersApi.follow / unfollow / followers / following — those are 

444# preserved as-is (legacy code reading the follow graph still works). 

445# ───────────────────────────────────────────────────────────────── 

446 

@social_bp.route('/friends/request', methods=['POST'])
@require_auth
@requires_flag('friends_v2')
def friend_request():
    """Send a friend request to target_user_id (alias: user_id)."""
    body = _get_json()
    recipient = body.get('target_user_id') or body.get('user_id')
    if not recipient:
        return _err("target_user_id required")
    from .friend_service import FriendService, FriendError
    try:
        outcome = FriendService.send_request(
            g.db, from_user_id=g.user.id, to_user_id=recipient,
            tenant_id=getattr(g, 'tenant_id', None))
    except FriendError as e:
        return _err(str(e), 400)
    return _ok(outcome)

463 

464 

@social_bp.route('/friends/request/<friendship_id>/accept', methods=['POST'])
@require_auth
@requires_flag('friends_v2')
def friend_accept(friendship_id):
    """Accept a pending friend request addressed to the caller."""
    from .friend_service import FriendService, FriendError
    try:
        outcome = FriendService.accept(g.db, friendship_id, g.user.id)
    except FriendError as e:
        return _err(str(e), 400)
    return _ok(outcome)

475 

476 

@social_bp.route('/friends/request/<friendship_id>/reject', methods=['POST'])
@require_auth
@requires_flag('friends_v2')
def friend_reject(friendship_id):
    """Reject a pending friend request addressed to the caller."""
    from .friend_service import FriendService, FriendError
    try:
        outcome = FriendService.reject(g.db, friendship_id, g.user.id)
    except FriendError as e:
        return _err(str(e), 400)
    return _ok(outcome)

487 

488 

@social_bp.route('/friends/request/<friendship_id>/cancel', methods=['POST'])
@require_auth
@requires_flag('friends_v2')
def friend_cancel(friendship_id):
    """Cancel a friend request the caller previously sent."""
    from .friend_service import FriendService, FriendError
    try:
        outcome = FriendService.cancel(g.db, friendship_id, g.user.id)
    except FriendError as e:
        return _err(str(e), 400)
    return _ok(outcome)

499 

500 

@social_bp.route('/friends/<user_id>/unfriend', methods=['POST'])
@require_auth
@requires_flag('friends_v2')
def friend_unfriend(user_id):
    """Dissolve an existing friendship between the caller and user_id."""
    from .friend_service import FriendService, FriendError
    try:
        outcome = FriendService.unfriend(
            g.db, requester_id=g.user.id, other_id=user_id)
    except FriendError as e:
        return _err(str(e), 400)
    return _ok(outcome)

512 

513 

@social_bp.route('/friends', methods=['GET'])
@require_auth
@requires_flag('friends_v2', else_value=[])
def list_friends():
    """List the caller's friendships filtered by ?status= (default active)."""
    status = request.args.get('status', 'active')
    allowed = ('active', 'pending', 'rejected', 'blocked', 'all')
    if status not in allowed:
        return _err("invalid status", 400)
    from .friend_service import FriendService
    rows = FriendService.list_friends(g.db, g.user.id, status)
    return _ok(rows)

523 

524 

@social_bp.route('/friends/blocks', methods=['GET'])
@require_auth
@requires_flag('friends_v2', else_value=[])
def list_blocks():
    """List the users the caller has blocked."""
    from .friend_service import FriendService
    rows = FriendService.list_blocks(g.db, g.user.id)
    return _ok(rows)

531 

532 

@social_bp.route('/friends/<user_id>/block', methods=['POST'])
@require_auth
@requires_flag('friends_v2')
def friend_block(user_id):
    """Block another user, optionally recording a reason."""
    body = _get_json() or {}
    from .friend_service import FriendService, FriendError
    try:
        outcome = FriendService.block(
            g.db, blocker_id=g.user.id, blocked_id=user_id,
            reason=body.get('reason'),
            tenant_id=getattr(g, 'tenant_id', None))
    except FriendError as e:
        return _err(str(e), 400)
    return _ok(outcome)

547 

548 

@social_bp.route('/friends/<user_id>/unblock', methods=['POST'])
@require_auth
@requires_flag('friends_v2')
def friend_unblock(user_id):
    """Remove a block previously placed by the caller."""
    from .friend_service import FriendService
    outcome = FriendService.unblock(
        g.db, blocker_id=g.user.id, blocked_id=user_id)
    return _ok(outcome)

557 

558 

559# ───────────────────────────────────────────────────────────────── 

560# Phase 7c.2 — Invites (community + conversation, polymorphic). 

561# Plan reference: sunny-gliding-eich.md, Part E.9. 

562# ───────────────────────────────────────────────────────────────── 

563 

@social_bp.route('/invites', methods=['POST'])
@require_auth
@requires_flag('invites_v2')
def invite_send():
    """Create a pending invite to a community or conversation.

    Body (JSON):
        parent_kind: 'community' | 'conversation' (required)
        parent_id: str (required)
        invitee_id: str (optional — targeted invite)
        invitee_email: str (optional — off-platform invite)
        role_offered: 'member' | 'mod' | 'admin' (default: 'member')
        expires_in_days: int (default 7; pass 0 for no expiry)

    When neither invitee_id nor invitee_email is supplied, the
    response carries an `invite_code` for a shareable link.
    """
    body = _get_json()
    parent_kind = body.get('parent_kind')
    parent_id = body.get('parent_id')
    if not (parent_kind and parent_id):
        return _err("parent_kind and parent_id required")
    from .invite_service import InviteService, InviteError
    try:
        outcome = InviteService.send(
            g.db, parent_kind=parent_kind, parent_id=parent_id,
            invited_by=g.user.id,
            invitee_id=body.get('invitee_id'),
            invitee_email=body.get('invitee_email'),
            role_offered=body.get('role_offered', 'member'),
            expires_in_days=body.get('expires_in_days', 7),
            tenant_id=getattr(g, 'tenant_id', None))
    except InviteError as e:
        return _err(str(e), 400)
    return _ok(outcome)

601 

602 

@social_bp.route('/invites/<invite_id>/accept', methods=['POST'])
@require_auth
@requires_flag('invites_v2')
def invite_accept(invite_id):
    """Accept an invite by id or shareable code."""
    from .invite_service import InviteService, InviteError
    try:
        outcome = InviteService.accept(
            g.db, invite_id_or_code=invite_id,
            accepter_id=g.user.id,
            tenant_id=getattr(g, 'tenant_id', None))
    except InviteError as e:
        return _err(str(e), 400)
    return _ok(outcome)

616 

617 

@social_bp.route('/invites/<invite_id>/reject', methods=['POST'])
@require_auth
@requires_flag('invites_v2')
def invite_reject(invite_id):
    """Reject an invite by id or shareable code."""
    from .invite_service import InviteService, InviteError
    try:
        outcome = InviteService.reject(
            g.db, invite_id_or_code=invite_id, rejecter_id=g.user.id)
    except InviteError as e:
        return _err(str(e), 400)
    return _ok(outcome)

629 

630 

@social_bp.route('/invites/incoming', methods=['GET'])
@require_auth
@requires_flag('invites_v2', else_value=[])
def invite_incoming():
    """List invites addressed to the caller (?include_responded=true)."""
    raw = request.args.get('include_responded', 'false')
    include_responded = raw.lower() == 'true'
    from .invite_service import InviteService
    rows = InviteService.list_incoming(
        g.db, g.user.id, include_responded=include_responded)
    return _ok(rows)

640 

641 

@social_bp.route('/invites/code/<code>', methods=['GET'])
@require_auth
@requires_flag('invites_v2')
def invite_resolve(code):
    """Preview a shareable invite link before accepting."""
    from .invite_service import InviteService
    preview = InviteService.resolve_code(g.db, code)
    if preview is None:
        return _err("invite not found or expired", 404)
    return _ok(preview)

652 

653 

654# ───────────────────────────────────────────────────────────────── 

655# UNIF-G4 — Deep-link dispatcher. 

656# 

657# Single canonical writer for OS-protocol-handler intents from every 

658# Nunba client (Windows/macOS/Linux desktop, iOS, Android). Each 

659# client receives a `nunba://` or `hevolveai://` URI from the OS, 

660# parses the verb + args, and POSTs to this endpoint. Server-side 

661# dispatch keeps the routing logic DRY across all 3 platforms — no 

662# parallel client-side switch statements. 

663# 

664# Verbs: 

665# invite/<code> → InviteService.accept (canonical) 

666# meet/<platform>/<room> → Join_External_Room agent tool (UNIF-G2) 

667# group/<platform>/<id> → Join_External_Room agent tool with 

668# role='participant' (UNIF-G2) 

669# ───────────────────────────────────────────────────────────────── 

670 

@social_bp.route('/deeplink', methods=['POST'])
@require_auth
def deeplink_dispatch():
    """Route a parsed custom-scheme deep link to the right handler.

    Body: {scheme, kind, segments} — the client parses the
    `nunba://` / `hevolveai://` URI and POSTs the components here.
    Verbs: invite/<code>, meet/<platform>/<room>,
    group/<platform>/<id> (see module comment above for mapping).
    """
    data = _get_json()
    kind = (data.get('kind') or '').lower()
    segments = data.get('segments') or []
    if not isinstance(segments, list):
        segments = []  # defensively coerce malformed payloads
    scheme = (data.get('scheme') or 'hevolveai').lower()

    # Reuse the canonical validator from core.install_links rather
    # than re-implementing scheme/verb checks here (DRY).
    try:
        from core.install_links import (
            DEEPLINK_SCHEMES, DEEPLINK_VERBS, is_allowed_deeplink_uri,
        )
    except Exception:
        # Fallback when core.install_links is unavailable: hard-coded
        # scheme/verb lists, and the allowlist guard accepts everything.
        DEEPLINK_SCHEMES = ('hevolveai', 'nunba')
        DEEPLINK_VERBS = ('invite', 'meet', 'group')
        is_allowed_deeplink_uri = lambda u: True  # noqa: E731

    if scheme not in DEEPLINK_SCHEMES:
        return _err(f"unsupported scheme {scheme!r}", 400)
    if kind not in DEEPLINK_VERBS:
        return _err(f"unsupported verb {kind!r}", 400)
    if not segments:
        return _err("empty deeplink path", 400)

    # Reconstruct the canonical URI form for the allowlist guard.
    candidate = f"{scheme}://{kind}/{'/'.join(str(s) for s in segments)}"
    if not is_allowed_deeplink_uri(candidate):
        return _err(f"deep link rejected: {candidate}", 400)

    if kind == 'invite':
        # segments = [<code>] — InviteService.accept handles both
        # invite_id and code via invite_id_or_code parameter.
        code = str(segments[0])
        try:
            from .invite_service import InviteService, InviteError
            result = InviteService.accept(
                g.db, invite_id_or_code=code,
                accepter_id=g.user.id,
                tenant_id=getattr(g, 'tenant_id', None))
            return _ok({'kind': 'invite', 'accepted': True,
                        'invite': result})
        except InviteError as e:
            return _err(str(e), 400)

    # meet / group → ask the agent to route via Join_External_Room.
    # Same canonical agent tool already used for natural-language
    # "join my Discord audio room <url>" — no parallel path.
    if len(segments) < 2:
        return _err(f"{kind} requires <platform>/<id>", 400)
    platform_name = str(segments[0]).lower()
    target = str(segments[1])
    role = 'note_taker' if kind == 'meet' else 'participant'

    try:
        # Synthesize a Join_External_Room tool input — the canonical
        # handler at hart_intelligence_entry._handle_join_external_room_tool
        # validates consent, joins the room, and emits the meet_copilot
        # Liquid UI card.
        from hart_intelligence_entry import (
            _handle_join_external_room_tool,
        )
        import json as _json
        tool_input = _json.dumps({
            'platform': platform_name,
            'room': target,
            'role': role,
            'source': 'deeplink',
        })
        out = _handle_join_external_room_tool(tool_input)
        return _ok({'kind': kind, 'platform': platform_name,
                    'target': target, 'role': role,
                    'tool_response': out})
    except Exception as e:
        return _err(f"deeplink dispatch failed: {e}", 500)

750 

751 

752# ───────────────────────────────────────────────────────────────── 

753# Phase 7c.4 — Emoji reactions on posts / comments / messages. 

754# Plan reference: sunny-gliding-eich.md, Part E.6. 

755# Polymorphic source_kind so a single set of routes serves all three 

756# reactable surfaces. 

757# ───────────────────────────────────────────────────────────────── 

758 

def _reaction_route(source_kind):
    """Build the (toggle, list, remove) view triple for one source_kind.

    A single shared implementation serves all three reactable
    surfaces (posts, comments, messages); each returned closure is
    registered below under a unique Flask endpoint name.
    """
    def _reactions_enabled():
        # Flag checked inline (not via requires_flag) because these
        # views are registered with add_url_rule, not @decorators.
        return g.feature_flags.get('reactions', False)

    def toggle(source_id):
        if not _reactions_enabled():
            return _err("reactions feature flag is off", 503)
        emoji = _get_json().get('emoji')
        if not emoji:
            return _err("emoji required")
        from .reaction_service import ReactionService, ReactionError
        try:
            outcome = ReactionService.toggle(
                g.db, source_kind=source_kind, source_id=source_id,
                user_id=g.user.id, emoji=emoji,
                tenant_id=getattr(g, 'tenant_id', None))
        except ReactionError as e:
            return _err(str(e), 400)
        return _ok(outcome)

    def list_(source_id):
        if not _reactions_enabled():
            # Read endpoint degrades gracefully when the flag is off.
            return _ok([])
        from .reaction_service import ReactionService
        rows = ReactionService.list_for(
            g.db, source_kind=source_kind, source_id=source_id,
            viewer_id=g.user.id,
            tenant_id=getattr(g, 'tenant_id', None))
        return _ok(rows)

    def remove(source_id, emoji):
        if not _reactions_enabled():
            return _err("reactions feature flag is off", 503)
        from .reaction_service import ReactionService, ReactionError
        try:
            outcome = ReactionService.remove(
                g.db, source_kind=source_kind, source_id=source_id,
                user_id=g.user.id, emoji=emoji,
                tenant_id=getattr(g, 'tenant_id', None))
        except ReactionError as e:
            return _err(str(e), 400)
        return _ok(outcome)

    return toggle, list_, remove

803 

804 

# Instantiate one (toggle, list, remove) view triple per reactable
# surface, then register each under a unique Flask endpoint name.
# require_auth is applied as a plain function call (not a @decorator)
# because the views come from the _reaction_route factory.
_post_react_toggle, _post_react_list, _post_react_remove = _reaction_route('post')
_comment_react_toggle, _comment_react_list, _comment_react_remove = _reaction_route('comment')
_message_react_toggle, _message_react_list, _message_react_remove = _reaction_route('message')

social_bp.add_url_rule('/posts/<source_id>/reactions',
    'post_reactions_toggle', require_auth(_post_react_toggle), methods=['POST'])
social_bp.add_url_rule('/posts/<source_id>/reactions',
    'post_reactions_list', require_auth(_post_react_list), methods=['GET'])
social_bp.add_url_rule('/posts/<source_id>/reactions/<emoji>',
    'post_reactions_remove', require_auth(_post_react_remove), methods=['DELETE'])

social_bp.add_url_rule('/comments/<source_id>/reactions',
    'comment_reactions_toggle', require_auth(_comment_react_toggle), methods=['POST'])
social_bp.add_url_rule('/comments/<source_id>/reactions',
    'comment_reactions_list', require_auth(_comment_react_list), methods=['GET'])
social_bp.add_url_rule('/comments/<source_id>/reactions/<emoji>',
    'comment_reactions_remove', require_auth(_comment_react_remove), methods=['DELETE'])

social_bp.add_url_rule('/messages/<source_id>/reactions',
    'message_reactions_toggle', require_auth(_message_react_toggle), methods=['POST'])
social_bp.add_url_rule('/messages/<source_id>/reactions',
    'message_reactions_list', require_auth(_message_react_list), methods=['GET'])
social_bp.add_url_rule('/messages/<source_id>/reactions/<emoji>',
    'message_reactions_remove', require_auth(_message_react_remove), methods=['DELETE'])

829 

830 

831# ───────────────────────────────────────────────────────────────── 

832# Phase 7c.6 — /sync endpoint (multi-device backfill / restore). 

833# Plan reference: sunny-gliding-eich.md, Part R.3 + Part W.1.b. 

834# 

835# Single source of truth for new-device cold pull, reconnect catch-up, 

836# and the WAMP-push-then-pull pattern. The transport choice (live 

837# fan-out via MessageBus vs catch-up via /sync) is decided client-side 

838# based on connectivity. This endpoint is request-response only; 

839# real-time fan-out continues through the existing 

840# LOCAL → SSE → PEERLINK → CROSSBAR pipeline. 

841# ───────────────────────────────────────────────────────────────── 

842 

@social_bp.route('/sync', methods=['GET'])
@require_auth
@requires_flag('sync_v1', else_value={'cursor': '0', 'has_more': False, 'deltas': {}})
def sync_deltas():
    """Return the caller's deltas across every social kind since cursor.

    Query params:
        since   opaque cursor string from previous response;
                default = full backfill from epoch.
        kinds   CSV of kinds (conversations|messages|friendships|
                blocks|invites|mentions|memberships|notifications);
                default = all.
        limit   per-kind row cap (1..500, default 200).

    Response shape:
        { cursor: '<advance-to-this>', has_more: bool,
          deltas: { '<kind>': [...rows...] } }
    """
    since = request.args.get('since')
    kinds_raw = request.args.get('kinds')
    kinds = None
    if kinds_raw:
        kinds = [k.strip() for k in kinds_raw.split(',')]
    try:
        limit = int(request.args.get('limit', 200))
    except (TypeError, ValueError):
        limit = 200  # bad input falls back to the default cap
    else:
        limit = max(1, min(limit, 500))
    from .sync_service import SyncService
    deltas = SyncService.deltas(
        g.db, user_id=g.user.id, since=since, kinds=kinds,
        limit_per_kind=limit,
        tenant_id=getattr(g, 'tenant_id', None))
    return _ok(deltas)

873 

874 

@social_bp.route('/sync/inbox', methods=['GET'])
@require_auth
@requires_flag('sync_v1', else_value={'cursor': '0', 'has_more': False, 'rows': []})
def sync_inbox():
    """Flattened unified-inbox view layered on top of /sync.

    Shares the /sync cursor and flag gate and calls SyncService.deltas
    internally — purely additive, no new SQL path. Returns one
    chronologically sorted list of InboxRow dicts (messages, mentions,
    invites, friend requests, notifications) ready to render directly.

    Query params:
        since  opaque cursor from the previous response (default =
               full backfill from epoch).
        limit  per-kind row cap, clamped to 1..200 (default 50).

    Response shape:
        { cursor: '<next>', has_more: bool, rows: [InboxRow, ...] }
    """
    from .sync_service import SyncService
    since = request.args.get('since')
    try:
        requested = int(request.args.get('limit', 50))
    except (TypeError, ValueError):
        requested = 50
    limit = min(max(requested, 1), 200)
    rows = SyncService.inbox_rows(
        g.db, user_id=g.user.id, since=since,
        limit_per_kind=limit,
        tenant_id=getattr(g, 'tenant_id', None))
    return _ok(rows)

906 

907 

def _autocomplete_scope_members(db, parent_kind, parent_id):
    """Return the member-id set for a community/conversation scope.

    Prefers the Membership table from migration v41; when it is
    unavailable, falls back to the legacy community_memberships table
    for community scope only (keeps Phase 7a working before dual-write
    populates the new table). Any failure yields an empty set.
    """
    from sqlalchemy import text
    try:
        rows = db.execute(text(
            "SELECT member_id FROM memberships "
            "WHERE parent_kind = :pk AND parent_id = :pid"),
            {'pk': parent_kind, 'pid': parent_id}
        ).fetchall()
        return {r[0] for r in rows}
    except Exception:
        if parent_kind != 'community':
            return set()
    try:
        rows = db.execute(text(
            "SELECT user_id FROM community_memberships "
            "WHERE community_id = :cid"),
            {'cid': parent_id}
        ).fetchall()
        return {r[0] for r in rows}
    except Exception:
        return set()


def _autocomplete_friend_ids(db, user_id):
    """Return ids the user follows (one-direction Follow edge, plan B.1).

    Phase 7c will replace this with the Friendship state-machine.
    Best-effort: any failure yields an empty set.
    """
    from sqlalchemy import text
    try:
        rows = db.execute(text(
            "SELECT following_id FROM follows WHERE follower_id = :uid"),
            {'uid': user_id}
        ).fetchall()
        return {r[0] for r in rows}
    except Exception:
        return set()


@social_bp.route('/users/autocomplete', methods=['GET'])
@require_auth
def autocomplete_users():
    """Mention autocomplete — used by RN/web MentionInput.

    Phase 7a.3 endpoint. Plan reference: sunny-gliding-eich.md, Part E.2.

    Query params:
        q                required — username prefix (1..40 chars)
        kind             optional — 'human' | 'agent' | 'all' (default 'all')
        community_id     optional — restrict + rank-boost to community members
        conversation_id  optional — restrict + rank-boost to conversation members
        limit            optional — 1..25 (default 10)

    Ranking order, highest first:
        1. Members of the scope parent (community_id / conversation_id)
        2. Friends / followed users
        3. Global username prefix match (tenant-scoped)

    Tenancy: when g.tenant_id is set, results are restricted to that
    tenant's users (cloud SaaS isolation); flat/regional deployments
    leave it unset and the filter is a no-op.

    Returns:
        [{id, username, display_name, avatar_url, agent_kind,
          agent_owner_id, is_member, is_friend}]
    """
    # Feature-flag gated. Off by default; flip via env or per-tenant.
    if not g.feature_flags.get('mentions_autocomplete', False):
        return _ok([])

    q = (request.args.get('q') or '').strip()
    if not q:
        return _err("q is required", 400)
    if len(q) > 40:
        return _err("q too long", 400)

    kind = request.args.get('kind', 'all')
    if kind not in ('human', 'agent', 'all'):
        return _err("kind must be human|agent|all", 400)

    # Fix: a non-numeric `limit` previously raised ValueError → HTTP
    # 500. Fall back to the default instead, matching /sync endpoints.
    try:
        limit = max(1, min(int(request.args.get('limit', 10)), 25))
    except (TypeError, ValueError):
        limit = 10
    community_id = request.args.get('community_id')
    conversation_id = request.args.get('conversation_id')

    # Base query: username prefix match, banned users excluded.
    qry = g.db.query(User).filter(
        User.username.ilike(f'{q}%'),
        User.is_banned == False,  # noqa: E712
    )
    if kind == 'human':
        qry = qry.filter(User.user_type != 'agent')
    elif kind == 'agent':
        qry = qry.filter(User.user_type == 'agent')

    # Tenancy filter (Phase 7a — plan Part E.1). NULL tenant_id rows
    # match a NULL g.tenant_id (flat/regional pass-through).
    if hasattr(User, 'tenant_id') and getattr(g, 'tenant_id', None):
        qry = qry.filter(User.tenant_id == g.tenant_id)

    # Pull a candidate set 3x larger than the limit so we can
    # post-filter / rank without missing scope-matched rows.
    candidates = qry.limit(limit * 3).all()

    # Resolve scope-membership and friend lookups once each.
    scope_member_ids = set()
    if community_id:
        scope_member_ids = _autocomplete_scope_members(
            g.db, 'community', community_id)
    elif conversation_id:
        scope_member_ids = _autocomplete_scope_members(
            g.db, 'conversation', conversation_id)
    friend_ids = _autocomplete_friend_ids(g.db, g.user.id) if g.user else set()

    def _rank(u):
        # Lower is better. Scope members first, then friends, then global.
        if u.id in scope_member_ids:
            return (0, u.username.lower())
        if u.id in friend_ids:
            return (1, u.username.lower())
        return (2, u.username.lower())

    candidates.sort(key=_rank)

    out = []
    for u in candidates[:limit]:
        agent_kind = 'agent' if (getattr(u, 'user_type', '') == 'agent') else 'human'
        # Existing User model uses `owner_id` (FK→users.id) for agent
        # ownership; plan Part C.3 called this `agent_owner_id` but we
        # reuse the existing column (no-parallel-paths principle).
        out.append({
            'id': u.id,
            'username': u.username,
            'display_name': getattr(u, 'display_name', None) or u.username,
            'avatar_url': getattr(u, 'avatar_url', None),
            'agent_kind': agent_kind,
            'agent_owner_id': getattr(u, 'owner_id', None),
            'is_member': u.id in scope_member_ids,
            'is_friend': u.id in friend_ids,
        })
    return _ok(out)

1057 

1058 

@social_bp.route('/users/<user_id>/consent/cloud-data', methods=['PUT'])
@require_auth
def set_cloud_data_consent(user_id):
    """Grant or revoke consent for sharing anonymized data with cloud
    services.

    User autonomy is respected: nothing leaves the device for cloud
    processing without an explicit opt-in, and the choice is
    revocable at any time.
    """
    if not (g.user.id == user_id or g.user.is_admin):
        return _err("Can only manage your own consent", 403)
    payload = _get_json()
    opted_in = bool(payload.get('consent', False))
    # Rebuild the settings dict so the ORM notices the change.
    updated = dict(g.user.settings or {})
    updated['cloud_data_consent'] = opted_in
    g.user.settings = updated
    g.db.flush()
    return _ok({'cloud_data_consent': opted_in})

1077 

1078 

@social_bp.route('/users/<user_id>/consent/cloud-data', methods=['GET'])
@require_auth
def get_cloud_data_consent(user_id):
    """Report whether the user has opted into cloud data sharing."""
    if not (g.user.id == user_id or g.user.is_admin):
        return _err("Can only check your own consent", 403)
    current = (g.user.settings or {}).get('cloud_data_consent', False)
    return _ok({'cloud_data_consent': bool(current)})

1087 

1088 

@social_bp.route('/users/<user_id>/handle', methods=['PATCH'])
@require_auth
def set_user_handle(user_id):
    """Set a user's unique creator handle (used as suffix for agent global names).

    Returns 403 when targeting another user, 409 when the handle is
    taken, 400 on other validation failures.
    """
    if g.user.id != user_id and not g.user.is_admin:
        return _err("Can only set your own handle", 403)
    data = _get_json()
    # `or ''` also covers an explicit JSON null, which dict.get's
    # default does not catch (the key exists) and would crash .strip().
    handle = (data.get('handle') or '').strip().lower()
    if not handle:
        return _err("handle is required")
    try:
        user = UserService.set_handle(g.db, g.user, handle)
        return _ok({'handle': user.handle})
    except ValueError as e:
        return _err(str(e), 409 if 'taken' in str(e).lower() else 400)

1104 

1105 

@social_bp.route('/handles/check', methods=['GET'])
@rate_limit('global')
def check_handle_availability():
    """Report whether a creator handle is available (no auth required)."""
    from .agent_naming import validate_handle, is_handle_available
    candidate = request.args.get('handle', '').strip().lower()
    if not candidate:
        return _err("handle parameter required")
    ok, why = validate_handle(candidate)
    if not ok:
        # Invalid handles are reported as unavailable with the reason.
        return _ok({'available': False, 'handle': candidate, 'error': why})
    with db_session(commit=False) as db:
        free = is_handle_available(db, candidate)
    return _ok({'available': free, 'handle': candidate})

1120 

1121 

@social_bp.route('/users/<user_id>/posts', methods=['GET'])
@optional_auth
def get_user_posts(user_id):
    """Paginated list of a user's posts, privacy-filtered when the
    post_privacy flag is enabled."""
    limit = min(int(request.args.get('limit', 25)), 100)
    offset = int(request.args.get('offset', 0))
    privacy_on = bool(getattr(g, 'feature_flags', {}).get('post_privacy', False))
    posts, total = PostService.list_posts(
        g.db, author_id=user_id, limit=limit, offset=offset,
        viewer_user=g.user, apply_privacy=privacy_on)
    rows = [p.to_dict(include_author=True) for p in posts]
    return _ok(rows, _paginate(total, limit, offset))

1132 

1133 

@social_bp.route('/users/<user_id>/comments', methods=['GET'])
@optional_auth
def get_user_comments(user_id):
    """Paginated list of a user's visible comments, newest first.

    Deleted and hidden comments are excluded. The filter set is built
    once and shared between the count and the page query (previously
    duplicated, a drift risk).
    """
    limit = min(int(request.args.get('limit', 25)), 100)
    offset = int(request.args.get('offset', 0))
    base = g.db.query(Comment).filter(
        Comment.author_id == user_id,
        Comment.is_deleted == False,  # noqa: E712
        Comment.is_hidden == False,   # noqa: E712
    )
    total = base.count()
    comments = (base.order_by(Comment.created_at.desc())
                .offset(offset).limit(limit).all())
    return _ok([c.to_dict() for c in comments], _paginate(total, limit, offset))

1147 

1148 

@social_bp.route('/users/<user_id>/karma', methods=['GET'])
@optional_auth
def get_user_karma(user_id):
    """Karma breakdown for a user; 404 when the user doesn't exist."""
    target = UserService.get_by_id(g.db, user_id)
    if target is None:
        return _err("User not found", 404)
    return _ok(get_karma_breakdown(g.db, target))

1156 

1157 

@social_bp.route('/users/<user_id>/skills', methods=['GET'])
@optional_auth
def get_user_skills(user_id):
    """Skill badges for a user; 404 when the user doesn't exist."""
    target = UserService.get_by_id(g.db, user_id)
    if target is None:
        return _err("User not found", 404)
    return _ok([badge.to_dict() for badge in target.skill_badges.all()])

1166 

1167 

@social_bp.route('/users/<user_id>/follow', methods=['POST'])
@require_auth
def follow_user(user_id):
    """Follow a user addressed by UUID or username."""
    try:
        # Resolve UUID first, then fall back to username lookup.
        target = (UserService.get_by_id(g.db, user_id)
                  or UserService.get_by_username(g.db, user_id))
        if target is None:
            return _err("User not found", 404)
        created = FollowService.follow(g.db, g.user, target.id)
        return _ok({'followed': created})
    except ValueError as e:
        return _err(str(e))

1182 

1183 

@social_bp.route('/users/<user_id>/follow', methods=['DELETE'])
@require_auth
def unfollow_user(user_id):
    """Stop following a user addressed by UUID or username."""
    target = (UserService.get_by_id(g.db, user_id)
              or UserService.get_by_username(g.db, user_id))
    if target is None:
        return _err("User not found", 404)
    FollowService.unfollow(g.db, g.user, target.id)
    return _ok({'unfollowed': True})

1194 

1195 

@social_bp.route('/users/<user_id>/followers', methods=['GET'])
@optional_auth
def get_user_followers(user_id):
    """Paginated list of users following `user_id`."""
    limit = min(int(request.args.get('limit', 50)), 100)
    offset = int(request.args.get('offset', 0))
    followers, total = FollowService.get_followers(g.db, user_id, limit, offset)
    return _ok([person.to_dict() for person in followers],
               _paginate(total, limit, offset))

1203 

1204 

@social_bp.route('/users/<user_id>/following', methods=['GET'])
@optional_auth
def get_user_following(user_id):
    """Paginated list of users that `user_id` follows."""
    limit = min(int(request.args.get('limit', 50)), 100)
    offset = int(request.args.get('offset', 0))
    followed, total = FollowService.get_following(g.db, user_id, limit, offset)
    return _ok([person.to_dict() for person in followed],
               _paginate(total, limit, offset))

1212 

1213 

1214# ═══════════════════════════════════════════════════════════════ 

1215# AGENT OWNERSHIP 

1216# ═══════════════════════════════════════════════════════════════ 

1217 

@social_bp.route('/users/<user_id>/agents', methods=['GET'])
@optional_auth
def get_user_agents(user_id):
    """List all agents owned by this user, each with its skill badges.

    Fix: badges are fetched with one batched `IN` query instead of one
    query per agent (N+1).
    """
    agents = UserService.get_owned_agents(g.db, user_id)
    result = []
    if agents:
        badge_rows = g.db.query(AgentSkillBadge).filter(
            AgentSkillBadge.user_id.in_([a.id for a in agents])).all()
        # Group badges by owning agent id for O(1) lookup below.
        by_agent = {}
        for b in badge_rows:
            by_agent.setdefault(b.user_id, []).append(b)
        for agent in agents:
            d = agent.to_dict()
            d['skills'] = [b.to_dict() for b in by_agent.get(agent.id, [])]
            result.append(d)
    return _ok(result)

1231 

1232 

@social_bp.route('/users/<user_id>/agents', methods=['POST'])
@require_auth
def create_user_agent(user_id):
    """
    Create a new agent owned by this user.
    Accepts either:
      - local_name (2-word): auto-appends user's handle to form 3-word global name
      - name (3-word): legacy direct global name registration
    Optional body fields: description, agent_id, personality, skills,
    voice_profile. Returns 201 with the agent dict including its token.
    """
    # Only the owner (or an admin) may create agents under this user id.
    if g.user.id != user_id and not g.user.is_admin:
        return _err("Can only create agents for yourself", 403)
    data = _get_json()
    local_name = data.get('local_name', '').strip().lower()
    name = data.get('name', '').strip().lower()

    if not local_name and not name:
        return _err("Agent name is required (use 'local_name' for 2-word or 'name' for 3-word)")

    try:
        if local_name:
            # New path: 2-word local name + handle
            if not g.user.handle:
                return _err("Set your handle first before creating agents", 400)
            agent = UserService.register_agent_local(
                g.db, local_name, data.get('description', ''),
                data.get('agent_id'), owner=g.user)
        else:
            # Legacy path: 3-word global name
            # NOTE(review): this path passes owner_id=user_id while the
            # local path assigns owner=g.user — confirm both resolve to
            # the intended owner when an admin creates for another user.
            agent = UserService.register_agent(
                g.db, name, data.get('description', ''),
                data.get('agent_id'), owner_id=user_id)
    except ValueError as e:
        return _err(str(e))
    # Optional settings land in the JSON settings blob; each assignment
    # rebuilds the dict so the ORM registers the attribute change.
    if data.get('personality'):
        agent.settings = dict(agent.settings or {}, personality=data['personality'])
    if data.get('skills'):
        agent.settings = dict(agent.settings or {}, skill_tags=data['skills'])
    # Persist voice_profile on the dedicated column (schema v37+). Accept both
    # dict and JSON-string shapes — canonicalise to dict so downstream TTS
    # callers get a consistent type.
    vp_raw = data.get('voice_profile')
    if vp_raw is not None:
        if isinstance(vp_raw, str):
            try:
                vp_raw = json.loads(vp_raw)
            except (ValueError, TypeError):
                # Keep raw string under a 'preset' key so the TTS engine can
                # still resolve it as an engine preset name.
                vp_raw = {'preset': vp_raw}
        if isinstance(vp_raw, dict):
            agent.voice_profile = vp_raw
    g.db.flush()
    g.db.commit()
    return _ok(agent.to_dict(include_token=True), status=201)

1287 

1288 

@social_bp.route('/agents/suggest-names', methods=['GET'])
@rate_limit('global')
def suggest_agent_names():
    """Generate available agent names.

    ?mode=local&handle=X → 2-word names pre-checked for global availability
    ?mode=global (default) → 3-word names
    """
    from .agent_naming import generate_agent_name
    how_many = min(int(request.args.get('count', 5)), 20)
    mode = request.args.get('mode', 'global')
    handle = request.args.get('handle', '').strip().lower() or None
    with db_session(commit=False) as db:
        names = generate_agent_name(db, count=how_many, mode=mode, handle=handle)
        payload = {'suggestions': names, 'count': len(names), 'mode': mode}
        if mode == 'local' and handle:
            from .agent_naming import compose_global_name
            # Preview what each suggestion becomes once the handle is appended.
            payload['global_preview'] = [
                compose_global_name(n, handle) for n in names]
        return _ok(payload)

1308 

1309 

@social_bp.route('/agents/validate-name', methods=['POST'])
@rate_limit('global')
def validate_agent_name_endpoint():
    """Check whether an agent name is valid and available.

    body.mode='local' + body.handle → validates a 2-word name plus
    global availability; body.mode='global' (default) → validates a
    3-word name directly.
    """
    from .agent_naming import validate_and_check, validate_local_name, check_global_availability
    data = _get_json()
    name = data.get('name', '').strip().lower()
    mode = data.get('mode', 'global')
    handle = data.get('handle', '').strip().lower() if data.get('handle') else None
    with db_session(commit=False) as db:
        if not (mode == 'local' and handle):
            # Global path: single combined validity + availability check.
            valid, error = validate_and_check(db, name)
            return _ok({'valid': valid, 'error': error, 'name': name})
        ok, why = validate_local_name(name)
        if not ok:
            return _ok({'valid': False, 'error': why, 'name': name})
        available, global_name, err = check_global_availability(db, name, handle)
        return _ok({
            'valid': available, 'error': err,
            'name': name, 'global_name': global_name,
        })

1336 

1337 

1338# ═══════════════════════════════════════════════════════════════ 

1339# POSTS 

1340# ═══════════════════════════════════════════════════════════════ 

1341 

@social_bp.route('/posts', methods=['GET'])
@optional_auth
def list_posts():
    """Paginated post listing with sort and community filters."""
    sort = request.args.get('sort', 'new')
    community = request.args.get('community')
    limit = min(int(request.args.get('limit', 25)), 100)
    offset = int(request.args.get('offset', 0))
    privacy_on = bool(getattr(g, 'feature_flags', {}).get('post_privacy', False))
    posts, total = PostService.list_posts(
        g.db, sort, community, limit=limit, offset=offset,
        viewer_user=g.user, apply_privacy=privacy_on)
    rows = [p.to_dict(include_author=True) for p in posts]
    return _ok(rows, _paginate(total, limit, offset))

1354 

1355 

@social_bp.route('/posts', methods=['POST'])
@require_auth
@rate_limit('post')
def create_post():
    """Create a post.

    Accepts `title` (or RN alias `caption`), optional `content` plus
    community/media/link fields, research-intent fields, and a
    flag-gated `privacy` level. Returns 201 with the post dict (plus
    `mentions` when the mentions flag is on).

    Fix: the two length-limit errors now go through the `_err` helper
    (identical JSON payload) instead of inline jsonify calls, matching
    every other endpoint in this module.
    """
    data = _get_json()
    title = data.get('title') or data.get('caption', '')
    if not title:
        return _err("title required")
    if len(title) > 300:
        return _err('Title too long (max 300 characters)')
    content = data.get('content', '')
    if content and len(content) > 40000:
        return _err('Content too long (max 40000 characters)')
    post = PostService.create(
        g.db, g.user, title, content,
        data.get('content_type', 'text'), data.get('community'),
        data.get('code_language'), data.get('media_urls'),
        data.get('link_url'), data.get('source_channel'),
        intent_category=data.get('intent_category'),
        hypothesis=data.get('hypothesis'),
        expected_outcome=data.get('expected_outcome'),
        is_thought_experiment=bool(data.get('is_thought_experiment', False)),
        dynamic_layout=data.get('dynamic_layout'),
    )
    # Phase 7c.5 — per-post privacy. Stored only when the flag is on
    # so flag-off deploys never surface or persist the field. Unknown
    # values are silently coerced to 'public' via _normalize so a
    # malicious client cannot store an unenforceable level.
    if g.feature_flags.get('post_privacy', False):
        from .privacy import _normalize
        requested = data.get('privacy')
        if requested is not None:
            normalized = _normalize(requested)
            # P3-11 — 'community' privacy without a community_id is a
            # silent-invisibility trap (would only be visible to the
            # author, which is not what the user asked for). Refuse
            # explicitly so the client gets feedback.
            if normalized == 'community' and not post.community_id:
                # Roll back the post so we don't leave an orphaned
                # row. Delete the just-created post to keep the
                # error path clean.
                g.db.delete(post)
                g.db.flush()
                return _err(
                    "privacy='community' requires the post to be in a community", 400)
            post.privacy = normalized
            g.db.flush()
    # Phase 7e — AI moderation (post-DLP soft signal). Runs AFTER
    # DLPEngine (the existing pre-publish PII gate, unchanged) so PII
    # rejections happen first. Classifier writes a decision row + may
    # flip is_hidden / is_quarantined. Flag-gated by `moderation_v2`;
    # off → silent no-op. Plan Part M pipeline.
    # Pass-4 P4-10: commit=False so the post row + decision row + any
    # is_hidden / is_quarantined flip all land in the SAME transaction.
    # RT subscribers are notified post-commit by the @require_auth
    # decorator, so they never see an un-moderated post.
    if g.feature_flags.get('moderation_v2', False):
        try:
            from .content_classifier import ContentClassifier
            ContentClassifier.classify_and_persist(
                g.db, source_kind='post', source_id=post.id,
                content=(title + '\n\n' + (content or '')),
                tenant_id=getattr(g, 'tenant_id', None),
                commit=False)
            # Refresh the post row in case is_hidden / is_quarantined
            # flipped — to_dict below should reflect the updated state.
            g.db.refresh(post)
        except Exception as e:
            logger.warning("create_post moderation pass failed: %s", e)
    # Phase 7b — parse @-mentions, fan out notifications, dispatch
    # named agents through the existing agentic_router (see
    # mention_service.py docstring + plan Part E.5). Flag-gated;
    # off → silent no-op, response shape unchanged.
    out = post.to_dict(include_author=True)
    if g.feature_flags.get('mentions', False):
        try:
            from .mention_service import MentionService
            mentions = MentionService.parse_and_record(
                g.db, source_kind='post', source_id=post.id,
                content=(title + '\n\n' + (content or '')),
                author_id=g.user.id, tenant_id=getattr(g, 'tenant_id', None))
            if mentions:
                out['mentions'] = mentions
        except Exception as e:
            logger.warning("create_post mention pass failed: %s", e)
    return _ok(out, status=201)

1443 

1444 

@social_bp.route('/posts/<post_id>', methods=['GET'])
@optional_auth
def get_post(post_id):
    """Fetch one post; bumps the view counter and annotates the
    caller's vote when authenticated."""
    post = PostService.get_by_id(g.db, post_id)
    if post is None:
        return _err("Post not found", 404)
    # Phase 7c.5 — per-post privacy gate. 404 (not 403) so a hidden
    # post's existence is never revealed at this id; same shape the
    # rest of the API uses for tenant isolation.
    if getattr(g, 'feature_flags', {}).get('post_privacy', False):
        from .privacy import can_view_post
        if not can_view_post(g.db, g.user, post):
            return _err("Post not found", 404)
    PostService.increment_view(g.db, post)
    payload = post.to_dict(include_author=True)
    if g.user:
        # Include the caller's own vote so the UI can highlight it.
        from .models import Vote
        vote = g.db.query(Vote).filter(
            Vote.user_id == g.user.id,
            Vote.target_type == 'post',
            Vote.target_id == post_id).first()
        payload['user_vote'] = vote.value if vote else 0
    return _ok(payload)

1469 

1470 

@social_bp.route('/posts/<post_id>', methods=['PATCH'])
@require_auth
def update_post(post_id):
    """Edit a post (author or admin only); optionally updates the
    flag-gated per-post privacy level."""
    post = PostService.get_by_id(g.db, post_id)
    if post is None:
        return _err("Post not found", 404)
    if not (post.author_id == g.user.id or g.user.is_admin):
        return _err("Cannot edit another user's post", 403)
    data = _get_json()
    post = PostService.update(
        g.db, post, data.get('title'), data.get('content'),
        intent_category=data.get('intent_category'),
        hypothesis=data.get('hypothesis'),
        expected_outcome=data.get('expected_outcome'),
        is_thought_experiment=data.get('is_thought_experiment'),
        dynamic_layout=data.get('dynamic_layout'),
    )
    # Phase 7c.5 — author can change privacy on their own post. Same
    # flag gate + _normalize coercion as create_post + same
    # community-without-community_id refusal (P3-11).
    if g.feature_flags.get('post_privacy', False) and 'privacy' in data:
        from .privacy import _normalize
        level = _normalize(data.get('privacy'))
        if level == 'community' and not post.community_id:
            return _err(
                "privacy='community' requires the post to be in a community", 400)
        post.privacy = level
        g.db.flush()
    return _ok(post.to_dict(include_author=True))

1500 

1501 

@social_bp.route('/posts/<post_id>', methods=['DELETE'])
@require_auth
def delete_post(post_id):
    """Delete a post (author or admin only)."""
    post = PostService.get_by_id(g.db, post_id)
    if post is None:
        return _err("Post not found", 404)
    if not (post.author_id == g.user.id or g.user.is_admin):
        return _err("Cannot delete another user's post", 403)
    PostService.delete(g.db, post)
    return _ok({'deleted': True})

1512 

1513 

@social_bp.route('/posts/<post_id>/upvote', methods=['POST'])
@require_auth
@rate_limit('vote')
def upvote_post(post_id):
    """Cast (or switch to) an upvote on a post; 404 when missing."""
    try:
        return _ok(VoteService.vote(g.db, g.user, 'post', post_id, 1))
    except ValueError as e:
        return _err(str(e), 404)

1523 

1524 

@social_bp.route('/posts/<post_id>/downvote', methods=['POST'])
@require_auth
@rate_limit('vote')
def downvote_post(post_id):
    """Cast (or switch to) a downvote on a post; 404 when missing."""
    try:
        return _ok(VoteService.vote(g.db, g.user, 'post', post_id, -1))
    except ValueError as e:
        return _err(str(e), 404)

1534 

1535 

@social_bp.route('/posts/<post_id>/vote', methods=['DELETE'])
@require_auth
def remove_post_vote(post_id):
    """Clear the caller's vote on a post (idempotent)."""
    VoteService.remove_vote(g.db, g.user, 'post', post_id)
    return _ok({'removed': True})

1541 

1542 

@social_bp.route('/posts/<post_id>/likes', methods=['GET'])
@optional_auth
def get_post_likes(post_id):
    """RN-compatible: returns list of users who liked this post."""
    return _ok(VoteService.get_voters(g.db, 'post', post_id))

1549 

1550 

@social_bp.route('/posts/<post_id>/pin', methods=['POST'])
@require_auth
def pin_post(post_id):
    """Toggle a post's pinned state.

    Site admins/moderators may always pin; otherwise the caller must
    hold an admin/moderator role in the post's community.
    """
    post = PostService.get_by_id(g.db, post_id)
    if post is None:
        return _err("Post not found", 404)
    if not (g.user.is_admin or g.user.is_moderator):
        if not post.community_id:
            return _err("Moderator access required", 403)
        role = CommunityService.get_user_role(g.db, g.user.id, post.community_id)
        if role not in ('admin', 'moderator'):
            return _err("Moderator access required", 403)
    post.is_pinned = not post.is_pinned
    g.db.flush()
    return _ok({'pinned': post.is_pinned})

1567 

1568 

@social_bp.route('/posts/<post_id>/lock', methods=['POST'])
@require_auth
def lock_post(post_id):
    """Toggle a post's locked state (site mods/admins only)."""
    post = PostService.get_by_id(g.db, post_id)
    if post is None:
        return _err("Post not found", 404)
    if not (g.user.is_admin or g.user.is_moderator):
        return _err("Moderator access required", 403)
    post.is_locked = not post.is_locked
    g.db.flush()
    return _ok({'locked': post.is_locked})

1580 

1581 

@social_bp.route('/posts/<post_id>/report', methods=['POST'])
@require_auth
def report_post(post_id):
    """File a moderation report against a post.

    Body: `reason` (required), `details` (optional free text).
    """
    data = _get_json()
    # `or ''` also covers explicit JSON nulls, which dict.get's default
    # does not catch (the key exists), so both fields are always strings.
    reason = data.get('reason') or ''
    if not reason:
        return _err("reason required")
    report = ReportService.create(
        g.db, g.user, 'post', post_id, reason, data.get('details') or '')
    return _ok(report.to_dict(), status=201)

1591 

1592 

1593# ═══════════════════════════════════════════════════════════════ 

1594# COMMENTS 

1595# ═══════════════════════════════════════════════════════════════ 

1596 

@social_bp.route('/posts/<post_id>/comments', methods=['GET'])
@optional_auth
def get_comments(post_id):
    """Comments for a post in an RN-compatible shape (aliased fields)."""
    sort = request.args.get('sort', 'new')
    comments = CommentService.get_by_post(g.db, post_id, sort)

    def _rn_shape(c):
        # RN expects parent_comment_id (0 = top-level) plus field aliases.
        cd = c.to_dict(include_author=True)
        cd['parent_comment_id'] = c.parent_id or 0
        cd['comment'] = c.content
        cd['name'] = c.author.display_name if c.author else ''
        cd['creation_date'] = cd['created_at']
        return cd

    return _ok([_rn_shape(c) for c in comments])

1613 

1614 

@social_bp.route('/posts/<post_id>/comments', methods=['POST'])
@require_auth
@rate_limit('comment')
def create_comment(post_id):
    """Add a comment to a post.

    Accepts `content` (or RN alias `text`) and an optional
    `parent_id`/`parent_comment_id` (0 means top-level). Locked posts
    accept comments from admins/moderators only.

    Fix: the length-limit error now goes through the `_err` helper
    (identical JSON payload) instead of an inline jsonify call,
    matching the rest of the module.
    """
    post = PostService.get_by_id(g.db, post_id)
    if not post:
        return _err("Post not found", 404)
    if post.is_locked and not (g.user.is_admin or g.user.is_moderator):
        return _err("Post is locked", 403)
    data = _get_json()
    content = data.get('content') or data.get('text', '')
    if not content:
        return _err("content required")
    if len(content) > 10000:
        return _err('Comment too long (max 10000 characters)')
    parent_id = data.get('parent_id') or data.get('parent_comment_id')
    if parent_id == 0:
        parent_id = None
    comment = CommentService.create(g.db, post, g.user, content, parent_id)
    # Phase 7e moderation — same gate + classifier as create_post.
    # Comments don't have an is_quarantined column today, so the
    # decision row is the only persisted side effect. Mods can still
    # see flagged comments in the queue for review.
    # Pass-4 P4-10: commit=False so the decision row lands in the same
    # transaction as the comment.
    if g.feature_flags.get('moderation_v2', False):
        try:
            from .content_classifier import ContentClassifier
            ContentClassifier.classify_and_persist(
                g.db, source_kind='comment', source_id=comment.id,
                content=content,
                tenant_id=getattr(g, 'tenant_id', None),
                commit=False)
        except Exception as e:
            logger.warning("create_comment moderation pass failed: %s", e)
    out = comment.to_dict(include_author=True)
    # Phase 7b — @-mention fan-out; flag-gated, best-effort.
    if g.feature_flags.get('mentions', False):
        try:
            from .mention_service import MentionService
            mentions = MentionService.parse_and_record(
                g.db, source_kind='comment', source_id=comment.id,
                content=content, author_id=g.user.id,
                tenant_id=getattr(g, 'tenant_id', None))
            if mentions:
                out['mentions'] = mentions
        except Exception as e:
            logger.warning("create_comment mention pass failed: %s", e)
    return _ok(out, status=201)

1663 

1664 

@social_bp.route('/comments/<comment_id>/reply', methods=['POST'])
@require_auth
@rate_limit('comment')
def reply_to_comment(comment_id):
    """Reply to an existing comment (dedicated RN route).

    Mirrors create_comment's validation, moderation, and mention passes,
    with the parent fixed to ``comment_id``.
    """
    comment = g.db.query(Comment).filter(Comment.id == comment_id).first()
    if not comment:
        return _err("Comment not found", 404)
    post = PostService.get_by_id(g.db, comment.post_id)
    if not post:
        return _err("Post not found", 404)
    # Bug fix: enforce the same locked-post gate as create_comment —
    # previously a locked post could still receive replies via this route.
    if post.is_locked and not (g.user.is_admin or g.user.is_moderator):
        return _err("Post is locked", 403)
    data = _get_json()
    content = data.get('content') or data.get('text', '')
    if not content:
        return _err("content required")
    if len(content) > 10000:
        # Consistency: shared _err helper (same JSON shape as before).
        return _err('Comment too long (max 10000 characters)')
    reply = CommentService.create(g.db, post, g.user, content, comment_id)
    # Same moderation pass as create_comment; commit=False keeps the
    # decision row in the reply's transaction.
    if g.feature_flags.get('moderation_v2', False):
        try:
            from .content_classifier import ContentClassifier
            ContentClassifier.classify_and_persist(
                g.db, source_kind='comment', source_id=reply.id,
                content=content,
                tenant_id=getattr(g, 'tenant_id', None),
                commit=False)
        except Exception as e:
            logger.warning("reply_to_comment moderation pass failed: %s", e)
    out = reply.to_dict(include_author=True)
    if g.feature_flags.get('mentions', False):
        try:
            from .mention_service import MentionService
            mentions = MentionService.parse_and_record(
                g.db, source_kind='comment', source_id=reply.id,
                content=content, author_id=g.user.id,
                tenant_id=getattr(g, 'tenant_id', None))
            if mentions:
                out['mentions'] = mentions
        except Exception as e:
            logger.warning("reply_to_comment mention pass failed: %s", e)
    return _ok(out, status=201)

1705 

1706 

@social_bp.route('/comments/<comment_id>', methods=['PATCH'])
@require_auth
def update_comment(comment_id):
    """Edit a comment's content (author or site admin only).

    An empty/missing body is a no-op that still returns the comment.
    """
    comment = g.db.query(Comment).filter(Comment.id == comment_id).first()
    if not comment:
        return _err("Comment not found", 404)
    if comment.author_id != g.user.id and not g.user.is_admin:
        return _err("Cannot edit another user's comment", 403)
    data = _get_json()
    content = data.get('content') or data.get('text', '')
    if content:
        # Bug fix: enforce the same length cap as create_comment —
        # previously an edit could grow a comment past 10000 chars.
        if len(content) > 10000:
            return _err('Comment too long (max 10000 characters)')
        comment.content = content
        g.db.flush()
    return _ok(comment.to_dict(include_author=True))

1721 

1722 

@social_bp.route('/comments/<comment_id>', methods=['DELETE'])
@require_auth
def delete_comment(comment_id):
    """Delete a comment (author or site admin only)."""
    target = g.db.query(Comment).filter(Comment.id == comment_id).first()
    if target is None:
        return _err("Comment not found", 404)
    is_owner = target.author_id == g.user.id
    if not is_owner and not g.user.is_admin:
        return _err("Cannot delete another user's comment", 403)
    CommentService.delete(g.db, target)
    return _ok({'deleted': True})

1733 

1734 

@social_bp.route('/comments/<comment_id>/upvote', methods=['POST'])
@require_auth
@rate_limit('vote')
def upvote_comment(comment_id):
    """Cast a +1 vote on a comment; unknown IDs surface as 404."""
    try:
        outcome = VoteService.vote(g.db, g.user, 'comment', comment_id, 1)
    except ValueError as exc:
        return _err(str(exc), 404)
    return _ok(outcome)

1744 

1745 

@social_bp.route('/comments/<comment_id>/downvote', methods=['POST'])
@require_auth
@rate_limit('vote')
def downvote_comment(comment_id):
    """Cast a -1 vote on a comment; unknown IDs surface as 404."""
    try:
        outcome = VoteService.vote(g.db, g.user, 'comment', comment_id, -1)
    except ValueError as exc:
        return _err(str(exc), 404)
    return _ok(outcome)

1755 

1756 

@social_bp.route('/comments/<comment_id>/vote', methods=['DELETE'])
@require_auth
def remove_comment_vote(comment_id):
    """Clear the calling user's vote on a comment (idempotent)."""
    VoteService.remove_vote(g.db, g.user, 'comment', comment_id)
    return _ok({'removed': True})

1762 

1763 

@social_bp.route('/comments/<comment_id>/likes', methods=['GET'])
@optional_auth
def get_comment_likes(comment_id):
    """RN-compatible: returns list of users who liked this comment."""
    return _ok(VoteService.get_voters(g.db, 'comment', comment_id))

1770 

1771 

@social_bp.route('/comments/<comment_id>/report', methods=['POST'])
@require_auth
def report_comment(comment_id):
    """File a moderation report against a comment; 400 when no reason given."""
    payload = _get_json()
    reason = payload.get('reason', '')
    if not reason:
        return _err("reason required")
    filed = ReportService.create(g.db, g.user, 'comment', comment_id,
                                 reason, payload.get('details', ''))
    return _ok(filed.to_dict(), status=201)

1781 

1782 

1783# ═══════════════════════════════════════════════════════════════ 

1784# COMMUNITIES 

1785# ═══════════════════════════════════════════════════════════════ 

1786 

@social_bp.route('/communities', methods=['GET'])
@optional_auth
def list_communities():
    """Paginated listing of all communities."""
    page_size = min(int(request.args.get('limit', 50)), 100)
    skip = int(request.args.get('offset', 0))
    rows, total = CommunityService.list_communities(g.db, page_size, skip)
    return _ok([c.to_dict() for c in rows], _paginate(total, page_size, skip))

1794 

1795 

@social_bp.route('/communities', methods=['POST'])
@require_auth
def create_community():
    """Create a community; service-level validation errors become 400s."""
    payload = _get_json()
    name = payload.get('name', '')
    if not name:
        return _err("name required")
    try:
        created = CommunityService.create(
            g.db, g.user, name,
            payload.get('display_name', ''),
            payload.get('description', ''),
            payload.get('rules', ''),
            payload.get('is_private', False))
    except ValueError as exc:
        return _err(str(exc))
    return _ok(created.to_dict(), status=201)

1811 

1812 

def _resolve_community(name):
    """Resolve a community by name, falling back to a numeric-ID lookup.

    Returns None when neither interpretation matches.
    """
    found = CommunityService.get_by_name(g.db, name)
    if found is not None:
        return found
    # The frontend may send the numeric ID instead of the name.
    try:
        numeric_id = int(name)
    except (ValueError, TypeError):
        return None
    return g.db.query(Community).filter(Community.id == numeric_id).first()

1823 

1824 

@social_bp.route('/communities/<name>', methods=['GET'])
@optional_auth
def get_community(name):
    """Fetch one community; authenticated viewers also get membership info."""
    community = _resolve_community(name)
    if not community:
        return _err("Community not found", 404)
    data = community.to_dict()
    if g.user:
        # Perf fix: the original issued the same membership-role query
        # twice; fetch it once and derive both fields.
        role = CommunityService.get_user_role(g.db, g.user.id, community.id)
        data['is_member'] = role is not None
        data['role'] = role
    return _ok(data)

1836 

1837 

@social_bp.route('/communities/<name>', methods=['PATCH'])
@require_auth
def update_community(name):
    """Update editable community fields (community mod/admin or site admin)."""
    community = _resolve_community(name)
    if not community:
        return _err("Community not found", 404)
    role = CommunityService.get_user_role(g.db, g.user.id, community.id)
    if not (role in ('admin', 'moderator') or g.user.is_admin):
        return _err("Moderator access required", 403)
    payload = _get_json()
    # Only these three fields are editable through this endpoint.
    for field in ('display_name', 'description', 'rules'):
        if field in payload:
            setattr(community, field, payload[field])
    g.db.flush()
    return _ok(community.to_dict())

1856 

1857 

@social_bp.route('/communities/<name>/posts', methods=['GET'])
@optional_auth
def get_community_posts(name):
    """List a community's posts; accepts community name or numeric ID."""
    community = _resolve_community(name)
    # Fall back to the raw value so an unknown community yields no posts
    # rather than a 404 (matches prior behavior).
    slug = community.name if community else name
    page_size = min(int(request.args.get('limit', 25)), 100)
    skip = int(request.args.get('offset', 0))
    privacy_on = bool(getattr(g, 'feature_flags', {}).get('post_privacy', False))
    posts, total = PostService.list_posts(
        g.db, request.args.get('sort', 'new'), community_name=slug,
        limit=page_size, offset=skip,
        viewer_user=g.user, apply_privacy=privacy_on)
    return _ok([p.to_dict(include_author=True) for p in posts],
               _paginate(total, page_size, skip))

1872 

1873 

@social_bp.route('/communities/<name>/join', methods=['POST'])
@require_auth
def join_community(name):
    """Join a community; 'joined' reflects whether a membership was created."""
    community = _resolve_community(name)
    if community is None:
        return _err("Community not found", 404)
    did_join = CommunityService.join(g.db, g.user, community)
    return _ok({'joined': did_join})

1882 

1883 

@social_bp.route('/communities/<name>/leave', methods=['DELETE'])
@require_auth
def leave_community(name):
    """Leave a community (idempotent from the caller's perspective)."""
    community = _resolve_community(name)
    if community is None:
        return _err("Community not found", 404)
    CommunityService.leave(g.db, g.user, community)
    return _ok({'left': True})

1892 

1893 

@social_bp.route('/communities/<name>/members', methods=['GET'])
@optional_auth
def get_community_members(name):
    """Paginated member list for a community."""
    community = _resolve_community(name)
    if community is None:
        return _err("Community not found", 404)
    page_size = min(int(request.args.get('limit', 50)), 100)
    skip = int(request.args.get('offset', 0))
    members, total = CommunityService.get_members(g.db, community.id,
                                                  page_size, skip)
    return _ok(members, _paginate(total, page_size, skip))

1904 

1905 

@social_bp.route('/communities/<name>/moderators', methods=['POST'])
@require_auth
def add_moderator(name):
    """Promote an existing member to moderator (community admin or site admin)."""
    community = _resolve_community(name)
    if community is None:
        return _err("Community not found", 404)
    caller_role = CommunityService.get_user_role(g.db, g.user.id, community.id)
    if caller_role != 'admin' and not g.user.is_admin:
        return _err("Admin access required", 403)
    target_id = _get_json().get('user_id', '')
    from .models import CommunityMembership
    membership = g.db.query(CommunityMembership).filter(
        CommunityMembership.user_id == target_id,
        CommunityMembership.community_id == community.id).first()
    if membership is None:
        # Promotion requires an existing membership row.
        return _err("User is not a member", 400)
    membership.role = 'moderator'
    g.db.flush()
    return _ok({'promoted': True})

1926 

1927 

@social_bp.route('/communities/<name>/moderators/<user_id>', methods=['DELETE'])
@require_auth
def remove_moderator(name, user_id):
    """Demote a moderator back to member (community admin or site admin).

    Reports success even when the target has no membership row, matching
    the endpoint's idempotent contract.
    """
    community = _resolve_community(name)
    if community is None:
        return _err("Community not found", 404)
    caller_role = CommunityService.get_user_role(g.db, g.user.id, community.id)
    if caller_role != 'admin' and not g.user.is_admin:
        return _err("Admin access required", 403)
    from .models import CommunityMembership
    membership = g.db.query(CommunityMembership).filter(
        CommunityMembership.user_id == user_id,
        CommunityMembership.community_id == community.id).first()
    if membership is not None:
        membership.role = 'member'
        g.db.flush()
    return _ok({'demoted': True})

1945 

1946 

1947# ═══════════════════════════════════════════════════════════════ 

1948# FEED 

1949# ═══════════════════════════════════════════════════════════════ 

1950 

@social_bp.route('/feed', methods=['GET'])
@require_auth
def personalized_feed():
    """Per-user home feed, honoring the post_privacy feature flag."""
    page_size = min(int(request.args.get('limit', 25)), 100)
    skip = int(request.args.get('offset', 0))
    privacy_on = bool(getattr(g, 'feature_flags', {}).get('post_privacy', False))
    posts, total = get_personalized_feed(
        g.db, g.user.id, page_size, skip,
        viewer_user=g.user, apply_privacy=privacy_on)
    serialized = [p.to_dict(include_author=True) for p in posts]
    return _ok(serialized, _paginate(total, page_size, skip))

1961 

1962 

@social_bp.route('/feed/all', methods=['GET'])
@optional_auth
def global_feed():
    """Sitewide feed; works anonymously, honors post_privacy flag."""
    page_size = min(int(request.args.get('limit', 25)), 100)
    skip = int(request.args.get('offset', 0))
    viewer = getattr(g, 'user', None)
    viewer_id = viewer.id if viewer else None
    privacy_on = bool(getattr(g, 'feature_flags', {}).get('post_privacy', False))
    posts, total = get_global_feed(
        g.db, request.args.get('sort', 'new'), page_size, skip,
        user_id=viewer_id, viewer_user=g.user, apply_privacy=privacy_on)
    serialized = [p.to_dict(include_author=True) for p in posts]
    return _ok(serialized, _paginate(total, page_size, skip))

1975 

1976 

@social_bp.route('/feed/trending', methods=['GET'])
@optional_auth
def trending_feed():
    """Trending posts feed; works anonymously, honors post_privacy flag."""
    page_size = min(int(request.args.get('limit', 25)), 100)
    skip = int(request.args.get('offset', 0))
    viewer = getattr(g, 'user', None)
    viewer_id = viewer.id if viewer else None
    privacy_on = bool(getattr(g, 'feature_flags', {}).get('post_privacy', False))
    posts, total = get_trending_feed(
        g.db, page_size, skip, user_id=viewer_id,
        viewer_user=g.user, apply_privacy=privacy_on)
    serialized = [p.to_dict(include_author=True) for p in posts]
    return _ok(serialized, _paginate(total, page_size, skip))

1988 

1989 

@social_bp.route('/feed/agents', methods=['GET'])
@optional_auth
def agent_feed():
    """Agent-authored posts feed; works anonymously, honors post_privacy flag."""
    page_size = min(int(request.args.get('limit', 25)), 100)
    skip = int(request.args.get('offset', 0))
    viewer = getattr(g, 'user', None)
    viewer_id = viewer.id if viewer else None
    privacy_on = bool(getattr(g, 'feature_flags', {}).get('post_privacy', False))
    posts, total = get_agent_feed(
        g.db, page_size, skip, user_id=viewer_id,
        viewer_user=g.user, apply_privacy=privacy_on)
    serialized = [p.to_dict(include_author=True) for p in posts]
    return _ok(serialized, _paginate(total, page_size, skip))

2001 

2002 

@social_bp.route('/feed/agent-spotlight', methods=['GET'])
@optional_auth
def agent_spotlight():
    """Agent spotlight for the HARTs feed tab.

    Returns: hart_of_the_day, rising_harts, your_harts (if authenticated).
    Public-cacheable (X-Cache-Scope: public when no user-specific data).

    Each of the three sections is computed independently inside its own
    try/except so a failure in one query degrades that section to
    None/[] instead of failing the whole endpoint.
    """
    from datetime import timedelta
    from sqlalchemy import func as sa_func
    uid = g.user.id if getattr(g, 'user', None) else None
    db = g.db

    # HART of the day: agent with most upvotes on posts in last 24h
    hart_of_day = None
    try:
        day_ago = datetime.utcnow() - timedelta(days=1)
        # Sum post upvotes per agent author over the last day and take
        # the single top scorer.
        top_agent = db.query(
            Post.author_id, sa_func.sum(Post.upvotes).label('total_harts')
        ).join(User, Post.author_id == User.id).filter(
            User.user_type == 'agent',
            User.is_banned == False,
            Post.created_at >= day_ago,
        ).group_by(Post.author_id).order_by(
            sa_func.sum(Post.upvotes).desc()
        ).first()
        if top_agent:
            agent = db.query(User).filter_by(id=top_agent[0]).first()
            if agent:
                # Whitelist a small public projection of the user dict.
                hart_of_day = {
                    **{k: v for k, v in agent.to_dict().items()
                       if k in ('id', 'username', 'display_name', 'avatar_url', 'user_type')},
                    'total_harts_today': int(top_agent[1] or 0),
                }
    except Exception:
        pass  # graceful degradation

    # Rising HARTs: newest agents with at least 1 post, ordered by karma
    rising = []
    try:
        week_ago = datetime.utcnow() - timedelta(days=7)
        rising_agents = db.query(User).filter(
            User.user_type == 'agent',
            User.is_banned == False,
            User.created_at >= week_ago,
            User.post_count > 0,
        ).order_by(User.karma_score.desc()).limit(5).all()
        rising = [{k: v for k, v in a.to_dict().items()
                   if k in ('id', 'username', 'display_name', 'avatar_url', 'karma_score')}
                  for a in rising_agents]
    except Exception:
        pass

    # Your HARTs: agents owned by current user
    your_harts = []
    if uid:
        try:
            owned = db.query(User).filter_by(
                owner_id=uid, user_type='agent', is_banned=False
            ).order_by(User.karma_score.desc()).limit(10).all()
            your_harts = [{k: v for k, v in a.to_dict().items()
                           if k in ('id', 'username', 'display_name', 'avatar_url', 'karma_score', 'post_count')}
                          for a in owned]
        except Exception:
            pass

    result = {
        'hart_of_the_day': hart_of_day,
        'rising_harts': rising,
        'your_harts': your_harts,
    }

    resp = _ok(result)
    # Tag as public-cacheable when no user-specific data.
    # _ok returns a (response, status) tuple, hence resp[0].
    if not uid:
        resp[0].headers['X-Cache-Scope'] = 'public'
    return resp

2079 

2080 

2081# ═══════════════════════════════════════════════════════════════ 

2082# SEARCH 

2083# ═══════════════════════════════════════════════════════════════ 

2084 

@social_bp.route('/search', methods=['GET'])
@optional_auth
@rate_limit('search')
def search():
    """Search posts, users, or communities via SQL ILIKE matching.

    Query params: q (required), type ('posts' | 'users' | 'communities',
    default 'posts'), limit (capped at 100), offset. Post results
    exclude deleted/hidden posts, filter out private communities the
    viewer isn't a member of, and additionally apply the per-post
    privacy gate when the post_privacy feature flag is on.
    """
    q = request.args.get('q', '').strip()
    if not q:
        return _err("q parameter required")

    # Security: Validate and sanitize search query, escape SQL LIKE wildcards
    try:
        from security.sanitize import validate_search_query, escape_like
        q = validate_search_query(q)
        q_like = f'%{escape_like(q)}%'
    except ImportError:
        # Sanitizer module unavailable in this deployment — fall back to
        # the raw (unescaped) pattern.
        q_like = f'%{q}%'
    except ValueError as e:
        return _err(str(e))

    search_type = request.args.get('type', 'posts')
    limit = min(int(request.args.get('limit', 20)), 100)
    offset = int(request.args.get('offset', 0))

    if search_type == 'users':
        # Match username or display name; banned users never surface.
        users = g.db.query(User).filter(
            User.username.ilike(q_like) | User.display_name.ilike(q_like),
            User.is_banned == False
        ).offset(offset).limit(limit).all()
        return _ok([u.to_dict() for u in users])
    elif search_type == 'communities':
        communities = g.db.query(Community).filter(
            Community.name.ilike(q_like) | Community.description.ilike(q_like)
        ).offset(offset).limit(limit).all()
        return _ok([s.to_dict() for s in communities])
    else:  # posts
        from sqlalchemy import or_
        from .models import CommunityMembership
        current_user_id = g.user.id if g.user else None
        query = g.db.query(Post).options(joinedload(Post.author)).filter(
            Post.is_deleted == False,
            Post.is_hidden == False,
            Post.title.ilike(q_like) | Post.content.ilike(q_like)
        )
        # Filter out posts from private communities that the user isn't a member of.
        # A post is visible when it has no community, its community is
        # public, or (if authenticated) the viewer is a member.
        privacy_conditions = [
            Post.community_id == None,
            Post.community_id.in_(
                g.db.query(Community.id).filter(Community.is_private == False)
            ),
        ]
        if current_user_id:
            privacy_conditions.append(
                Post.community_id.in_(
                    g.db.query(CommunityMembership.community_id).filter(
                        CommunityMembership.user_id == current_user_id
                    )
                )
            )
        query = query.filter(or_(*privacy_conditions))
        # Phase 7c.5 — also AND in the per-post privacy gate when the
        # flag is on. Search is a high-leak surface (any user can
        # query for a post body across the whole platform), so this
        # is the second-most-important place to gate after /feed.
        if getattr(g, 'feature_flags', {}).get('post_privacy', False):
            from .privacy import visible_posts_filter
            query = query.filter(visible_posts_filter(g.user))
        posts = query.order_by(Post.score.desc()).offset(offset).limit(limit).all()
        return _ok([p.to_dict(include_author=True) for p in posts])

2152 

2153 

2154# ═══════════════════════════════════════════════════════════════ 

2155# TASKS (delegate work to agents from posts) 

2156# ═══════════════════════════════════════════════════════════════ 

2157 

@social_bp.route('/tasks', methods=['POST'])
@require_auth
def create_task():
    """Create a task request tied to a post and derive its SmartLedger key."""
    payload = _get_json()
    post_id = payload.get('post_id', '')
    description = payload.get('task_description', '')
    if not (post_id and description):
        return _err("post_id and task_description required")
    task = TaskRequest(
        post_id=post_id, requester_id=g.user.id,
        task_description=description,
    )
    g.db.add(task)
    # Flush first so task.id is populated before building the ledger key.
    g.db.flush()
    task.ledger_key = f"task_{g.user.id}_{task.id}"
    return _ok(task.to_dict(), status=201)

2174 

2175 

@social_bp.route('/tasks', methods=['GET'])
@optional_auth
def list_tasks():
    """List task requests, newest first, with optional filters.

    Filters: status, mine (requester == caller), assigned_to, and
    my_agents (assignee is any agent the caller owns).
    """
    page_size = min(int(request.args.get('limit', 25)), 100)
    skip = int(request.args.get('offset', 0))
    query = g.db.query(TaskRequest)
    status = request.args.get('status')
    if status:
        query = query.filter(TaskRequest.status == status)
    if request.args.get('mine') and g.user:
        query = query.filter(TaskRequest.requester_id == g.user.id)
    assigned_to = request.args.get('assigned_to')
    if assigned_to:
        query = query.filter(TaskRequest.assignee_id == assigned_to)
    if request.args.get('my_agents') and g.user:
        # Restrict to tasks assigned to agents owned by the caller.
        from .models import User
        agent_ids = [row.id for row in g.db.query(User.id).filter_by(
            owner_id=g.user.id, user_type='agent').all()]
        if agent_ids:
            query = query.filter(TaskRequest.assignee_id.in_(agent_ids))
        else:
            query = query.filter(False)  # no agents owned -> empty result
    total = query.count()
    rows = (query.order_by(TaskRequest.created_at.desc())
            .offset(skip).limit(page_size).all())
    return _ok([t.to_dict() for t in rows], _paginate(total, page_size, skip))

2204 

2205 

@social_bp.route('/tasks/<task_id>', methods=['GET'])
@optional_auth
def get_task(task_id):
    """Fetch a single task request by ID."""
    task = g.db.query(TaskRequest).filter(TaskRequest.id == task_id).first()
    if task is None:
        return _err("Task not found", 404)
    return _ok(task.to_dict())

2213 

2214 

@social_bp.route('/tasks/<task_id>/assign', methods=['POST'])
@require_auth
def assign_task(task_id):
    """Assign a task to a user or an agent the caller owns.

    Permitted only for the task's requester or a site admin. Validates
    the assignee exists and, for agents with an owner, that the caller
    owns them. Sets status to 'assigned', stamps a SmartLedger key for
    cross-device persistence, and notifies the assignee (or, for an
    agent, the agent's owner) on a best-effort basis.
    """
    task = g.db.query(TaskRequest).filter(TaskRequest.id == task_id).first()
    if not task:
        return _err("Task not found", 404)
    # Only task requester or admin can assign.
    # str() comparison tolerates mixed int/str ID representations.
    if str(task.requester_id) != str(g.user.id) and not g.user.is_admin:
        return _err("Only the task requester can assign this task", 403)
    data = _get_json()
    assignee_id = data.get('assignee_id', '')
    if not assignee_id:
        return _err("assignee_id required")
    # Validate assignee exists
    assignee = UserService.get_by_id(g.db, assignee_id)
    if not assignee:
        return _err("Assignee not found", 404)
    # If assigning to an agent, verify ownership
    if assignee.user_type == 'agent' and assignee.owner_id:
        if assignee.owner_id != g.user.id:
            return _err("Cannot assign tasks to agents you don't own", 403)
    task.assignee_id = assignee_id
    task.status = 'assigned'
    # Link to SmartLedger for cross-device persistence
    task.ledger_key = f"task_{g.user.id}_{task.id}"
    g.db.flush()
    # Notify the assignee (or agent owner); swallowed on failure so a
    # broken notification path never blocks the assignment itself.
    try:
        notify_target = assignee.owner_id if assignee.user_type == 'agent' else assignee_id
        from .services import NotificationService
        NotificationService.create(
            g.db, user_id=notify_target, type='task_assigned',
            source_user_id=g.user.id, target_type='task',
            target_id=task.id,
            message=f'Task assigned: {task.task_description[:80] if task.task_description else "New task"}',
        )
    except Exception:
        pass
    return _ok(task.to_dict())

2254 

2255 

@social_bp.route('/tasks/<task_id>/complete', methods=['POST'])
@require_auth
def complete_task(task_id):
    """Mark a task completed and credit its assignee.

    Permitted for the assignee, the requester, or a site admin. Stores
    the optional 'result' payload, stamps completion time, then
    recalculates the assignee's karma and (best-effort) awards a
    resonance action.
    """
    task = g.db.query(TaskRequest).filter(TaskRequest.id == task_id).first()
    if not task:
        return _err("Task not found", 404)
    # Only assignee, requester, or admin can complete.
    # str() comparison tolerates mixed int/str ID representations.
    is_assignee = task.assignee_id and str(task.assignee_id) == str(g.user.id)
    is_requester = str(task.requester_id) == str(g.user.id)
    if not (is_assignee or is_requester or g.user.is_admin):
        return _err("Not authorized to complete this task", 403)
    data = _get_json()
    task.result = data.get('result', '')
    task.status = 'completed'
    # Idiom fix: drop the redundant function-local
    # `from datetime import datetime` — the name is already imported at
    # module level and the local import shadowed it needlessly.
    task.completed_at = datetime.utcnow()
    g.db.flush()
    # Award task karma to assignee.
    if task.assignee_id:
        assignee = UserService.get_by_id(g.db, task.assignee_id)
        if assignee:
            recalculate_karma(g.db, assignee)
            try:
                from .resonance_engine import ResonanceService
                ResonanceService.award_action(g.db, assignee.id, 'complete_task', task.id)
            except Exception:
                pass  # resonance is best-effort; never block completion
    return _ok(task.to_dict())

2284 

2285 

2286# ═══════════════════════════════════════════════════════════════ 

2287# RECIPES (share trained agent recipes) 

2288# ═══════════════════════════════════════════════════════════════ 

2289 

@social_bp.route('/recipes/share', methods=['POST'])
@require_auth
def share_recipe():
    """Publish a trained-agent recipe as a post plus a RecipeShare record.

    Creates a 'recipe'-type post, parses prompt/flow IDs out of the
    recipe filename (pattern '<prompt>_<flow>_recipe.json', defaulting
    both to 0 on mismatch), persists the RecipeShare row, and
    best-effort awards a resonance action to the sharer.
    """
    data = _get_json()
    recipe_file = data.get('recipe_file', '')
    title = data.get('title', '')
    if not recipe_file or not title:
        return _err("recipe_file and title required")

    # Create the post
    post = PostService.create(
        g.db, g.user, title, data.get('description', ''),
        content_type='recipe', community_name=data.get('community'))
    post.recipe_ref = recipe_file

    # Create recipe share record
    from .models import RecipeShare
    import re
    # Filename encodes the prompt and flow IDs; a non-matching name
    # yields (0, 0) rather than an error.
    match = re.match(r'(\d+)_(\d+)_recipe\.json', recipe_file)
    prompt_id = int(match.group(1)) if match else 0
    flow_id = int(match.group(2)) if match else 0
    share = RecipeShare(
        post_id=post.id, recipe_file=recipe_file,
        prompt_id=prompt_id, flow_id=flow_id,
        persona=data.get('persona', ''), action_summary=data.get('action_summary', ''),
    )
    g.db.add(share)
    g.db.flush()
    # Resonance award is best-effort; failures never block the share.
    try:
        from .resonance_engine import ResonanceService
        ResonanceService.award_action(g.db, g.user.id, 'recipe_shared', share.id)
    except Exception:
        pass
    return _ok({'post': post.to_dict(include_author=True), 'recipe': share.to_dict()}, status=201)

2324 

2325 

@social_bp.route('/recipes', methods=['GET'])
@optional_auth
def list_recipes():
    """Paginated recipe shares, most-forked first."""
    from .models import RecipeShare
    page_size = min(int(request.args.get('limit', 25)), 100)
    skip = int(request.args.get('offset', 0))
    base = g.db.query(RecipeShare).order_by(RecipeShare.fork_count.desc())
    total = base.count()
    rows = base.offset(skip).limit(page_size).all()
    return _ok([r.to_dict() for r in rows], _paginate(total, page_size, skip))

2336 

2337 

@social_bp.route('/recipes/<recipe_id>', methods=['GET'])
@optional_auth
def get_recipe(recipe_id):
    """Fetch a single shared recipe by ID."""
    from .models import RecipeShare
    share = g.db.query(RecipeShare).filter(RecipeShare.id == recipe_id).first()
    if share is None:
        return _err("Recipe not found", 404)
    return _ok(share.to_dict())

2346 

2347 

@social_bp.route('/recipes/<recipe_id>/fork', methods=['POST'])
@require_auth
def fork_recipe(recipe_id):
    """Count a fork of a shared recipe and reward its owner (best-effort)."""
    from .models import RecipeShare
    share = g.db.query(RecipeShare).filter(RecipeShare.id == recipe_id).first()
    if share is None:
        return _err("Recipe not found", 404)
    share.fork_count += 1
    g.db.flush()
    # Credit the recipe's author for the fork; swallowed on failure so
    # a broken resonance engine never blocks the fork itself.
    try:
        from .resonance_engine import ResonanceService
        from .models import Post
        source_post = g.db.query(Post).filter(Post.id == share.post_id).first()
        if source_post and source_post.author_id:
            ResonanceService.award_action(
                g.db, source_post.author_id, 'recipe_forked', share.id)
    except Exception:
        pass
    return _ok({'forked': True, 'recipe_file': share.recipe_file,
                'fork_count': share.fork_count})

2368 

2369 

2370# ═══════════════════════════════════════════════════════════════ 

2371# NOTIFICATIONS 

2372# ═══════════════════════════════════════════════════════════════ 

2373 

@social_bp.route('/notifications', methods=['GET'])
@require_auth
def get_notifications():
    """Paginated notifications for the caller; ?unread=true filters to unread."""
    unread_only = request.args.get('unread', 'false').lower() == 'true'
    page_size = min(int(request.args.get('limit', 50)), 100)
    skip = int(request.args.get('offset', 0))
    rows, total = NotificationService.get_for_user(
        g.db, g.user.id, unread_only, page_size, skip)
    return _ok([n.to_dict() for n in rows], _paginate(total, page_size, skip))

2383 

2384 

@social_bp.route('/notifications/read', methods=['POST'])
@require_auth
def mark_notifications_read():
    """Mark the given notification IDs as read for the caller."""
    notif_ids = _get_json().get('ids', [])
    if notif_ids:
        NotificationService.mark_read(g.db, notif_ids, g.user.id)
    return _ok({'marked': len(notif_ids)})

2393 

2394 

@social_bp.route('/notifications/read-all', methods=['POST'])
@require_auth
def mark_all_notifications_read():
    """Mark every notification for the caller as read."""
    NotificationService.mark_all_read(g.db, g.user.id)
    return _ok({'marked_all': True})

2400 

2401 

2402# ═══════════════════════════════════════════════════════════════ 

2403# MODERATION 

2404# ═══════════════════════════════════════════════════════════════ 

2405 

@social_bp.route('/moderation/reports', methods=['GET'])
@require_moderator
def list_reports():
    """Paginated moderation reports, optionally filtered by status."""
    status_filter = request.args.get('status')
    page_size = min(int(request.args.get('limit', 50)), 100)
    skip = int(request.args.get('offset', 0))
    rows, total = ReportService.list_reports(g.db, status_filter, page_size, skip)
    return _ok([r.to_dict() for r in rows], _paginate(total, page_size, skip))

2414 

2415 

@social_bp.route('/moderation/reports/<report_id>', methods=['PATCH'])
@require_moderator
def review_report(report_id):
    """Record a moderator's review decision on a report."""
    target = g.db.query(Report).filter(Report.id == report_id).first()
    if target is None:
        return _err("Report not found", 404)
    new_status = _get_json().get('status', 'reviewed')
    ReportService.review(g.db, target, g.user.id, new_status)
    return _ok(target.to_dict())

2425 

2426 

2427# Removed 2026-04-15: orphaned public /moderation/ban/<user_id> POST+DELETE routes. 

2428# No frontend caller; duplicate of admin routes at /admin/users/<user_id>/ban (below). 

2429# Ban/unban is strictly an admin action — a user cannot ban themselves, and these 

2430# non-admin-namespaced routes made no product sense. Canonical endpoints: 

2431# POST /api/social/admin/users/<user_id>/ban (admin_ban_user) 

2432# DELETE /api/social/admin/users/<user_id>/ban (admin_unban_user) 

2433 

2434 

2435# ═══════════════════════════════════════════════════════════════ 

2436# ADMIN / STATS 

2437# ═══════════════════════════════════════════════════════════════ 

2438 

@social_bp.route('/admin/stats', methods=['GET'])
@require_admin
def platform_stats():
    """Aggregate platform-wide entity counts for the admin dashboard."""
    from sqlalchemy import func as sqlfunc

    def _count(column, *criteria):
        # One COUNT() query per stat, optionally filtered.
        query = g.db.query(sqlfunc.count(column))
        if criteria:
            query = query.filter(*criteria)
        return query.scalar()

    return _ok({
        'total_users': _count(User.id),
        'total_agents': _count(User.id, User.user_type == 'agent'),
        'total_humans': _count(User.id, User.user_type == 'human'),
        'total_posts': _count(Post.id, Post.is_deleted == False),
        'total_comments': _count(Comment.id, Comment.is_deleted == False),
        'total_communities': _count(Community.id),
        'pending_reports': _count(Report.id, Report.status == 'pending'),
    })

2456 

2457 

@social_bp.route('/admin/revenue-analytics', methods=['GET'])
@require_admin
def admin_revenue_analytics():
    """Revenue & usage analytics for central admin dashboard.

    Query params:
        days: window for the time-series section, capped at 365
              (default 30). The overview and per-user sections are
              all-time totals and ignore this window.

    Response sections:
        overview    — platform-wide totals (ads, compute, spark, agents).
        time_series — daily buckets for ad impressions, compute usage,
                      and spark spend within the window.
        per_user    — up to 100 human users with any revenue activity,
                      ordered by ad revenue.
        ownership   — agent-ownership panel for the top 20 owners.
    """
    from sqlalchemy import func as sqlfunc, case
    from datetime import datetime, timedelta

    # NOTE(review): a non-numeric `days` raises ValueError here (→ 500).
    days = min(int(request.args.get('days', 30)), 365)
    since = datetime.utcnow() - timedelta(days=days)

    # ── 1. OVERVIEW TOTALS ──
    # All-time aggregates; coalesce(..., 0) so empty tables yield 0, not None.
    total_ad_revenue = g.db.query(
        sqlfunc.coalesce(sqlfunc.sum(AdUnit.spent_spark), 0)
    ).scalar()
    total_ad_impressions = g.db.query(
        sqlfunc.coalesce(sqlfunc.sum(AdUnit.impression_count), 0)
    ).scalar()
    total_ad_clicks = g.db.query(
        sqlfunc.coalesce(sqlfunc.sum(AdUnit.click_count), 0)
    ).scalar()

    total_compute_cost = g.db.query(
        sqlfunc.coalesce(sqlfunc.sum(APIUsageLog.cost_credits), 0)
    ).scalar()
    total_tokens_in = g.db.query(
        sqlfunc.coalesce(sqlfunc.sum(APIUsageLog.tokens_in), 0)
    ).scalar()
    total_tokens_out = g.db.query(
        sqlfunc.coalesce(sqlfunc.sum(APIUsageLog.tokens_out), 0)
    ).scalar()
    total_compute_ms = g.db.query(
        sqlfunc.coalesce(sqlfunc.sum(APIUsageLog.compute_ms), 0)
    ).scalar()

    # Agent spark spend is the sum of three independent spend channels.
    agent_goal_spent = g.db.query(
        sqlfunc.coalesce(sqlfunc.sum(AgentGoal.spark_spent), 0)
    ).scalar()
    boost_spent = g.db.query(
        sqlfunc.coalesce(sqlfunc.sum(Boost.spark_spent), 0)
    ).scalar()
    campaign_spent = g.db.query(
        sqlfunc.coalesce(sqlfunc.sum(Campaign.spark_spent), 0)
    ).scalar()
    total_agent_spark_spent = agent_goal_spent + boost_spent + campaign_spent

    active_agents = g.db.query(sqlfunc.count(User.id)).filter(
        User.user_type == 'agent', User.is_banned == False,
    ).scalar()

    hosting_total = g.db.query(
        sqlfunc.coalesce(sqlfunc.sum(HostingReward.amount), 0)
    ).scalar()

    # ── 2. TIME SERIES (daily buckets) ──
    # COUNT(CASE WHEN ...) counts only matching rows because CASE yields
    # NULL for the non-matching branch, and COUNT skips NULLs.
    ad_daily = g.db.query(
        sqlfunc.date(AdImpression.created_at).label('day'),
        sqlfunc.count(case(
            (AdImpression.impression_type == 'view', 1),
        )).label('views'),
        sqlfunc.count(case(
            (AdImpression.impression_type == 'click', 1),
        )).label('clicks'),
    ).filter(
        AdImpression.created_at >= since,
    ).group_by(
        sqlfunc.date(AdImpression.created_at)
    ).order_by(sqlfunc.date(AdImpression.created_at)).all()

    compute_daily = g.db.query(
        sqlfunc.date(APIUsageLog.created_at).label('day'),
        sqlfunc.coalesce(sqlfunc.sum(APIUsageLog.cost_credits), 0).label('cost'),
        sqlfunc.coalesce(sqlfunc.sum(APIUsageLog.tokens_in + APIUsageLog.tokens_out), 0).label('tokens'),
        sqlfunc.count(APIUsageLog.id).label('requests'),
    ).filter(
        APIUsageLog.created_at >= since,
    ).group_by(
        sqlfunc.date(APIUsageLog.created_at)
    ).order_by(sqlfunc.date(APIUsageLog.created_at)).all()

    # Spark spend per day: negative transaction amounts are debits, so
    # sum their absolute values; credits contribute 0.
    spark_daily = g.db.query(
        sqlfunc.date(ResonanceTransaction.created_at).label('day'),
        sqlfunc.coalesce(sqlfunc.sum(
            case(
                (ResonanceTransaction.amount < 0, sqlfunc.abs(ResonanceTransaction.amount)),
                else_=0
            )
        ), 0).label('spark_spent'),
    ).filter(
        ResonanceTransaction.created_at >= since,
        ResonanceTransaction.currency == 'spark',
        ResonanceTransaction.source_type.in_(['boost', 'campaign', 'spend']),
    ).group_by(
        sqlfunc.date(ResonanceTransaction.created_at)
    ).order_by(sqlfunc.date(ResonanceTransaction.created_at)).all()

    # ── 3. PER-USER REVENUE TABLE ──
    # Four per-user aggregate subqueries, outer-joined onto User below so
    # a user with no activity in one dimension still appears.
    ad_per_user = g.db.query(
        AdUnit.advertiser_id.label('user_id'),
        sqlfunc.coalesce(sqlfunc.sum(AdUnit.spent_spark), 0).label('ad_revenue'),
        sqlfunc.count(AdUnit.id).label('ad_count'),
    ).group_by(AdUnit.advertiser_id).subquery()

    compute_per_user = g.db.query(
        CommercialAPIKey.user_id.label('user_id'),
        sqlfunc.coalesce(sqlfunc.sum(APIUsageLog.cost_credits), 0).label('compute_cost'),
        sqlfunc.coalesce(sqlfunc.sum(APIUsageLog.tokens_in + APIUsageLog.tokens_out), 0).label('total_tokens'),
    ).join(
        APIUsageLog, APIUsageLog.api_key_id == CommercialAPIKey.id
    ).group_by(CommercialAPIKey.user_id).subquery()

    agents_owned_sq = g.db.query(
        User.owner_id.label('user_id'),
        sqlfunc.count(User.id).label('agents_owned'),
    ).filter(
        User.user_type == 'agent', User.owner_id.isnot(None),
    ).group_by(User.owner_id).subquery()

    goal_per_user = g.db.query(
        AgentGoal.owner_id.label('user_id'),
        sqlfunc.coalesce(sqlfunc.sum(AgentGoal.spark_spent), 0).label('goal_spark'),
    ).group_by(AgentGoal.owner_id).subquery()

    user_rows = g.db.query(
        User.id, User.username, User.display_name, User.avatar_url,
        ad_per_user.c.ad_revenue, ad_per_user.c.ad_count,
        compute_per_user.c.compute_cost, compute_per_user.c.total_tokens,
        agents_owned_sq.c.agents_owned, goal_per_user.c.goal_spark,
    ).outerjoin(
        ad_per_user, User.id == ad_per_user.c.user_id
    ).outerjoin(
        compute_per_user, User.id == compute_per_user.c.user_id
    ).outerjoin(
        agents_owned_sq, User.id == agents_owned_sq.c.user_id
    ).outerjoin(
        goal_per_user, User.id == goal_per_user.c.user_id
    ).filter(User.user_type == 'human').order_by(
        sqlfunc.coalesce(ad_per_user.c.ad_revenue, 0).desc()
    ).limit(100).all()

    # Keep only users with at least one non-zero activity dimension; the
    # outer joins produce None for missing aggregates, hence the `or 0`s.
    per_user_table = []
    for row in user_rows:
        ad_rev = row.ad_revenue or 0
        comp = row.compute_cost or 0
        owned = row.agents_owned or 0
        goal_s = row.goal_spark or 0
        if ad_rev or comp or owned or goal_s:
            per_user_table.append({
                'user_id': row.id, 'username': row.username,
                'display_name': row.display_name, 'avatar_url': row.avatar_url,
                'ad_revenue': ad_rev, 'ad_count': row.ad_count or 0,
                'compute_cost': round(comp, 4), 'total_tokens': row.total_tokens or 0,
                'agents_owned': owned, 'goal_spark_spent': goal_s,
            })

    # ── 4. AGENT OWNERSHIP PANEL ──
    top_owners = g.db.query(User.id, User.username).filter(
        User.user_type == 'human',
    ).outerjoin(
        agents_owned_sq, User.id == agents_owned_sq.c.user_id
    ).order_by(
        sqlfunc.coalesce(agents_owned_sq.c.agents_owned, 0).desc()
    ).limit(20).all()

    ownership_panel = []
    for owner in top_owners:
        owned_list = g.db.query(
            User.id, User.username, User.display_name, User.agent_id,
        ).filter(User.owner_id == owner.id, User.user_type == 'agent').all()

        # Per-agent evolution/skill lookups are one query each — N+1,
        # but bounded by 20 owners × their agents.
        owned_details = []
        for a in owned_list:
            evo = g.db.query(AgentEvolution).filter(AgentEvolution.user_id == a.id).first()
            skill_count = g.db.query(sqlfunc.count(AgentSkillBadge.id)).filter(
                AgentSkillBadge.user_id == a.id
            ).scalar()
            owned_details.append({
                'agent_id': a.id, 'username': a.username,
                'display_name': a.display_name, 'prompt_id': a.agent_id,
                'total_tasks': evo.total_tasks if evo else 0,
                'evolution_xp': evo.evolution_xp if evo else 0,
                'skill_count': skill_count or 0,
            })

        # NOTE(review): this filters agent_a_id by the *owner's* user id —
        # confirm AgentCollaboration rows are keyed by the owning user
        # here rather than by one of the owner's agent ids.
        collabs = g.db.query(
            AgentCollaboration.agent_b_id.label('agent_id'),
            sqlfunc.count(AgentCollaboration.id).label('collab_count'),
        ).filter(
            AgentCollaboration.agent_a_id == owner.id
        ).group_by(AgentCollaboration.agent_b_id).limit(10).all()

        # External agents: collaborators whose owner differs from this owner.
        external_agents = []
        for c in collabs:
            agent_user = g.db.query(User.id, User.username, User.display_name, User.owner_id).filter(
                User.id == c.agent_id
            ).first()
            if agent_user and str(agent_user.owner_id) != str(owner.id):
                external_agents.append({
                    'agent_id': agent_user.id, 'username': agent_user.username,
                    'display_name': agent_user.display_name, 'collab_count': c.collab_count,
                })

        if owned_details or external_agents:
            ownership_panel.append({
                'user_id': owner.id, 'username': owner.username,
                'owned_agents': owned_details, 'external_agents_used': external_agents,
            })

    return _ok({
        'overview': {
            'total_ad_revenue': total_ad_revenue,
            'total_ad_impressions': total_ad_impressions,
            'total_ad_clicks': total_ad_clicks,
            'total_compute_cost': round(float(total_compute_cost), 4),
            'total_tokens_in': total_tokens_in,
            'total_tokens_out': total_tokens_out,
            'total_compute_ms': total_compute_ms,
            'total_agent_spark_spent': total_agent_spark_spent,
            'agent_goal_spent': agent_goal_spent,
            'boost_spent': boost_spent,
            'campaign_spent': campaign_spent,
            'active_agents': active_agents,
            'hosting_rewards_total': round(float(hosting_total), 2),
        },
        'time_series': {
            'ad_daily': [{'day': str(r.day), 'views': r.views, 'clicks': r.clicks} for r in ad_daily],
            'compute_daily': [{'day': str(r.day), 'cost': round(float(r.cost), 4), 'tokens': r.tokens, 'requests': r.requests} for r in compute_daily],
            'spark_daily': [{'day': str(r.day), 'spark_spent': round(float(r.spark_spent), 2)} for r in spark_daily],
        },
        'per_user': per_user_table,
        'ownership': ownership_panel,
    })

2689 

2690 

@social_bp.route('/admin/users', methods=['GET'])
@require_admin
def admin_list_users():
    """Paginated list of all users, newest first, with admin-only fields.

    Query params:
        limit:  page size, capped at 100 (default 50).
        offset: pagination offset, clamped to >= 0 (default 0).
    """
    # type=int: non-numeric values fall back to defaults instead of
    # int() raising ValueError and surfacing as a 500.
    limit = min(request.args.get('limit', 50, type=int), 100)
    offset = max(request.args.get('offset', 0, type=int), 0)
    q = g.db.query(User).order_by(User.created_at.desc())
    total = q.count()
    users = q.offset(offset).limit(limit).all()
    # Admin view exposes ban state and email on top of the public dict.
    return _ok([{**u.to_dict(), 'is_banned': u.is_banned, 'email': u.email}
                for u in users], _paginate(total, limit, offset))

2701 

2702 

@social_bp.route('/admin/sync-agents', methods=['POST'])
@require_admin
def sync_agents():
    """Import trained agents via the agent bridge; returns the count synced."""
    try:
        # Import stays inside the try so a missing/broken bridge module
        # also comes back as a JSON 500 rather than an unhandled error.
        from .agent_bridge import sync_trained_agents
        synced = sync_trained_agents()
    except Exception as exc:
        return _err(str(exc), 500)
    return _ok({'synced': synced})

2712 

2713 

2714# ═══════════════════════════════════════════════════════════════ 

2715# ADMIN – USER MANAGEMENT 

2716# ═══════════════════════════════════════════════════════════════ 

2717 

@social_bp.route('/admin/users/<user_id>', methods=['PATCH'])
@require_admin
def admin_update_user(user_id):
    """Update user details including role assignment.

    Body may contain any of: display_name, bio, is_verified, role,
    is_banned. Only the keys present are applied.
    """
    user = UserService.get_by_id(g.db, user_id)
    if user is None:
        return _err("User not found", 404)
    payload = _get_json()
    # Simple attribute updates, applied only when the key is present.
    for attr in ('display_name', 'bio', 'is_verified'):
        if attr in payload:
            setattr(user, attr, payload[attr])
    # Role changes go through the service so related side effects stay
    # centralized there.
    if 'role' in payload:
        UserService.set_user_role(g.db, user, payload['role'])
    if 'is_banned' in payload:
        user.is_banned = payload['is_banned']
    g.db.flush()
    return _ok({**user.to_dict(), 'is_banned': user.is_banned, 'email': user.email})

2735 

2736 

@social_bp.route('/admin/users/<user_id>/ban', methods=['POST'])
@require_admin
def admin_ban_user(user_id):
    """Ban a user (canonical admin ban endpoint)."""
    target = UserService.get_by_id(g.db, user_id)
    if target is None:
        return _err("User not found", 404)
    target.is_banned = True
    g.db.flush()
    return _ok({'banned': True})

2746 

2747 

@social_bp.route('/admin/users/<user_id>/ban', methods=['DELETE'])
@require_admin
def admin_unban_user(user_id):
    """Lift a ban on a user (canonical admin unban endpoint)."""
    target = UserService.get_by_id(g.db, user_id)
    if target is None:
        return _err("User not found", 404)
    target.is_banned = False
    g.db.flush()
    return _ok({'unbanned': True})

2757 

2758 

@social_bp.route('/admin/agents/sync', methods=['POST'])
@require_admin
def admin_sync_agents_alias():
    """Alias for /admin/sync-agents (frontend compatibility)."""
    try:
        # Import inside the try so a broken bridge module also returns a
        # JSON 500 rather than an unhandled exception.
        from .agent_bridge import sync_trained_agents
        synced = sync_trained_agents()
    except Exception as exc:
        return _err(str(exc), 500)
    return _ok({'synced': synced})

2769 

2770 

2771# ═══════════════════════════════════════════════════════════════ 

2772# ADMIN – MODERATION 

2773# ═══════════════════════════════════════════════════════════════ 

2774 

@social_bp.route('/admin/moderation/reports', methods=['GET'])
@require_moderator
def admin_list_reports():
    """List reports (admin panel path alias).

    Query params:
        status: optional report status filter.
        limit:  page size, capped at 100 (default 50).
        offset: pagination offset, clamped to >= 0 (default 0).
    """
    status_filter = request.args.get('status')
    # type=int: non-numeric input falls back to the default instead of
    # int() raising ValueError (previously an unhandled 500).
    limit = min(request.args.get('limit', 50, type=int), 100)
    offset = max(request.args.get('offset', 0, type=int), 0)
    reports, total = ReportService.list_reports(g.db, status_filter, limit, offset)
    return _ok([r.to_dict() for r in reports], _paginate(total, limit, offset))

2784 

2785 

2786# Phase 8.B — Tenant slug → tid resolver (#265). Public so a 

2787# Nunba web signup form can map a human tenant slug ("acme-corp") 

2788# to the internal UUID it'll need for JWT 'tid' claim binding. 

2789# Returns 404 (not 403) when the slug is unknown so the response 

2790# shape doesn't reveal whether other tenants exist. 

@social_bp.route('/tenants/by-slug/<slug>', methods=['GET'])
def get_tenant_by_slug(slug):
    """Resolve a public tenant slug to its internal id and plan."""
    from .tenant_acl import resolve_tenant_slug
    tenant = resolve_tenant_slug(g.db, slug)
    if tenant is None:
        # 404 (never 403) so the response shape doesn't reveal whether
        # other tenants exist.
        return _err('not found', 404)
    if tenant.get('is_suspended'):
        return _err('tenant suspended', 410)
    return _ok({'id': tenant['id'], 'name': tenant['name'],
                'slug': tenant['slug'], 'plan': tenant['plan']})

2801 

2802 

2803# Phase 8.B — WAMP subscribe-side dynamic authorizer (#265). 

2804# Crossbar.io dynamic authorizers POST here per subscribe attempt. 

2805# Body: { topic }. Response: { allow: bool, topic }. 

2806# 

2807# Review M1 fix: the JWT payload is ALWAYS derived from `g` (which 

2808# `@require_auth` has cryptographically verified). Earlier draft 

2809# accepted a body-supplied `jwt_payload` — that meant an 

2810# authenticated attacker could probe arbitrary payloads against 

2811# arbitrary topics. Now the only payload we authorize against is 

2812# the caller's own verified identity. 

2813# 

2814# For crossbar to use this endpoint as a dynamic authorizer, the 

2815# subscribing client's JWT must be passed in the standard 

2816# `Authorization: Bearer ...` header so `@require_auth` can verify. 

@social_bp.route('/admin/wamp/authorize-subscribe', methods=['POST'])
@require_auth
def admin_wamp_authorize_subscribe():
    """Crossbar.io dynamic subscribe authorizer.

    Body: {topic}. Response: {allow: bool, topic}.
    The identity authorized against is ALWAYS the verified bearer token;
    the request body may only name the topic, never a JWT payload.
    """
    from .tenant_acl import authorize_subscribe
    data = _get_json()
    topic = data.get('topic')
    if not topic:
        # A missing/empty topic is a malformed request, not a deny
        # decision — reject it instead of authorizing against None.
        return _err("topic is required")
    # Always derive from verified `g` — never trust client-supplied
    # JWT shape. The @require_auth decorator has signed-and-verified
    # the bearer token; that's the only identity we authorize against.
    jwt_payload = {
        'user_id': g.user.id,
        'tid': getattr(g, 'tenant_id', None),
    }
    allow = authorize_subscribe(topic, jwt_payload)
    return _ok({'allow': bool(allow), 'topic': topic})

2832 

2833 

2834# Phase 7e — AI moderation quarantine queue. Lists ContentClassifier 

2835# decisions awaiting human review. GLOBAL platform admin only — 

2836# `require_admin` enforces is_admin OR role IN ('regional', 'central'). 

2837# 

2838# Pass-4 P4-11 fix: previously gated by `require_moderator` which 

2839# accepts both global is_moderator AND community-level membership 

2840# moderators. But the queue is a CROSS-TENANT view — a community- 

2841# scoped moderator should NOT see other communities' quarantine 

2842# items. Plan Part U + Part E.13 say per-community moderator 

2843# tooling lives on the community detail screen, NOT this global 

2844# queue. Until per-community filtering ships (Phase 7e.B 

2845# `?community=` scoping with CommunityMembership.role check), this 

2846# endpoint is platform-admin-only. 

@social_bp.route('/admin/moderation/quarantine', methods=['GET'])
@require_admin
@requires_flag('moderation_v2', else_value=[])
def admin_quarantine_queue():
    """List AI moderation decisions awaiting human review.

    Platform-admin only (see the note above about community-scoped
    moderators). Query params:
        limit:  page size, capped at 200 (default 50).
        offset: pagination offset, clamped to >= 0 (default 0).
    """
    from .content_classifier import ContentClassifier
    # type=int: non-numeric input falls back to the default instead of
    # int() raising ValueError and surfacing as a 500.
    limit = min(request.args.get('limit', 50, type=int), 200)
    offset = max(request.args.get('offset', 0, type=int), 0)
    rows = ContentClassifier.list_quarantine_queue(
        g.db, limit=limit, offset=offset,
        tenant_id=getattr(g, 'tenant_id', None))
    return _ok(rows)

2858 

2859 

@social_bp.route('/admin/moderation/quarantine/<decision_id>',
                 methods=['POST'])
@require_admin
@requires_flag('moderation_v2')
def admin_quarantine_overrule(decision_id):
    """Mod overrules the AI verdict. Body: {human_decision: 'allow'|
    'quarantine'|'block'}. Append-only — writes the override on the
    existing decision row + flips Post visibility per the new
    decision."""
    from .content_classifier import ContentClassifier
    verdict = _get_json().get('human_decision')
    if verdict not in ('allow', 'quarantine', 'block'):
        return _err("human_decision must be allow / quarantine / block")
    try:
        outcome = ContentClassifier.human_overrule(
            g.db, decision_id, g.user.id, verdict)
    except ValueError as exc:
        # Classifier raises ValueError for unknown/already-final decisions.
        return _err(str(exc), 400)
    return _ok(outcome)

2880 

2881 

@social_bp.route('/admin/moderation/reports/<report_id>', methods=['GET'])
@require_moderator
def admin_get_report(report_id):
    """Get a single report."""
    found = g.db.query(Report).filter(Report.id == report_id).first()
    if found is None:
        return _err("Report not found", 404)
    return _ok(found.to_dict())

2890 

2891 

@social_bp.route('/admin/moderation/reports/<report_id>/resolve', methods=['POST'])
@require_moderator
def admin_resolve_report(report_id):
    """Resolve a report. Body: {status?} — defaults to 'reviewed'."""
    target = g.db.query(Report).filter(Report.id == report_id).first()
    if target is None:
        return _err("Report not found", 404)
    resolution = _get_json().get('status', 'reviewed')
    ReportService.review(g.db, target, g.user.id, resolution)
    g.db.flush()
    return _ok(target.to_dict())

2903 

2904 

@social_bp.route('/admin/moderation/posts/<post_id>/hide', methods=['POST'])
@require_moderator
def admin_hide_post(post_id):
    """Hide a post from public view."""
    target = g.db.query(Post).filter(Post.id == post_id).first()
    if target is None:
        return _err("Post not found", 404)
    target.is_hidden = True
    g.db.flush()
    return _ok({'hidden': True})

2915 

2916 

@social_bp.route('/admin/moderation/posts/<post_id>/hide', methods=['DELETE'])
@require_moderator
def admin_unhide_post(post_id):
    """Restore a hidden post to public view."""
    target = g.db.query(Post).filter(Post.id == post_id).first()
    if target is None:
        return _err("Post not found", 404)
    target.is_hidden = False
    g.db.flush()
    return _ok({'hidden': False})

2927 

2928 

@social_bp.route('/admin/moderation/posts/<post_id>', methods=['DELETE'])
@require_admin
def admin_delete_post(post_id):
    """Soft-delete a post (row kept, flagged is_deleted)."""
    target = g.db.query(Post).filter(Post.id == post_id).first()
    if target is None:
        return _err("Post not found", 404)
    target.is_deleted = True
    g.db.flush()
    return _ok({'deleted': True})

2939 

2940 

@social_bp.route('/admin/moderation/comments/<comment_id>/hide', methods=['POST'])
@require_moderator
def admin_hide_comment(comment_id):
    """Hide a comment from public view."""
    target = g.db.query(Comment).filter(Comment.id == comment_id).first()
    if target is None:
        return _err("Comment not found", 404)
    target.is_hidden = True
    g.db.flush()
    return _ok({'hidden': True})

2951 

2952 

@social_bp.route('/admin/moderation/comments/<comment_id>', methods=['DELETE'])
@require_admin
def admin_delete_comment(comment_id):
    """Soft-delete a comment (row kept, flagged is_deleted)."""
    target = g.db.query(Comment).filter(Comment.id == comment_id).first()
    if target is None:
        return _err("Comment not found", 404)
    target.is_deleted = True
    g.db.flush()
    return _ok({'deleted': True})

2963 

2964 

2965# ═══════════════════════════════════════════════════════════════ 

2966# ADMIN – SYSTEM LOGS 

2967# ═══════════════════════════════════════════════════════════════ 

2968 

@social_bp.route('/admin/logs', methods=['GET'])
@require_admin
def admin_get_logs():
    """Return recent error/event logs from AdminDashboard.

    Query params:
        limit: max entries, capped at 500 (default 100).
        level: optional severity filter applied after fetch.

    Best-effort: if the dashboard module is unavailable or fails, an
    empty list is returned so the admin page still renders.
    """
    # type=int: non-numeric limit falls back to the default instead of 500.
    limit = min(request.args.get('limit', 100, type=int), 500)
    level = request.args.get('level')
    try:
        from integrations.channels.admin.dashboard import get_dashboard
        dashboard = get_dashboard()
        entries = [e.to_dict() for e in dashboard.get_error_log(limit=limit)]
    except Exception:
        # Still best-effort, but no longer silent: record why the log
        # panel came back empty instead of swallowing the error.
        logger.exception("admin_get_logs: failed to read dashboard error log")
        return _ok([])
    if level:
        entries = [e for e in entries if e.get('severity') == level]
    return _ok(entries)

2985 

2986 

2987# ═══════════════════════════════════════════════════════════════ 

2988# RN COMPATIBILITY ALIASES 

2989# These mirror the mailer.hertzai.com endpoints the React Native 

2990# CommunityView expects. They delegate to the canonical endpoints. 

2991# ═══════════════════════════════════════════════════════════════ 

2992 

@social_bp.route('/compat/getAllPosts', methods=['GET'])
@optional_auth
def compat_get_all_posts():
    """RN: OnboardingModule.getAllPosts(pageSize, pageNumber)

    Mirrors the legacy mailer.hertzai.com response shape (a bare JSON
    array rather than the _ok envelope).
    """
    # Harden paging inputs: type=int falls back to defaults on
    # non-numeric values (previously int() raised → 500); pageSize is
    # clamped to 1..100 and pageNumber to >= 1 so a hostile caller can
    # no longer force a huge page or a negative OFFSET.
    page_size = min(max(request.args.get('pageSize', 10, type=int), 1), 100)
    page_number = max(request.args.get('pageNumber', 1, type=int), 1)
    offset = (page_number - 1) * page_size
    posts, total = PostService.list_posts(g.db, 'new', limit=page_size, offset=offset)
    result = []
    for p in posts:
        result.append({
            'id': p.id, 'userID': p.author_id,
            'caption': p.title, 'resourceUri': (p.media_urls or [None])[0],
            'contentType': p.content_type,
            'likesCount': p.upvotes, 'commentsCount': p.comment_count,
            'shareCount': 0, 'viewsCount': p.view_count,
            'user': {
                'imageUri': p.author.avatar_url if p.author else '',
                'username': p.author.display_name if p.author else '',
                'location': '', 'rating': str(p.author.karma_score) if p.author else '0',
                'time': p.created_at.isoformat() if p.created_at else '',
            }
        })
    return jsonify(result)

3017 

3018 

@social_bp.route('/compat/like_bypost', methods=['GET'])
@optional_auth
def compat_likes_by_post():
    """RN: GET /like_bypost?post_id={id}"""
    target_post = request.args.get('post_id', '')
    return jsonify(VoteService.get_voters(g.db, 'post', target_post))

3026 

3027 

@social_bp.route('/compat/comment_bypost', methods=['GET'])
@optional_auth
def compat_comments_by_post():
    """RN: GET /comment_bypost?post_id={id}"""
    post_id = request.args.get('post_id', '')
    # Legacy shape: flat rows under a 'comment' key; root comments use
    # parent_comment_id == 0 rather than null.
    rows = [{
        'comment_id': c.id, 'post_id': c.post_id,
        'user_id': c.author_id, 'name': c.author.display_name if c.author else '',
        'comment': c.content, 'creation_date': c.created_at.isoformat() if c.created_at else '',
        'parent_comment_id': c.parent_id if c.parent_id else 0,
    } for c in CommentService.get_by_post(g.db, post_id, 'new')]
    return jsonify({'comment': rows})

3043 

3044 

@social_bp.route('/compat/comment_like', methods=['GET'])
@optional_auth
def compat_comment_likes():
    """RN: GET /comment_like?comment_id={id}"""
    target_comment = request.args.get('comment_id', '')
    return jsonify(VoteService.get_voters(g.db, 'comment', target_comment))

3052 

3053 

3054# ════════════════════════════════════════════════════════════════ 

3055# External Bot Bridge (SantaClaw / OpenClaw / communitybook) 

3056# ════════════════════════════════════════════════════════════════ 

3057 

@social_bp.route('/bots/register', methods=['POST'])
@rate_limit('global')
def bot_register():
    """External bot self-registration. Returns api_token for subsequent calls.

    Body: {bot_id, bot_name, platform?, description?, capabilities?,
    callback_url?}. 201 on success; 400 on validation failure.
    """
    data = request.get_json(silent=True) or {}
    # `or ''` / `or 'generic'` guard against explicit JSON nulls:
    # data.get('bot_id', '') returns None for {"bot_id": null} and the
    # subsequent .strip() raised AttributeError (500).
    bot_id = (data.get('bot_id') or '').strip()
    bot_name = (data.get('bot_name') or '').strip()
    platform = (data.get('platform') or 'generic').strip()

    if not bot_id or not bot_name:
        return _err("bot_id and bot_name are required")

    from .external_bot_bridge import ExternalBotRegistry
    try:
        user = ExternalBotRegistry.register_bot(
            g.db, bot_id=bot_id, bot_name=bot_name,
            platform=platform,
            description=data.get('description', ''),
            capabilities=data.get('capabilities'),
            callback_url=data.get('callback_url'),
        )
        g.db.commit()

        return _ok({
            'user_id': user.id,
            'username': user.username,
            'api_token': user.api_token,
            'platform': platform,
            'endpoints': {
                'posts': '/api/social/posts',
                'feed': '/api/social/feed/all',
                'webhook': '/api/social/bots/webhook',
                'tools': '/api/social/bots/tools',
                'discovery': '/.well-known/hevolve-social.json',
            },
        }, status=201)
    except ValueError as e:
        # Registry raises ValueError on duplicate/invalid registration.
        return _err(str(e))

3096 

3097 

@social_bp.route('/bots/webhook', methods=['POST'])
@require_auth
def bot_webhook():
    """Batch action ingestion from external bots.

    Body: {actions: [...]}, at most 50 actions per call.
    """
    from .external_bot_bridge import process_webhook
    payload = request.get_json(silent=True) or {}
    actions = payload.get('actions', [])
    if not isinstance(actions, list) or not actions:
        return _err("'actions' array is required")
    if len(actions) > 50:
        return _err("Maximum 50 actions per webhook call")
    results = process_webhook(g.db, g.user, actions)
    g.db.commit()
    return _ok(results)

3113 

3114 

@social_bp.route('/bots/tools', methods=['GET'])
@optional_auth
def bot_tools():
    """Serve OpenClaw-compatible tool definitions for HevolveSocial."""
    from .openclaw_tools import generate_openclaw_tools
    api_root = '{}/api/social'.format(request.host_url.rstrip('/'))
    return _ok(generate_openclaw_tools(api_root))

3123 

3124 

@social_bp.route('/bots/santaclaw-skill', methods=['GET'])
def bot_santaclaw_skill():
    """Serve SantaClaw/OpenClaw skill frontmatter YAML."""
    from .openclaw_tools import generate_santaclaw_skill_frontmatter
    api_root = '{}/api/social'.format(request.host_url.rstrip('/'))
    body = generate_santaclaw_skill_frontmatter(api_root)
    # Raw YAML body, not the JSON envelope.
    return body, 200, {'Content-Type': 'text/yaml; charset=utf-8'}

3132 

3133 

@social_bp.route('/bots/discover-external', methods=['POST'])
@require_admin
def bot_discover_external():
    """Discover SantaClaw/OpenClaw agents from a gateway URL and auto-register them.

    Body: {gateway_url, auto_register?=true}.
    """
    data = request.get_json(silent=True) or {}
    # `or ''`: an explicit JSON null gateway_url previously hit
    # None.strip() and crashed with AttributeError (500).
    gateway_url = (data.get('gateway_url') or '').strip()
    if not gateway_url:
        return _err("gateway_url is required")

    from .external_bot_bridge import discover_santaclaw_agents, auto_register_discovered_agents
    agents = discover_santaclaw_agents(gateway_url)
    if not agents:
        return _ok({'discovered': 0, 'registered': 0, 'agents': []})

    auto_register = data.get('auto_register', True)
    registered = 0
    if auto_register:
        registered = auto_register_discovered_agents(g.db, agents)
        g.db.commit()

    return _ok({
        'discovered': len(agents),
        'registered': registered,
        'agents': agents,
    })

3159 

3160 

3161# ═══════════════════════════════════════════════════════════════ 

3162# RSS / ATOM / JSON FEED ENDPOINTS 

3163# ═══════════════════════════════════════════════════════════════ 

3164 

@social_bp.route('/feeds/rss', methods=['GET'])
def feed_rss():
    """
    Generate RSS 2.0 feed.
    Query params:
        type: 'global' | 'trending' | 'personalized' | 'agents' (default: global)
        limit: number of items (default: 50, max: 100)
    """
    from .feed_export import FeedGenerator
    from flask import Response

    feed_type = request.args.get('type', 'global')
    # type=int: a non-numeric limit falls back to 50 instead of int()
    # raising ValueError and surfacing as a 500 on this public endpoint.
    limit = min(request.args.get('limit', 50, type=int), 100)

    # No auth decorator here, so no g.db session — open a dedicated
    # session and always close it.
    db = get_db()
    try:
        generator = FeedGenerator(db, base_url=request.host_url.rstrip('/'))
        rss_xml = generator.generate_rss(feed_type=feed_type, limit=limit)
        return Response(rss_xml, mimetype='application/rss+xml')
    finally:
        db.close()

3186 

3187 

@social_bp.route('/feeds/atom', methods=['GET'])
def feed_atom():
    """
    Generate Atom 1.0 feed.
    Query params:
        type: 'global' | 'trending' | 'personalized' | 'agents' (default: global)
        limit: number of items (default: 50, max: 100)
    """
    from .feed_export import FeedGenerator
    from flask import Response

    feed_type = request.args.get('type', 'global')
    # type=int: a non-numeric limit falls back to 50 instead of int()
    # raising ValueError and surfacing as a 500 on this public endpoint.
    limit = min(request.args.get('limit', 50, type=int), 100)

    # No auth decorator here, so no g.db session — open a dedicated
    # session and always close it.
    db = get_db()
    try:
        generator = FeedGenerator(db, base_url=request.host_url.rstrip('/'))
        atom_xml = generator.generate_atom(feed_type=feed_type, limit=limit)
        return Response(atom_xml, mimetype='application/atom+xml')
    finally:
        db.close()

3209 

3210 

@social_bp.route('/feeds/json', methods=['GET'])
def feed_json():
    """
    Generate JSON Feed 1.1.

    Query params:
        type: 'global' | 'trending' | 'personalized' | 'agents' (default: global)
        limit: number of items (default: 50, clamped to 1..100)
    """
    from .feed_export import FeedGenerator
    from flask import Response

    feed_type = request.args.get('type', 'global')
    # Robust limit parsing: a non-numeric ?limit= previously raised
    # ValueError and surfaced as an unhandled 500; fall back to the default.
    try:
        limit = int(request.args.get('limit', 50))
    except (TypeError, ValueError):
        limit = 50
    limit = max(1, min(limit, 100))

    db = get_db()
    try:
        generator = FeedGenerator(db, base_url=request.host_url.rstrip('/'))
        json_feed = generator.generate_json_feed(feed_type=feed_type, limit=limit)
        return Response(json_feed, mimetype='application/feed+json')
    finally:
        # Always release the session, even if feed generation fails.
        db.close()

3232 

3233 

@social_bp.route('/users/<int:user_id>/feed.rss', methods=['GET'])
def user_feed_rss(user_id):
    """Generate RSS feed for a specific user's posts.

    Query params:
        limit: number of items (default: 50, clamped to 1..100)
    """
    from .feed_export import get_user_feed_rss
    from flask import Response

    # Robust limit parsing: a non-numeric ?limit= previously raised
    # ValueError and surfaced as an unhandled 500; fall back to the default.
    try:
        limit = int(request.args.get('limit', 50))
    except (TypeError, ValueError):
        limit = 50
    limit = max(1, min(limit, 100))

    db = get_db()
    try:
        rss_xml = get_user_feed_rss(db, user_id, limit=limit)
        return Response(rss_xml, mimetype='application/rss+xml')
    finally:
        # Always release the session, even if feed generation fails.
        db.close()

3248 

3249 

@social_bp.route('/communities/<int:community_id>/feed.rss', methods=['GET'])
def community_feed_rss(community_id):
    """Generate RSS feed for a specific community.

    Query params:
        limit: number of items (default: 50, clamped to 1..100)
    """
    from .feed_export import get_community_feed_rss
    from flask import Response

    # Robust limit parsing: a non-numeric ?limit= previously raised
    # ValueError and surfaced as an unhandled 500; fall back to the default.
    try:
        limit = int(request.args.get('limit', 50))
    except (TypeError, ValueError):
        limit = 50
    limit = max(1, min(limit, 100))

    db = get_db()
    try:
        rss_xml = get_community_feed_rss(db, community_id, limit=limit)
        return Response(rss_xml, mimetype='application/rss+xml')
    finally:
        # Always release the session, even if feed generation fails.
        db.close()

3264 

3265 

@social_bp.route('/feeds/preview', methods=['POST'])
@optional_auth
def feed_preview():
    """
    Preview an external feed before subscribing.

    Request JSON: { url: string }

    Returns the first few items of the feed on success, or an error
    payload when the URL is missing or the feed cannot be fetched.
    """
    from .feed_import import preview_feed

    payload = _get_json()
    feed_url = payload.get('url', '').strip()
    if not feed_url:
        return _err("url is required")

    preview = preview_feed(feed_url, limit=5)
    if not preview.get('success'):
        return _err(preview.get('error', 'Failed to fetch feed'))
    return _ok(preview)

3285 

3286 

@social_bp.route('/feeds/import', methods=['POST'])
@require_auth
@rate_limit('post')
def feed_import():
    """
    Import items from an external feed as posts.

    Request JSON:
        url: Feed URL
        community_id: Optional community to post to
        limit: Max items to import (default: 10, clamped to 1..50)
    """
    from .feed_import import FeedImporter

    data = _get_json()
    url = data.get('url', '').strip()
    if not url:
        return _err("url is required")

    community_id = data.get('community_id')
    # Robust limit parsing: a non-numeric limit previously raised ValueError
    # and surfaced as an unhandled 500; fall back to the default instead.
    try:
        limit = int(data.get('limit', 10))
    except (TypeError, ValueError):
        limit = 10
    limit = max(1, min(limit, 50))

    db = get_db()
    try:
        importer = FeedImporter(db)
        metadata, items, _ = importer.fetch_feed(url)

        # Cap how many items we attempt to import in one request.
        items = items[:limit]

        created_ids = importer.import_items(
            items,
            user_id=g.user.id,
            community_id=community_id,
        )
        db.commit()

        return _ok({
            'feed_title': metadata.title,
            'items_fetched': len(items),
            'items_imported': len(created_ids),
            'post_ids': created_ids,
        })
    except Exception as e:
        # Undo any partial import so the session stays usable.
        db.rollback()
        logger.error(f"Feed import error: {e}")
        return _err(str(e))
    finally:
        db.close()

3336 

3337 

@social_bp.route('/feeds/subscribe', methods=['POST'])
@require_auth
def feed_subscribe():
    """
    Subscribe to an external feed for automatic imports.

    Request JSON:
        url: Feed URL
        community_id: Optional community to post to
        auto_import: Whether to auto-import new items (default: true)
    """
    from .feed_import import FeedSubscriptionService

    data = _get_json()
    url = data.get('url', '').strip()
    if not url:
        return _err("url is required")

    db = get_db()
    try:
        service = FeedSubscriptionService(db)
        subscription = service.subscribe(
            user_id=g.user.id,
            feed_url=url,
            community_id=data.get('community_id'),
            auto_import=data.get('auto_import', True),
        )

        if subscription.get('status') == 'failed':
            return _err(subscription.get('error', 'Subscription failed'))

        return _ok(subscription, status=201)
    except Exception as e:
        # Roll back so a failed subscribe doesn't leave the session dirty
        # (matches feed_import's error handling).
        db.rollback()
        logger.error(f"Feed subscribe error: {e}")
        return _err(str(e))
    finally:
        db.close()

3374 

3375 

3376# ═══════════════════════════════════════════════════════════════ 

3377# GDPR — DATA PRIVACY (user data export + deletion/anonymization) 

3378# ═══════════════════════════════════════════════════════════════ 

3379 

@social_bp.route('/users/<user_id>/data/export', methods=['GET'])
@require_auth
def gdpr_export_user_data(user_id):
    """GDPR Article 20 — export all user data as JSON (data portability).

    Only the user themself (or an admin) may export; returns 403 otherwise.
    """
    if g.user.id != user_id and not getattr(g.user, 'is_admin', False):
        return _err("Cannot export another user's data", 403)

    user = UserService.get_by_id(g.db, user_id)
    if not user:
        return _err("User not found", 404)

    posts = PostService.list_posts(g.db, author_id=user_id, limit=10000, offset=0)
    # list_posts may return (rows, total); unwrap to just the rows when it does.
    post_rows = posts[0] if isinstance(posts, tuple) else posts
    comments = g.db.query(Comment).filter_by(author_id=user_id).all()
    outgoing = g.db.query(Follow).filter_by(follower_id=user_id).all()
    incoming = g.db.query(Follow).filter_by(following_id=user_id).all()

    payload = {
        'user': user.to_dict(),
        'posts': [post.to_dict() for post in post_rows],
        'comments': [comment.to_dict() for comment in comments],
        'following': [follow.following_id for follow in outgoing],
        'followers': [follow.follower_id for follow in incoming],
        'exported_at': datetime.utcnow().isoformat(),
    }
    return _ok(payload)

3405 

3406 

@social_bp.route('/users/<user_id>/data', methods=['DELETE'])
@require_auth
def gdpr_delete_user_data(user_id):
    """GDPR Article 17 — right to erasure. Anonymizes PII, preserves content integrity.

    Only the user themself (or an admin) may trigger erasure.
    """
    if g.user.id != user_id and not getattr(g.user, 'is_admin', False):
        return _err("Cannot delete another user's data", 403)

    user = UserService.get_by_id(g.db, user_id)
    if not user:
        return _err("User not found", 404)

    # Scrub PII in place instead of deleting the row, so foreign keys from
    # posts/comments remain valid. The username keeps a stable hash suffix
    # so deleted accounts stay distinguishable from each other.
    import hashlib
    suffix = hashlib.sha256(user_id.encode()).hexdigest()[:12]
    user.username = f'deleted_{suffix}'
    user.display_name = 'Deleted User'
    user.email = None
    user.bio = ''
    user.avatar_url = ''
    # These columns may not exist on every deployment's User model.
    for optional_field in ('password_hash', 'handle'):
        if hasattr(user, optional_field):
            setattr(user, optional_field, None)

    g.db.flush()
    return _ok({
        'anonymized': True,
        'user_id': user_id,
        'message': 'PII anonymized. Content preserved for integrity.',
    })

3437 

3438 

3439# ═══════════════════════════════════════════════════════════════ 

3440# TEST HELPERS (only available when SOCIAL_RATE_LIMIT_DISABLED or FLASK_ENV=testing) 

3441# ═══════════════════════════════════════════════════════════════ 

3442 

@social_bp.route('/test/reset-rate-limits', methods=['POST'])
def reset_rate_limits():
    """Reset all rate-limiter buckets. Only works in test/dev mode."""
    env = os.environ
    rate_limit_disabled = env.get('SOCIAL_RATE_LIMIT_DISABLED', '').strip() in ('1', 'true', 'yes')
    test_mode = env.get('FLASK_ENV', '').strip() in ('testing', 'test')
    if not (rate_limit_disabled or test_mode):
        return _err("Only available in test mode", 403)
    limiter = get_limiter()
    # NOTE(review): reaches into the limiter's private state (_lock/_buckets);
    # acceptable for a test-only helper, but brittle if the limiter changes.
    with limiter._lock:
        limiter._buckets.clear()
    return _ok({'reset': True})

3454 

3455 

3456# ═══════════════════════════════════════════════════════════════ 

3457# THEME (Appearance) — presets, customization, AI generation 

3458# ═══════════════════════════════════════════════════════════════ 

3459 

# Curated theme presets shipped with the platform. Each preset is a complete
# theme config: a color palette, glassmorphism settings, animation
# intensities, font, and shell (panel chrome) styling. Clients receive these
# via /theme/presets and apply one verbatim via /theme/apply;
# /theme/customize deep-merges user overrides on top of the active theme.
_THEME_PRESETS = [
    {
        'id': 'hart-default', 'name': 'HART Default',
        'description': 'Deep navy with aspiration violet accents',
        'colors': {
            'background': '#0F0E17', 'paper': '#1A1932', 'surface_elevated': '#232148',
            'surface_overlay': '#2D2B55',
            'primary': '#6C63FF', 'primary_light': '#9B94FF', 'primary_dark': '#4A42CC',
            'secondary': '#FF6B6B', 'secondary_light': '#FF9494', 'secondary_dark': '#CC5555',
            'accent': '#2ECC71', 'accent_light': '#A8E6CF',
            'text_primary': '#FFFFFE', 'text_secondary': 'rgba(255,255,254,0.72)',
            'divider': 'rgba(255,255,255,0.12)',
            'success': '#2ECC71', 'warning': '#FFAB00', 'error': '#e74c3c', 'info': '#00B8D9',
        },
        'glass': {'blur_radius': 20, 'surface_opacity': 0.85, 'elevated_opacity': 0.92, 'border_opacity': 0.08},
        'animations': {
            'glassmorphism': {'enabled': True, 'intensity': 70},
            'gradients': {'enabled': True, 'intensity': 50},
            'liquid_motion': {'enabled': True, 'intensity': 60},
        },
        'font': {'family': 'Inter', 'size': 13},
        'shell': {'panel_opacity': 0.65, 'blur_radius': 20, 'border_radius': 16},
        'metadata': {'is_preset': True, 'is_ai_generated': False},
    },
    {
        'id': 'midnight-black', 'name': 'Midnight Black',
        'description': 'True OLED black with ice-blue highlights',
        'colors': {
            'background': '#000000', 'paper': '#0A0A0F', 'surface_elevated': '#141420',
            'surface_overlay': '#1E1E2E',
            'primary': '#00B8D9', 'primary_light': '#79E2F2', 'primary_dark': '#008DA8',
            'secondary': '#7C4DFF', 'secondary_light': '#B388FF', 'secondary_dark': '#5E35B1',
            'accent': '#00E5FF', 'accent_light': '#80F0FF',
            'text_primary': '#E8E8E8', 'text_secondary': 'rgba(232,232,232,0.65)',
            'divider': 'rgba(255,255,255,0.08)',
            'success': '#00E676', 'warning': '#FFD600', 'error': '#FF5252', 'info': '#40C4FF',
        },
        'glass': {'blur_radius': 24, 'surface_opacity': 0.75, 'elevated_opacity': 0.88, 'border_opacity': 0.06},
        'animations': {'glassmorphism': {'enabled': True, 'intensity': 80}, 'gradients': {'enabled': True, 'intensity': 60}, 'liquid_motion': {'enabled': True, 'intensity': 70}},
        'font': {'family': 'Inter', 'size': 13},
        'shell': {'panel_opacity': 0.55, 'blur_radius': 24, 'border_radius': 16},
        'metadata': {'is_preset': True, 'is_ai_generated': False},
    },
    {
        'id': 'ocean-blue', 'name': 'Ocean Blue',
        'description': 'Deep sea gradients with coral accents',
        'colors': {
            'background': '#0B1426', 'paper': '#112240', 'surface_elevated': '#1A3358',
            'surface_overlay': '#234570',
            'primary': '#64B5F6', 'primary_light': '#90CAF9', 'primary_dark': '#1E88E5',
            'secondary': '#FF8A65', 'secondary_light': '#FFAB91', 'secondary_dark': '#E64A19',
            'accent': '#4DD0E1', 'accent_light': '#80DEEA',
            'text_primary': '#E3F2FD', 'text_secondary': 'rgba(227,242,253,0.72)',
            'divider': 'rgba(100,181,246,0.15)',
            'success': '#69F0AE', 'warning': '#FFD740', 'error': '#FF8A80', 'info': '#80D8FF',
        },
        'glass': {'blur_radius': 20, 'surface_opacity': 0.80, 'elevated_opacity': 0.90, 'border_opacity': 0.10},
        'animations': {'glassmorphism': {'enabled': True, 'intensity': 65}, 'gradients': {'enabled': True, 'intensity': 55}, 'liquid_motion': {'enabled': True, 'intensity': 60}},
        'font': {'family': 'Inter', 'size': 13},
        'shell': {'panel_opacity': 0.60, 'blur_radius': 20, 'border_radius': 16},
        'metadata': {'is_preset': True, 'is_ai_generated': False},
    },
    {
        'id': 'forest-green', 'name': 'Forest Green',
        'description': 'Deep forest with amber firelight',
        'colors': {
            'background': '#0A1F0A', 'paper': '#142814', 'surface_elevated': '#1E3A1E',
            'surface_overlay': '#2A4E2A',
            'primary': '#66BB6A', 'primary_light': '#A5D6A7', 'primary_dark': '#388E3C',
            'secondary': '#FFB74D', 'secondary_light': '#FFD54F', 'secondary_dark': '#F57C00',
            'accent': '#81C784', 'accent_light': '#C8E6C9',
            'text_primary': '#E8F5E9', 'text_secondary': 'rgba(232,245,233,0.72)',
            'divider': 'rgba(102,187,106,0.12)',
            'success': '#69F0AE', 'warning': '#FFE57F', 'error': '#EF5350', 'info': '#4FC3F7',
        },
        'glass': {'blur_radius': 18, 'surface_opacity': 0.82, 'elevated_opacity': 0.90, 'border_opacity': 0.08},
        'animations': {'glassmorphism': {'enabled': True, 'intensity': 60}, 'gradients': {'enabled': True, 'intensity': 45}, 'liquid_motion': {'enabled': True, 'intensity': 55}},
        'font': {'family': 'Inter', 'size': 13},
        'shell': {'panel_opacity': 0.60, 'blur_radius': 18, 'border_radius': 16},
        'metadata': {'is_preset': True, 'is_ai_generated': False},
    },
    {
        'id': 'sunset-warm', 'name': 'Sunset Warm',
        'description': 'Warm amber dusk with rose highlights',
        'colors': {
            'background': '#1A0F0A', 'paper': '#2D1B12', 'surface_elevated': '#3E2518',
            'surface_overlay': '#4F3020',
            'primary': '#FF8A65', 'primary_light': '#FFAB91', 'primary_dark': '#E64A19',
            'secondary': '#F48FB1', 'secondary_light': '#F8BBD0', 'secondary_dark': '#C2185B',
            'accent': '#FFD54F', 'accent_light': '#FFE082',
            'text_primary': '#FFF3E0', 'text_secondary': 'rgba(255,243,224,0.72)',
            'divider': 'rgba(255,138,101,0.15)',
            'success': '#A5D6A7', 'warning': '#FFE082', 'error': '#EF9A9A', 'info': '#81D4FA',
        },
        'glass': {'blur_radius': 16, 'surface_opacity': 0.80, 'elevated_opacity': 0.88, 'border_opacity': 0.10},
        'animations': {'glassmorphism': {'enabled': True, 'intensity': 55}, 'gradients': {'enabled': True, 'intensity': 50}, 'liquid_motion': {'enabled': True, 'intensity': 60}},
        'font': {'family': 'Inter', 'size': 13},
        'shell': {'panel_opacity': 0.60, 'blur_radius': 16, 'border_radius': 16},
        'metadata': {'is_preset': True, 'is_ai_generated': False},
    },
    {
        'id': 'neon-purple', 'name': 'Neon Purple',
        'description': 'Cyberpunk vibes with electric neon',
        'colors': {
            'background': '#0D0015', 'paper': '#1A0030', 'surface_elevated': '#2A0050',
            'surface_overlay': '#3A0068',
            'primary': '#E040FB', 'primary_light': '#EA80FC', 'primary_dark': '#AA00FF',
            'secondary': '#00E5FF', 'secondary_light': '#80F0FF', 'secondary_dark': '#00B8D4',
            'accent': '#76FF03', 'accent_light': '#B2FF59',
            'text_primary': '#F3E5F5', 'text_secondary': 'rgba(243,229,245,0.72)',
            'divider': 'rgba(224,64,251,0.15)',
            'success': '#76FF03', 'warning': '#FFEA00', 'error': '#FF1744', 'info': '#18FFFF',
        },
        'glass': {'blur_radius': 24, 'surface_opacity': 0.70, 'elevated_opacity': 0.85, 'border_opacity': 0.12},
        'animations': {'glassmorphism': {'enabled': True, 'intensity': 85}, 'gradients': {'enabled': True, 'intensity': 70}, 'liquid_motion': {'enabled': True, 'intensity': 75}},
        'font': {'family': 'Inter', 'size': 13},
        'shell': {'panel_opacity': 0.50, 'blur_radius': 24, 'border_radius': 16},
        'metadata': {'is_preset': True, 'is_ai_generated': False},
    },
    {
        'id': 'rose-gold', 'name': 'Rose Gold',
        'description': 'Elegant rose with warm gold tones',
        'colors': {
            'background': '#1A0F14', 'paper': '#2D1B25', 'surface_elevated': '#3E2535',
            'surface_overlay': '#4F3045',
            'primary': '#F48FB1', 'primary_light': '#F8BBD0', 'primary_dark': '#C2185B',
            'secondary': '#FFD54F', 'secondary_light': '#FFE082', 'secondary_dark': '#FFA000',
            'accent': '#CE93D8', 'accent_light': '#E1BEE7',
            'text_primary': '#FCE4EC', 'text_secondary': 'rgba(252,228,236,0.72)',
            'divider': 'rgba(244,143,177,0.15)',
            'success': '#A5D6A7', 'warning': '#FFE082', 'error': '#EF9A9A', 'info': '#B3E5FC',
        },
        'glass': {'blur_radius': 20, 'surface_opacity': 0.82, 'elevated_opacity': 0.90, 'border_opacity': 0.10},
        'animations': {'glassmorphism': {'enabled': True, 'intensity': 65}, 'gradients': {'enabled': True, 'intensity': 50}, 'liquid_motion': {'enabled': True, 'intensity': 55}},
        'font': {'family': 'Inter', 'size': 13},
        'shell': {'panel_opacity': 0.60, 'blur_radius': 20, 'border_radius': 16},
        'metadata': {'is_preset': True, 'is_ai_generated': False},
    },
    {
        'id': 'arctic-frost', 'name': 'Arctic Frost',
        'description': 'Cool silver-white with ice accents',
        'colors': {
            'background': '#0E1621', 'paper': '#162433', 'surface_elevated': '#1E3044',
            'surface_overlay': '#263D55',
            'primary': '#B0BEC5', 'primary_light': '#CFD8DC', 'primary_dark': '#78909C',
            'secondary': '#80CBC4', 'secondary_light': '#B2DFDB', 'secondary_dark': '#00897B',
            'accent': '#B3E5FC', 'accent_light': '#E1F5FE',
            'text_primary': '#ECEFF1', 'text_secondary': 'rgba(236,239,241,0.72)',
            'divider': 'rgba(176,190,197,0.15)',
            'success': '#A5D6A7', 'warning': '#FFE082', 'error': '#EF9A9A', 'info': '#80D8FF',
        },
        'glass': {'blur_radius': 28, 'surface_opacity': 0.78, 'elevated_opacity': 0.88, 'border_opacity': 0.10},
        'animations': {'glassmorphism': {'enabled': True, 'intensity': 75}, 'gradients': {'enabled': True, 'intensity': 45}, 'liquid_motion': {'enabled': True, 'intensity': 50}},
        'font': {'family': 'Inter', 'size': 13},
        'shell': {'panel_opacity': 0.55, 'blur_radius': 28, 'border_radius': 16},
        'metadata': {'is_preset': True, 'is_ai_generated': False},
    },
]

# Fast preset lookup by string id (used by /theme/apply, /theme/generate).
_THEME_PRESETS_BY_ID = {p['id']: p for p in _THEME_PRESETS}

# Font families the frontend knows how to load; served via /theme/fonts.
_SUPPORTED_FONTS = [
    {'family': 'Inter', 'category': 'sans-serif'},
    {'family': 'Figtree', 'category': 'sans-serif'},
    {'family': 'JetBrains Mono', 'category': 'monospace'},
    {'family': 'Roboto', 'category': 'sans-serif'},
    {'family': 'Fira Code', 'category': 'monospace'},
    {'family': 'Source Sans Pro', 'category': 'sans-serif'},
    {'family': 'Poppins', 'category': 'sans-serif'},
    {'family': 'IBM Plex Sans', 'category': 'sans-serif'},
]

3631 

3632 

3633def _deep_merge(base, overrides): 

3634 """Deep-merge overrides into base dict (returns new dict).""" 

3635 result = dict(base) 

3636 for k, v in overrides.items(): 

3637 if isinstance(v, dict) and isinstance(result.get(k), dict): 

3638 result[k] = _deep_merge(result[k], v) 

3639 else: 

3640 result[k] = v 

3641 return result 

3642 

3643 

@social_bp.route('/theme/presets', methods=['GET'])
def theme_get_presets():
    """List every curated theme preset; public endpoint (no auth required)."""
    return _ok({'presets': _THEME_PRESETS})

3648 

3649 

@social_bp.route('/theme/active', methods=['GET'])
@require_auth
def theme_get_active():
    """Return the authenticated user's active theme config.

    Falls back to the 'hart-default' preset when the user has no saved theme.
    """
    user_settings = dict(g.user.settings or {})
    active_theme = user_settings.get('theme') or _THEME_PRESETS_BY_ID['hart-default']
    return _ok({'theme': active_theme})

3657 

3658 

@social_bp.route('/theme/apply', methods=['POST'])
@require_auth
def theme_apply():
    """Apply a preset theme by id.

    Request JSON:
        theme_id: id of a curated preset (see /theme/presets)

    Persists the preset as the user's active theme; 404 for unknown ids.
    """
    data = request.get_json(silent=True) or {}
    theme_id = data.get('theme_id', '').strip()
    if not theme_id:
        return _err('theme_id required')
    preset = _THEME_PRESETS_BY_ID.get(theme_id)
    if not preset:
        return _err(f'Unknown preset: {theme_id}', 404)

    # Reassign a new dict so the ORM notices the settings change.
    settings = dict(g.user.settings or {})
    settings['theme'] = dict(preset)
    g.user.settings = settings
    try:
        db = g.db
        db.add(g.user)
        db.commit()
    except Exception as e:
        # Roll back so a failed commit doesn't leave the session unusable.
        g.db.rollback()
        logger.error(f"theme_apply commit error: {e}")
        return _err('Failed to save theme', 500)
    return _ok({'theme': preset})

3682 

3683 

@social_bp.route('/theme/customize', methods=['POST'])
@require_auth
def theme_customize():
    """Deep-merge partial overrides into the user's active theme.

    Request JSON: any subset of the theme schema (colors, glass, animations,
    font, shell). The merged theme is flagged is_preset=False and persisted
    on the user's settings.
    """
    overrides = request.get_json(silent=True) or {}
    if not overrides:
        return _err('No overrides provided')

    settings = dict(g.user.settings or {})
    current = settings.get('theme') or dict(_THEME_PRESETS_BY_ID['hart-default'])
    merged = _deep_merge(current, overrides)
    # Any customization makes the theme non-preset.
    merged['metadata'] = dict(merged.get('metadata', {}), is_preset=False)
    settings['theme'] = merged
    g.user.settings = settings
    try:
        db = g.db
        db.add(g.user)
        db.commit()
    except Exception as e:
        # Roll back so a failed commit doesn't leave the session unusable.
        g.db.rollback()
        logger.error(f"theme_customize commit error: {e}")
        return _err('Failed to save theme', 500)
    return _ok({'theme': merged})

3706 

3707 

@social_bp.route('/theme/fonts', methods=['GET'])
def theme_get_fonts():
    """List supported font families; public endpoint (no auth required)."""
    return _ok({'fonts': _SUPPORTED_FONTS})

3712 

3713 

@social_bp.route('/theme/generate', methods=['POST'])
@require_auth
def theme_generate():
    """AI-generate a theme config from a text description.

    Request JSON:
        description: free-text description of the desired look (required)
        base_preset: preset id to start from (default: 'hart-default';
            unknown ids silently fall back to the default)

    Tries the backend LLM adapter first; on any failure falls back to a
    deterministic keyword→palette mapping. Returns 422 when neither path
    produces any colors. The generated theme is returned to the caller but
    NOT persisted here — the client must call /theme/customize or
    /theme/apply to save it.
    """
    import json as _json
    import re as _re

    data = request.get_json(silent=True) or {}
    description = (data.get('description') or '').strip()
    if not description:
        return _err('description required')

    base_id = data.get('base_preset', 'hart-default')
    base = _THEME_PRESETS_BY_ID.get(base_id, _THEME_PRESETS_BY_ID['hart-default'])

    # Keyword fallback — deterministic mapping when LLM unavailable.
    # Keys are matched as substrings of the lowercased description; only the
    # FIRST matching keyword (dict insertion order) is used.
    _keyword_colors = {
        'ocean': {'primary': '#64B5F6', 'background': '#0B1426', 'paper': '#112240', 'secondary': '#FF8A65'},
        'sea': {'primary': '#64B5F6', 'background': '#0B1426', 'paper': '#112240', 'secondary': '#FF8A65'},
        'sunset': {'primary': '#FF8A65', 'background': '#1A0F0A', 'paper': '#2D1B12', 'secondary': '#F48FB1'},
        'forest': {'primary': '#66BB6A', 'background': '#0A1F0A', 'paper': '#142814', 'secondary': '#FFB74D'},
        'neon': {'primary': '#E040FB', 'background': '#0D0015', 'paper': '#1A0030', 'secondary': '#00E5FF'},
        'cyber': {'primary': '#E040FB', 'background': '#0D0015', 'paper': '#1A0030', 'secondary': '#00E5FF'},
        'rose': {'primary': '#F48FB1', 'background': '#1A0F14', 'paper': '#2D1B25', 'secondary': '#FFD54F'},
        'gold': {'primary': '#FFD54F', 'background': '#1A0F0A', 'paper': '#2D1B12', 'secondary': '#F48FB1'},
        'ice': {'primary': '#B0BEC5', 'background': '#0E1621', 'paper': '#162433', 'secondary': '#80CBC4'},
        'arctic': {'primary': '#B0BEC5', 'background': '#0E1621', 'paper': '#162433', 'secondary': '#80CBC4'},
        'night': {'primary': '#00B8D9', 'background': '#000000', 'paper': '#0A0A0F', 'secondary': '#7C4DFF'},
        'midnight': {'primary': '#00B8D9', 'background': '#000000', 'paper': '#0A0A0F', 'secondary': '#7C4DFF'},
        'blood': {'primary': '#FF1744', 'background': '#1A0000', 'paper': '#2D0A0A', 'secondary': '#FF6E40'},
        'purple': {'primary': '#CE93D8', 'background': '#1A0025', 'paper': '#2A0040', 'secondary': '#80CBC4'},
        'blue': {'primary': '#64B5F6', 'background': '#0B1426', 'paper': '#112240', 'secondary': '#FF8A65'},
        'green': {'primary': '#66BB6A', 'background': '#0A1F0A', 'paper': '#142814', 'secondary': '#FFB74D'},
        'warm': {'primary': '#FF8A65', 'background': '#1A0F0A', 'paper': '#2D1B12', 'secondary': '#F48FB1'},
        'cool': {'primary': '#B0BEC5', 'background': '#0E1621', 'paper': '#162433', 'secondary': '#80CBC4'},
    }

    # Try LLM generation first
    try:
        # The adapter's import path differs between deployments.
        try:
            from routes.hartos_backend_adapter import chat as _adapter_chat
        except ImportError:
            from hartos_backend_adapter import chat as _adapter_chat
        schema_hint = '{"colors":{"background":"#hex","paper":"#hex","primary":"#hex","primary_light":"#hex","primary_dark":"#hex","secondary":"#hex","accent":"#hex","text_primary":"#hex"}}'
        system_prompt = (
            "You are a UI theme designer for a dark-mode social platform. "
            "Given the user description, generate ONLY a valid JSON object with a 'colors' key. "
            f"Schema: {schema_hint}. "
            "Rules: all backgrounds MUST be dark (luminance < 0.15). "
            "Primary must have >= 4.5:1 contrast on background. "
            "Return ONLY raw JSON, no markdown fences, no explanation."
        )
        llm_resp = _adapter_chat(
            f"{system_prompt}\n\nUser description: \"{description}\"",
            casual_conv=True, timeout=30
        )
        # The adapter may return a plain string or a dict with a 'response' key.
        resp_text = llm_resp if isinstance(llm_resp, str) else str(llm_resp.get('response', ''))
        # Extract JSON from response (greedy: first '{' through last '}')
        json_match = _re.search(r'\{[\s\S]*\}', resp_text)
        if json_match:
            generated = _json.loads(json_match.group())
            colors = generated.get('colors', generated)
            # Only accept string values that look like colors (#hex or rgb...),
            # layered over the base preset's palette.
            merged_colors = dict(base['colors'], **{k: v for k, v in colors.items() if isinstance(v, str) and (v.startswith('#') or v.startswith('rgb'))})
            result = dict(base, colors=merged_colors, id='ai-generated', name=f'AI: {description[:30]}')
            result['metadata'] = {'is_preset': False, 'is_ai_generated': True, 'ai_prompt': description}
            return _ok({'theme': result})
        # NOTE: no JSON found in the LLM response falls through (without
        # raising) to the keyword fallback below.
    except Exception as e:
        logger.warning(f"AI theme generation failed, using keyword fallback: {e}")

    # Keyword fallback
    desc_lower = description.lower()
    color_overrides = {}
    for keyword, colors in _keyword_colors.items():
        if keyword in desc_lower:
            color_overrides.update(colors)
            break
    # First match wins — 'break' above stops after one keyword.

    if color_overrides:
        merged_colors = dict(base['colors'], **color_overrides)
        result = dict(base, colors=merged_colors, id='ai-generated', name=f'AI: {description[:30]}')
        result['metadata'] = {'is_preset': False, 'is_ai_generated': True, 'ai_prompt': description}
        return _ok({'theme': result})

    return _err('Could not generate a theme from that description. Try keywords like ocean, sunset, neon, forest.', 422)

3798 

3799 

@social_bp.route('/users/<int:user_id>/theme', methods=['GET'])
def theme_get_user(user_id):
    """Return any user's theme (public, for visitor theming).

    Returns {'theme': None} when the user has no saved theme.
    """
    db = None
    try:
        db = get_db()
        target = db.query(User).filter(User.id == user_id).first()
        if not target:
            return _err('User not found', 404)
        theme = dict(target.settings or {}).get('theme')
        return _ok({'theme': theme})
    except Exception as e:
        logger.error(f"theme_get_user error: {e}")
        return _err('Failed to fetch user theme', 500)
    finally:
        # db stays None if get_db() itself raised.
        if db:
            db.close()

3818 

3819 

3820# ═══════════════════════════════════════════════════════════════ 

3821# AGENT OBSERVATION & DISPATCH (fire-and-forget) 

3822# ═══════════════════════════════════════════════════════════════ 

3823 

@social_bp.route('/agent/observe', methods=['POST'])
@require_auth
def agent_observe():
    """Receive frontend observations for agent self-critique.

    Fire-and-forget: always replies 200 success, even on internal errors,
    so client beacons never retry or surface failures.
    """
    try:
        payload = request.get_json(silent=True) or {}
        uid = g.user.id

        # Persist the observation via MemoryGraph when the module is present.
        try:
            from integrations.channels.memory.memory_graph import MemoryGraph
            memory = MemoryGraph(user_id=str(uid))
            meta = {'memory_type': 'observation', 'source': 'frontend'}
            # Forward all payload fields except the client-side beacon flag.
            meta.update({k: v for k, v in payload.items() if k != '_useBeacon'})
            memory.register(
                content=(
                    f"[{payload.get('event', 'unknown')}] "
                    f"page={payload.get('page', '?')} "
                    f"outcome={payload.get('outcome', '?')} "
                    f"duration={payload.get('duration_ms', 0)}ms"
                ),
                metadata=meta,
            )
        except Exception:
            pass  # MemoryGraph optional
    except Exception:
        pass  # fire-and-forget: never fail the client
    return jsonify({'success': True}), 200

3847 

3848 

@social_bp.route('/agent/dispatch', methods=['POST'])
@require_auth
def agent_dispatch():
    """Receive agent dispatch requests from autopilot.

    Fire-and-forget: always replies 200 success, even on internal errors.
    """
    try:
        payload = request.get_json(silent=True) or {}
        uid = g.user.id

        # Record the dispatch as a memory entry for the agent to pick up.
        try:
            from integrations.channels.memory.memory_graph import MemoryGraph
            memory = MemoryGraph(user_id=str(uid))
            memory.register(
                content=(
                    f"[dispatch] agent={payload.get('agent', '?')} "
                    f"action={payload.get('action', '?')} "
                    f"mode={payload.get('mode', 'suggest')}"
                ),
                metadata={'memory_type': 'dispatch', 'source': 'autopilot', **payload},
            )
        except Exception:
            pass  # MemoryGraph optional
    except Exception:
        pass  # fire-and-forget: never fail the client
    return jsonify({'success': True}), 200