Coverage for integrations / social / icebreaker_service.py: 96.2%
78 statements
« prev ^ index » next coverage.py v7.14.0, created at 2026-05-12 04:49 +0000
1"""
2HevolveSocial - Icebreaker drafting service (closes #399).
4Pure-function service that produces a short, personalized opener for
5a mutual-like BLE encounter match. Used by:
7 * The seeded `encounter_icebreaker_agent` goal — when the match
8 WAMP topic fires, the goal's recipe calls draft_icebreaker(...)
9 to generate the candidate text, then publishes to the
10 com.hevolve.encounter.icebreaker WAMP topic for user approval.
11 * The /api/social/encounter/icebreaker/draft REST endpoint — when
12 the SPA wants to inspect / edit a draft before calling the
13 existing /icebreaker/approve endpoint.
15Design contract (project_encounter_icebreaker.md §9):
17 1. Input: a match_id (server-side rows already validated as
18 mutual-like) plus a SQLAlchemy session. Output: a dict with
19 `draft`, `rationale`, `alt_drafts`, `length`.
20 2. NEVER auto-sends — the caller is responsible for routing the
21 draft to the user-approval surface.
22 3. Length is capped at ENCOUNTER_DRAFT_MAX_CHARS at the service
23 boundary; longer outputs are truncated with a sentence-aware
24 trim, never rejected (a returned draft must always be usable).
25 4. Deterministic fallback: when no LLM callback is supplied OR the
26 LLM raises, falls back to a neutral template populated from
27 the peer's opt-in vibe_tags. This ensures the encounter
28 feature works on offline / cold-boot machines.
29 5. No PII surface: the draft text only mentions the peer's
30 OPT-IN public-facing fields (vibe_tags, avatar_style). Never
31 references the peer's user_id, real name, location, or any
32 stored memory_graph entry that wasn't tagged shared.
34Constitutional / cultural-wisdom gating happens in the caller (the
35seeded goal's `constitutional_gates: [no_autosend, consent_required,
36trust_quarantine_check, cultural_wisdom_filter]`), not here. This
37module is a pure draft generator — the gate stack wraps it.
38"""
39from __future__ import annotations
41import logging
42from typing import Any, Callable, Optional
44from core.constants import ENCOUNTER_DRAFT_MAX_CHARS
46from .models import DiscoverablePref, Encounter
48logger = logging.getLogger('hevolve_social')
51# ──────────────────────────────────────────────────────────────────────
52# Edge / cloud topology gate.
53#
54# Drafting reads the user's memory_graph + runs LLM inference with
55# their personal context. Allowed unconditionally on user-trusted
56# edge zones (flat = single machine; regional = LAN cluster). On
57# central topology (cloud), drafting is consent-gated rather than
58# prohibited — if the user explicitly opted into cloud-capability
59# for this feature (UserConsent row, consent_type='cloud_capability'
60# with scope='*' or 'encounter_icebreaker', granted=True, revoked_at
61# IS NULL), drafting is allowed; otherwise PermissionError.
62#
63# Callers inject the consent check as a callable so the service
64# stays pure (no DB-shape coupling). encounter_api.icebreaker_draft
65# wires the real UserConsent query; tests pass a deterministic
66# lambda.
67# ──────────────────────────────────────────────────────────────────────
70def _topology() -> str:
71 """Return current node tier, defaulting to 'flat' on any error.
73 Wraps security.key_delegation.get_node_tier so a missing security
74 package (HARTOS minimal-install) doesn't break drafting on the
75 one topology it's most likely deployed in (flat).
76 """
77 try:
78 from security.key_delegation import get_node_tier
79 return get_node_tier()
80 except Exception: # noqa: BLE001
81 return 'flat'
84# ──────────────────────────────────────────────────────────────────────
85# Templates — neutral fallbacks used when no LLM is available.
86#
87# Stored as a tuple so iteration order is stable for the alt_drafts
88# slot, and so dirty mutation by callers doesn't drift the list.
89# ──────────────────────────────────────────────────────────────────────
# Openers used when neither party has any opt-in vibe tags to anchor on.
_NEUTRAL_TEMPLATES: tuple[str, ...] = (
    "Hey — nice to actually be across the room from you.",
    "Hi! Funny how the room got smaller for a second there.",
    "Hello — I noticed you noticed. Wanted to say hi properly.",
)

# Openers parameterized by a single {tag} slot — filled with the anchor
# vibe tag chosen by _pick_shared_tag.
_VIBE_TEMPLATES: tuple[str, ...] = (
    "Hey — saw the {tag} thing. Same. Nice to meet you properly.",
    "Hi! I think we share the {tag} corner of the universe.",
    "Hello. {tag}, huh? Curious how you got into it.",
)
104# ──────────────────────────────────────────────────────────────────────
105# Helpers
106# ──────────────────────────────────────────────────────────────────────
def _trim_to_cap(text: str, cap: int = ENCOUNTER_DRAFT_MAX_CHARS) -> str:
    """Length-cap *text* without ever rejecting it.

    Prefers cutting on the last sentence boundary that fits inside
    *cap*; when no boundary exists in range, hard-cuts and appends an
    ellipsis. The returned string never exceeds *cap* characters.
    """
    if not text:
        return text
    text = text.strip()
    if len(text) <= cap:
        return text
    # Right-most sentence boundary that still fits under the cap.
    best = -1
    for marker in ('. ', '? ', '! ', '\n'):
        pos = text.rfind(marker, 0, cap)
        if pos > best:
            best = pos + len(marker) - 1  # index of the kept separator char
    if best > 0:
        return text[: best + 1].rstrip()
    # No usable boundary — hard cut with ellipsis, staying within cap.
    return text[: max(0, cap - 1)].rstrip() + '…'
130def _pick_shared_tag(
131 a_tags: list[str], b_tags: list[str],
132) -> Optional[str]:
133 """Pick the single shared vibe tag to anchor the draft. If no
134 overlap, fall back to the peer's first tag (so the draft can
135 still be vibe-flavored). Returns None when neither side has
136 any tags (→ neutral template)."""
137 a_set = {str(t).lower() for t in (a_tags or [])}
138 for t in b_tags or []:
139 if str(t).lower() in a_set:
140 return str(t)
141 if b_tags:
142 return str(b_tags[0])
143 if a_tags:
144 return str(a_tags[0])
145 return None
148def _peer_id_for(match: Encounter, viewer_uid: str) -> Optional[str]:
149 if match.user_a_id == viewer_uid:
150 return match.user_b_id
151 if match.user_b_id == viewer_uid:
152 return match.user_a_id
153 return None
156# ──────────────────────────────────────────────────────────────────────
157# Public API
158# ──────────────────────────────────────────────────────────────────────
def draft_icebreaker(
    match_id: str,
    viewer_user_id: str,
    db_session,
    llm_callback: Optional[Callable[[dict], str]] = None,
    cloud_consent_check: Optional[Callable[[str], bool]] = None,
    topology: Optional[str] = None,
) -> dict:
    """Produce a draft icebreaker for the given match, viewed from the
    side of `viewer_user_id`.

    Args:
        match_id: Encounter.id of a row with context_type='ble'.
        viewer_user_id: the user requesting the draft (must be one of
            match.user_a_id / user_b_id).
        db_session: SQLAlchemy session.
        llm_callback: optional callable taking a context dict and
                      returning a draft string. When supplied + non-
                      raising, its output is used as the primary draft;
                      its failure is logged and the deterministic
                      template is used instead (NEVER the bare error).
        cloud_consent_check: optional callable taking a user_id and
                      returning True iff that user has explicitly
                      consented to cloud-capability for this feature
                      (UserConsent row, consent_type='cloud_capability'
                      with scope='*' or 'encounter_icebreaker',
                      granted=True, revoked_at IS NULL).
                      Required when this process is running in
                      central topology — drafting reads memory_graph
                      + runs LLM with personal context, so cloud
                      execution requires explicit per-user opt-in.
                      Ignored on flat / regional (user-trusted edge).
        topology: optional override of the detected node tier; when
                      None, _topology() decides (defaulting to 'flat'
                      if the security package is unavailable).

    Returns:
        {
            'draft': str,           # primary draft, ≤ ENCOUNTER_DRAFT_MAX_CHARS
            'rationale': str,       # one-line why-this-tag explanation
            'alt_drafts': list[str],# 2 alternates, also length-capped
            'length': int,          # len(draft)
            'shared_tag': str|None, # tag the draft was anchored on
            'source': 'llm'|'template',
        }

    Raises:
        ValueError: when match_id doesn't exist, isn't a BLE match, or
                    viewer_user_id isn't one of the match parties.
        PermissionError: when running in central topology and the
                         viewer hasn't opted into cloud-capability for
                         this feature (consent-gated, not prohibited).
    """
    # Topology gate FIRST — before any DB read or LLM call.
    tier = topology if topology is not None else _topology()
    if tier == 'central':
        ok = bool(
            cloud_consent_check
            and cloud_consent_check(viewer_user_id)
        )
        if not ok:
            raise PermissionError(
                "central-topology drafting requires user "
                "cloud_capability consent for encounter_icebreaker",
            )

    # Validate the match row and resolve which side is the peer.
    match = db_session.query(Encounter).filter_by(
        id=match_id, context_type='ble',
    ).first()
    if match is None:
        raise ValueError(f"BLE match {match_id} not found")
    peer_uid = _peer_id_for(match, viewer_user_id)
    if peer_uid is None:
        raise ValueError(
            f"viewer {viewer_user_id} is not a party in match {match_id}",
        )

    # Only OPT-IN public-facing fields feed the draft (vibe_tags,
    # avatar_style) — contract item 5 in the module docstring.
    peer_pref = db_session.query(DiscoverablePref).filter_by(
        user_id=peer_uid,
    ).first()
    viewer_pref = db_session.query(DiscoverablePref).filter_by(
        user_id=viewer_user_id,
    ).first()
    peer_tags = list(peer_pref.vibe_tags or []) if peer_pref else []
    viewer_tags = list(viewer_pref.vibe_tags or []) if viewer_pref else []
    shared = _pick_shared_tag(viewer_tags, peer_tags)

    # Context handed to the caller-supplied LLM callback — the service
    # itself stays model-agnostic.
    context = {
        'match_id': match_id,
        'peer_user_id': peer_uid,
        'peer_vibe_tags': peer_tags,
        'viewer_vibe_tags': viewer_tags,
        'shared_tag': shared,
        'avatar_style': peer_pref.avatar_style if peer_pref else None,
    }

    # LLM draft is best-effort: any exception (or a blank / non-str
    # result) falls through to the deterministic template, keeping
    # contract item 4 — a returned draft must always be usable.
    primary = None
    source = 'template'
    if llm_callback is not None:
        try:
            cand = llm_callback(context)
            if isinstance(cand, str) and cand.strip():
                primary = _trim_to_cap(cand)
                source = 'llm'
        except Exception as exc:  # noqa: BLE001
            logger.warning(
                'icebreaker llm_callback failed for match=%s: %s',
                match_id, exc,
            )

    if primary is None:
        primary = _trim_to_cap(_render_template(shared, 0))

    # Alternates always come from templates 1 and 2, so the user has
    # choices even when the primary draft came from the LLM.
    alt_drafts = [
        _trim_to_cap(_render_template(shared, i))
        for i in (1, 2)
    ]
    rationale = (
        f"anchored on shared interest '{shared}'"
        if shared
        else 'neutral opener — no shared vibe tags to anchor on'
    )

    return {
        'draft': primary,
        'rationale': rationale,
        'alt_drafts': alt_drafts,
        'length': len(primary),
        'shared_tag': shared,
        'source': source,
    }
def _render_template(shared_tag: Optional[str], index: int) -> str:
    """Render fallback template number *index*, vibe-flavored when a
    shared tag exists, neutral otherwise (index wraps modulo pool size)."""
    if not shared_tag:
        return _NEUTRAL_TEMPLATES[index % len(_NEUTRAL_TEMPLATES)]
    pool = _VIBE_TEMPLATES
    slot = index % len(pool)
    return pool[slot].format(tag=shared_tag)