Skip to content
Open
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
152 changes: 146 additions & 6 deletions apps/mobile/app/(tabs)/agent.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import { useApp } from "@/lib/app/app-provider";
import { requireOwnerAuth } from "@/lib/security/owner-auth";
import { loadWallet, type WalletSnapshot } from "@/lib/wallet/wallet";
import { useTransfer } from "@/lib/agent/use-transfer";
import { useAgentChatDemo, useAgentChatLive, type ToolCall } from "@/lib/agent/use-agent-chat";
import { GhostButton, IconButton, PrimaryButton } from "@/ui/buttons";
import { AppIcon } from "@/ui/app-icon";
import { Badge } from "@/ui/badge";
Expand All @@ -35,6 +36,77 @@ function shortenHex(input: string): string {
return `${s.slice(0, 10)}…${s.slice(-6)}`;
}

/** Keys that are actual secrets (always redact) */
const SENSITIVE_SECRET_KEYS = [
"privateKey", "private_key", "secret", "apiKey", "api_key",
"secretKey", "mnemonic", "seed", "password",
];

/** Keys that may contain sensitive data (redact only in secure context) */
const SENSITIVE_KEYS = [
...SENSITIVE_SECRET_KEYS,
"signature", "token",
];

/**
* Sanitize tool call params/result for display
* @param data - The data to sanitize
* @param maxLen - Maximum length to display
* @param secretsOnly - If true, only redact true secrets (privateKey, apiKey, etc.), not wallet data
*/
function sanitizeForDisplay(data: unknown, maxLen = 100, secretsOnly = false): string {
if (data === null || data === undefined) return "";

// If it's a string, check for sensitive patterns
if (typeof data === "string") {
// Truncate long strings
if (data.length > maxLen) {
return data.slice(0, maxLen) + "…";
}
return data;
}

// If it's an object, filter sensitive keys
if (typeof data === "object") {
const obj = data as Record<string, unknown>;
const sanitized: Record<string, unknown> = {};
const keysToCheck = secretsOnly ? SENSITIVE_SECRET_KEYS : SENSITIVE_KEYS;

for (const [key, value] of Object.entries(obj)) {
const lowerKey = key.toLowerCase();
// Use exact match or check if key ends with the sensitive suffix
const isSensitive = keysToCheck.some((sk) =>
lowerKey === sk || lowerKey.endsWith(sk) || lowerKey.endsWith(`_${sk}`)
);

if (isSensitive) {
sanitized[key] = "[redacted]";
} else if (typeof value === "object" && value !== null) {
sanitized[key] = "[object]";
} else {
sanitized[key] = value;
}
}

let str = JSON.stringify(sanitized);
if (str.length > maxLen) {
str = str.slice(0, maxLen) + "…";
}
return str;
Comment thread
omarespejel marked this conversation as resolved.
}

// Fallback
const str = String(data);
return str.length > maxLen ? str.slice(0, maxLen) + "…" : str;
}

/** Type guard: does `m` carry a boolean `isStreaming` flag? */
function hasIsStreaming(m: unknown): m is { isStreaming: boolean } {
  return (
    typeof m === "object" &&
    m !== null &&
    typeof (m as Record<string, unknown>).isStreaming === "boolean"
  );
}
Comment thread
coderabbitai[bot] marked this conversation as resolved.

export default function AgentScreen() {
const t = useAppTheme();
const insets = useSafeAreaInsets();
Expand All @@ -52,6 +124,14 @@ export default function AgentScreen() {
}
}, [isLive]);

// Always call both hooks (React rule: hooks cannot be conditional)
const [demoChatState, demoChatActions] = useAgentChatDemo();
const [liveChatState, liveChatActions] = useAgentChatLive();

// Use appropriate chat based on mode
const chatState = isLive ? liveChatState : demoChatState;
const chatActions = isLive ? liveChatActions : demoChatActions;

const [draft, setDraft] = React.useState("");

const pending = state.agent.proposals.filter((p) => p.status === "pending");
Expand Down Expand Up @@ -302,13 +382,30 @@ export default function AgentScreen() {
<View style={{ gap: 10 }}>
<Row>
<H2>Conversation</H2>
<Muted>{state.agent.messages.length} messages</Muted>
<Muted>{isLive ? chatState.messages.length : state.agent.messages.length} messages</Muted>
</Row>
<View style={{ gap: 10 }}>
{state.agent.messages.map((m) => (
<MessageBubble key={m.id} role={m.role} text={m.text} />
{(isLive ? chatState.messages : state.agent.messages).map((m) => (
<MessageBubble key={m.id} role={m.role} text={m.text} isStreaming={hasIsStreaming(m) ? m.isStreaming : false} />
))}
Comment thread
coderabbitai[bot] marked this conversation as resolved.
</View>

{/* Show error if any */}
{chatState.error && (
<View style={{ padding: 10, borderRadius: t.radius.md, backgroundColor: "rgba(255,69,58,0.10)" }}>
<Muted style={{ color: t.colors.bad }}>{chatState.error}</Muted>
</View>
)}
Comment thread
coderabbitai[bot] marked this conversation as resolved.

{/* Show tool calls in live mode */}
{isLive && chatState.toolCalls.length > 0 && (
<View style={{ gap: 8 }}>
<Muted>Tool Calls</Muted>
{chatState.toolCalls.map((tc) => (
<ToolCallCard key={tc.id} toolCall={tc} />
))}
</View>
)}
</View>
</GlassCard>
</Animated.View>
Expand Down Expand Up @@ -353,8 +450,8 @@ export default function AgentScreen() {
}}
/>
<IconButton
disabled={!draft.trim() || transfer.phase === "preparing" || transfer.phase === "executing"}
tone={draft.trim() && transfer.phase !== "preparing" && transfer.phase !== "executing" ? "accent" : "neutral"}
disabled={!draft.trim() || transfer.phase === "preparing" || transfer.phase === "executing" || chatState.isResponding}
tone={draft.trim() && transfer.phase !== "preparing" && transfer.phase !== "executing" && !chatState.isResponding ? "accent" : "neutral"}
onPress={async () => {
await haptic("tap");
const text = draft.trim();
Expand All @@ -363,6 +460,14 @@ export default function AgentScreen() {
// Live mode: check if it's a transfer request
if (isLive && isTransferRequest(text)) {
await transfer.prepare(text);
} else if (isLive) {
// Live mode: send to LLM chat with error handling
try {
await chatActions.sendMessage(text);
} catch (err) {
await haptic("error");
console.error("Chat send error:", err);
}
Comment thread
omarespejel marked this conversation as resolved.
Comment thread
omarespejel marked this conversation as resolved.
} else {
// Demo mode or non-transfer message
actions.sendAgentMessage(text);
Expand All @@ -378,6 +483,37 @@ export default function AgentScreen() {
);
}

/** Tool call card for live mode — shows one tool execution with its status. */
function ToolCallCard(props: { toolCall: ToolCall }) {
  const t = useAppTheme();
  const { toolCall } = props;

  // Map execution status to a glyph and a badge tone.
  const isSuccess = toolCall.status === "success";
  const isError = toolCall.status === "error";
  const statusLabel = isSuccess ? "✓" : isError ? "✗" : "…";
  const statusTone = isSuccess ? ("good" as const) : isError ? ("danger" as const) : ("warn" as const);

  const cardStyle = {
    padding: 10,
    borderRadius: t.radius.md,
    backgroundColor: t.scheme === "dark" ? "rgba(255,255,255,0.05)" : "rgba(255,255,255,0.6)",
    borderWidth: 1,
    borderColor: t.colors.glassBorder,
  };

  return (
    <View style={cardStyle}>
      <Row>
        <Body style={{ fontFamily: t.font.bodySemibold }}>{toolCall.toolName}</Body>
        <Badge label={statusLabel} tone={statusTone} />
      </Row>
      {/* Params/result are sanitized with secretsOnly=true: true secrets are
          redacted but wallet data (signatures, tokens) remains visible. */}
      <Muted style={{ fontSize: 11, marginTop: 4 }}>
        {sanitizeForDisplay(toolCall.params, 100, true)}
      </Muted>
      {toolCall.result && (
        <Muted style={{ fontSize: 11, marginTop: 4, color: t.colors.muted }}>
          {sanitizeForDisplay(toolCall.result, 100, true)}
        </Muted>
      )}
    </View>
  );
}

function PromptChip(props: { label: string; onPress: () => void }) {
return (
<Chip
Expand Down Expand Up @@ -414,7 +550,7 @@ function ToggleRow(props: { title: string; body: string; value: boolean; onChang
);
}

function MessageBubble(props: { role: "user" | "assistant"; text: string }) {
function MessageBubble(props: { role: "user" | "assistant"; text: string; isStreaming?: boolean }) {
const t = useAppTheme();
const isUser = props.role === "user";
const borderA = isUser
Expand All @@ -432,8 +568,12 @@ function MessageBubble(props: { role: "user" | "assistant"; text: string }) {
: t.scheme === "dark"
? "rgba(255,255,255,0.05)"
: "rgba(255,255,255,0.60)";

return (
<View style={{ alignSelf: isUser ? "flex-end" : "flex-start", maxWidth: "92%" }}>
{props.isStreaming && (
<Muted style={{ fontSize: 10, marginBottom: 2 }}>typing...</Muted>
)}
<LinearGradient
colors={[borderA, borderB]}
start={{ x: 0.1, y: 0.0 }}
Expand Down
92 changes: 81 additions & 11 deletions apps/mobile/lib/agent-runtime/openai-adapter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,29 +21,87 @@ const DEFAULT_MODEL = "gpt-4o-mini";
// SSE line parser
// ---------------------------------------------------------------------------

function parseSseLine(line: string): StreamChunk | null {
if (!line.startsWith("data: ")) return null;
/** Accumulator for tool call arguments - passed as parameter to avoid module-level state */
function parseSseLine(line: string, toolCallBuffers: Map<string, string>): StreamChunk[] {
if (!line.startsWith("data: ")) return [];
const data = line.slice(6).trim();
if (data === "[DONE]") return { type: "done", finishReason: "stop" };
if (data === "[DONE]") return [{ type: "done", finishReason: "stop" }];

try {
const json = JSON.parse(data);
const delta = json?.choices?.[0]?.delta;
const finishReason = json?.choices?.[0]?.finish_reason;

if (finishReason === "stop" || finishReason === "length") {
return { type: "done", finishReason };
return [{ type: "done", finishReason }];
}

const text = delta?.content;
if (typeof text === "string" && text.length > 0) {
return { type: "delta", text };
return [{ type: "delta", text }];
}

// Check for tool calls
const toolCalls = delta?.tool_calls;
if (Array.isArray(toolCalls) && toolCalls.length > 0) {
const results: StreamChunk[] = [];

for (const tc of toolCalls) {
if (tc?.id && tc?.function?.name) {
const tcId = tc.id;
const funcName = tc.function.name;
const newArgsFragment = tc.function.arguments || "";

// Get or create accumulator for this tool call ID
let buffer = toolCallBuffers.get(tcId);
if (!buffer) {
buffer = "";
toolCallBuffers.set(tcId, buffer);
}

// Append the new fragment
buffer += newArgsFragment;
toolCallBuffers.set(tcId, buffer);

// Try to parse accumulated arguments
let args = {};
if (buffer) {
try {
// Only parse if it looks like complete JSON
if (buffer.startsWith("{") && buffer.endsWith("}")) {
args = JSON.parse(buffer);
// Clear buffer after successful parse
toolCallBuffers.delete(tcId);
}
} catch {
// Partial arguments - will be completed in next chunk
}
}

// Only emit if we have valid args
if (Object.keys(args).length > 0 || !buffer) {
results.push({
Comment thread
omarespejel marked this conversation as resolved.
Outdated
type: "tool_call",
toolCall: {
id: tcId,
name: funcName,
arguments: args,
},
});
}
}
}
Comment thread
omarespejel marked this conversation as resolved.

// Return all tool calls instead of just the first one
if (results.length > 0) {
return results;
}
}
Comment thread
coderabbitai[bot] marked this conversation as resolved.
} catch {
// Malformed JSON — skip.
}

return null;
return [];
}

// ---------------------------------------------------------------------------
Expand All @@ -65,6 +123,9 @@ async function* streamSse(
const abortOnParent = () => controller.abort();
combinedSignal.addEventListener("abort", abortOnParent);

// Create fresh buffer for this stream - avoids shared state between calls
const toolCallBuffers = new Map<string, string>();

Comment thread
coderabbitai[bot] marked this conversation as resolved.
try {
const res = await fetch(url, {
method: "POST",
Expand Down Expand Up @@ -103,23 +164,31 @@ async function* streamSse(
for (const line of lines) {
const trimmed = line.trim();
if (!trimmed) continue;
const chunk = parseSseLine(trimmed);
if (chunk) yield chunk;
if (chunk?.type === "done") return;
const chunks = parseSseLine(trimmed, toolCallBuffers);
for (const chunk of chunks) {
yield chunk;
if (chunk.type === "done") {
toolCallBuffers.clear();
return;
}
}
}
}

// Process any remaining buffer.
if (buffer.trim()) {
const chunk = parseSseLine(buffer.trim());
if (chunk) yield chunk;
const chunks = parseSseLine(buffer.trim(), toolCallBuffers);
for (const chunk of chunks) {
yield chunk;
}
}

// If we never received [DONE], emit one.
yield { type: "done", finishReason: "stop" };
} finally {
clearTimeout(timeoutId);
combinedSignal.removeEventListener("abort", abortOnParent);
toolCallBuffers.clear();
}
}

Expand Down Expand Up @@ -173,6 +242,7 @@ export function createOpenAiProvider(apiKey: string): LlmProvider {
};
if (opts.maxTokens != null) body.max_tokens = opts.maxTokens;
if (opts.temperature != null) body.temperature = opts.temperature;
if (opts.tools != null) body.tools = opts.tools;

const generator = streamSse(
`${OPENAI_BASE}/chat/completions`,
Expand Down
Loading
Loading