Browse Source

fix(agent): lazy-load LangSmith to avoid build failure

Use dynamic import() for langsmith so API build does not depend on
resolving langsmith/experimental/vercel at compile time. Tracing still
works at runtime when LANGSMITH_API_KEY is set.

Co-authored-by: Cursor <cursoragent@cursor.com>
pull/6386/head
Yash Kuceriya 1 month ago
parent
commit
9389a97504
  1. 62
      apps/api/src/app/endpoints/agent/agent.service.ts

62
apps/api/src/app/endpoints/agent/agent.service.ts

@@ -13,11 +13,6 @@ import { Injectable, Logger } from '@nestjs/common';
import { createOpenRouter } from '@openrouter/ai-sdk-provider';
import * as ai from 'ai';
import { generateText, tool } from 'ai';
import { Client } from 'langsmith';
import {
createLangSmithProviderOptions,
wrapAISDK
} from 'langsmith/experimental/vercel';
import { randomUUID } from 'crypto';
import { z } from 'zod';
@@ -39,6 +34,39 @@ function ensureLangSmithEnv(): string | null {
return key;
}
/** Lazy-load LangSmith tracing when key is set; avoids hard build dependency. */
async function getTracedGenerateText(metadata: Record<string, unknown>): Promise<{
  generateTextFn: typeof generateText;
  flush?: () => Promise<void>;
}> {
  // No LANGSMITH_API_KEY configured → tracing disabled; hand back the plain SDK function.
  const key = ensureLangSmithEnv();
  if (!key) {
    return { generateTextFn: generateText };
  }

  try {
    // Resolve the LangSmith modules at runtime only, so the compile step never
    // has to resolve 'langsmith/experimental/vercel'.
    const { Client } = await import('langsmith');
    const { createLangSmithProviderOptions, wrapAISDK } = await import(
      'langsmith/experimental/vercel'
    );

    const lsClient = new Client();
    const wrappedSdk = wrapAISDK(ai, { client: lsClient });

    // Wrap generateText so every call carries the LangSmith provider options
    // (trace name, tags, and the caller-supplied metadata).
    const generateTextFn = (callOptions: Parameters<typeof generateText>[0]) =>
      wrappedSdk.generateText({
        ...callOptions,
        providerOptions: {
          ...(callOptions as any).providerOptions,
          langsmith: createLangSmithProviderOptions({
            name: 'Ghostfolio Agent',
            tags: ['ghostfolio', 'agent'],
            metadata
          })
        }
      });

    // Drain any buffered trace batches; resolve immediately when the client
    // does not expose awaitPendingTraceBatches.
    const flush = () =>
      (lsClient as any).awaitPendingTraceBatches?.() ?? Promise.resolve();

    return { generateTextFn, flush };
  } catch {
    // Import or client construction failed — deliberately fall back to
    // untraced generation rather than breaking the request path.
    return { generateTextFn: generateText };
  }
}
export interface AgentChatMessage {
role: 'user' | 'assistant' | 'system';
content: string;
@@ -302,13 +330,8 @@ export class AgentService {
content: m.content
}));
// Optional LangSmith tracing (same pattern as Collabboard)
const hasLangSmith = !!ensureLangSmithEnv();
const langsmithClient = hasLangSmith ? new Client() : null;
const tracedAi = langsmithClient
? wrapAISDK(ai, { client: langsmithClient })
: null;
const generateTextFn = tracedAi?.generateText ?? generateText;
// Optional LangSmith tracing (lazy-loaded to avoid build failures)
const { generateTextFn, flush } = await getTracedGenerateText({ traceId });
const llmT0 = Date.now();
const { text, usage } = await generateTextFn({
@@ -316,20 +339,9 @@
system: systemPrompt,
messages: coreMessages,
tools,
maxSteps: 5,
...(langsmithClient && {
providerOptions: {
langsmith: createLangSmithProviderOptions({
name: 'Ghostfolio Agent',
tags: ['ghostfolio', 'agent'],
metadata: { traceId }
})
}
})
maxSteps: 5
});
if (langsmithClient) {
await langsmithClient.awaitPendingTraceBatches?.();
}
await flush?.();
const llmMs = Date.now() - llmT0;
const { content, verification } = verifyAgentOutput(text);

Loading…
Cancel
Save