Deploy Langfuse with your application across different platforms. Use when deploying Langfuse to Vercel, AWS, GCP, or Docker, or integrating Langfuse into your deployment pipeline. Trigger with phrases like "deploy langfuse", "langfuse Vercel", "langfuse AWS", "langfuse Docker", "langfuse production deploy".
Deploy Langfuse observability with your application on Vercel, AWS Lambda, GCP, or Docker.
# Set environment variables
# Langfuse credentials read by the SDK at runtime; added here for the
# "production" target. NOTE(review): repeat for preview/development
# targets if those environments also emit traces — confirm per project.
vercel env add LANGFUSE_PUBLIC_KEY production
vercel env add LANGFUSE_SECRET_KEY production
vercel env add LANGFUSE_HOST production
// vercel.json
// Only the non-secret host URL lives here; the public/secret keys are
// set via `vercel env add` (see above) so they stay out of the repo.
{
"env": {
"LANGFUSE_HOST": "https://cloud.langfuse.com"
}
}
// Next.js API route with Langfuse
// app/api/chat/route.ts
import { Langfuse } from "langfuse";
import { NextResponse } from "next/server";
// Module-scope client: the module (and its HTTP connection) is reused
// across warm serverless invocations, so construct it exactly once.
const { LANGFUSE_PUBLIC_KEY, LANGFUSE_SECRET_KEY } = process.env;
const langfuse = new Langfuse({
  publicKey: LANGFUSE_PUBLIC_KEY!,
  secretKey: LANGFUSE_SECRET_KEY!,
  // No batching: a serverless instance may be suspended right after the
  // response is sent, so each event is dispatched immediately.
  flushAt: 1,
});
/**
 * POST /api/chat — runs one chat turn and records it as a Langfuse trace.
 *
 * Serverless note: Vercel may freeze the process as soon as the response
 * is returned, so buffered events are flushed in `finally` before control
 * leaves the handler on BOTH the success and the error path. This single
 * flush point replaces the duplicated pre-return flushes.
 */
export async function POST(request: Request) {
  const { message } = await request.json();
  const trace = langfuse.trace({
    name: "api/chat",
    input: { message },
    metadata: {
      platform: "vercel",
      region: process.env.VERCEL_REGION, // undefined when run locally
    },
  });
  try {
    const response = await processChat(message, trace);
    trace.update({ output: response });
    return NextResponse.json(response);
  } catch (error) {
    // Record the failure on the trace, then let Next.js surface the 500.
    trace.update({ level: "ERROR", statusMessage: String(error) });
    throw error;
  } finally {
    // Guaranteed flush before the platform can freeze the instance.
    await langfuse.flushAsync();
  }
}
// handler.ts
import { Langfuse } from "langfuse";
import { APIGatewayEvent, Context } from "aws-lambda";
// Client options pulled out for readability; the client itself is built
// at module load so warm Lambda starts skip re-initialization.
const langfuseOptions = {
  publicKey: process.env.LANGFUSE_PUBLIC_KEY!,
  secretKey: process.env.LANGFUSE_SECRET_KEY!,
  flushAt: 1, // no batching — Lambda may freeze right after returning
};
const langfuse = new Langfuse(langfuseOptions);
/**
 * API Gateway entry point, traced with Langfuse.
 *
 * Every invocation — success or failure — flushes buffered events in
 * `finally` before returning, because Lambda freezes the sandbox as soon
 * as the handler resolves. The single flush point replaces the
 * duplicated pre-return flushes.
 */
export async function handler(event: APIGatewayEvent, context: Context) {
  // Resolve as soon as the handler returns instead of waiting for open
  // handles (e.g. keep-alive sockets) to drain.
  context.callbackWaitsForEmptyEventLoop = false;
  // Warm-start marker stored on `global` so it survives across
  // invocations of the same sandbox; typed cast avoids an untyped
  // `global.isWarm` under strict TS.
  const g = global as typeof global & { isWarm?: boolean };
  // Robustness: a malformed body must surface as a traced 500 below,
  // not as an unhandled JSON.parse throw before the trace exists.
  let input: unknown;
  try {
    input = JSON.parse(event.body || "{}");
  } catch {
    input = { rawBody: event.body };
  }
  const trace = langfuse.trace({
    name: "lambda/handler",
    input,
    metadata: {
      requestId: context.awsRequestId,
      functionName: context.functionName,
      coldStart: !g.isWarm,
    },
  });
  g.isWarm = true;
  try {
    const result = await processRequest(event, trace);
    trace.update({ output: result });
    return {
      statusCode: 200,
      body: JSON.stringify(result),
    };
  } catch (error) {
    // Record the failure on the trace; return a generic 500 to callers.
    trace.update({ level: "ERROR", statusMessage: String(error) });
    return {
      statusCode: 500,
      body: JSON.stringify({ error: "Internal error" }),
    };
  } finally {
    // CRITICAL: flush before Lambda freezes the sandbox.
    await langfuse.flushAsync();
  }
}
# serverless.yml