2 changes: 2 additions & 0 deletions packages/sample-app/package.json
@@ -15,6 +15,7 @@
"run:gemini": "npm run build && node dist/src/vertexai/gemini.js",
"run:palm2": "npm run build && node dist/src/vertexai/palm2.js",
"run:decorators": "npm run build && node dist/src/sample_decorators.js",
"run:associations": "npm run build && node dist/src/sample_associations.js",
"run:with": "npm run build && node dist/src/sample_with.js",
"run:prompt_mgmt": "npm run build && node dist/src/sample_prompt_mgmt.js",
"run:vercel": "npm run build && node dist/src/sample_vercel_ai.js",
@@ -43,6 +44,7 @@
"run:mcp": "npm run build && node dist/src/sample_mcp.js",
"run:mcp:real": "npm run build && node dist/src/sample_mcp_real.js",
"run:mcp:working": "npm run build && node dist/src/sample_mcp_working.js",
"run:chatbot": "npm run build && node dist/src/sample_chatbot_interactive.js",
"dev:image_generation": "pnpm --filter @traceloop/instrumentation-openai build && pnpm --filter @traceloop/node-server-sdk build && npm run build && node dist/src/sample_openai_image_generation.js",
"lint": "eslint .",
"lint:fix": "eslint . --fix"
178 changes: 178 additions & 0 deletions packages/sample-app/src/sample_associations.ts
@@ -0,0 +1,178 @@
import * as traceloop from "@traceloop/node-server-sdk";
import OpenAI from "openai";

// Initialize Traceloop
traceloop.initialize({
  appName: "associations_demo",
  apiKey: process.env.TRACELOOP_API_KEY,
  disableBatch: true,
});

const openai = new OpenAI();

/**
 * Sample chatbot that demonstrates the Associations API.
 * This example shows how to track conversations, users, and sessions
 * across multiple LLM interactions.
 */
class ChatbotWithAssociations {
  constructor(
    private conversationId: string,
    private userId: string,
    private sessionId: string,
  ) {}

  /**
   * Process a multi-turn conversation with associations
   */
  @traceloop.workflow({ name: "chatbot_conversation" })
  async handleConversation() {
    console.log("\n=== Starting Chatbot Conversation ===");
    console.log(`Conversation ID: ${this.conversationId}`);
    console.log(`User ID: ${this.userId}`);
    console.log(`Session ID: ${this.sessionId}\n`);

    // Set standard associations at the beginning of the conversation
    // These will be automatically attached to all spans within this context
    traceloop.Associations.set([
      [traceloop.AssociationProperty.CONVERSATION_ID, this.conversationId],
      [traceloop.AssociationProperty.USER_ID, this.userId],
      [traceloop.AssociationProperty.SESSION_ID, this.sessionId],
    ]);

    // Use withAssociationProperties to add custom properties
    // Custom properties (like chat_subject) will be prefixed with traceloop.association.properties
    return traceloop.withAssociationProperties(
      { chat_subject: "general" },
      async () => {
        // First message
        const greeting = await this.sendMessage(
          "Hello! What's the weather like today?",
        );
        console.log(`Bot: ${greeting}\n`);

        // Second message in the same conversation
        const followup = await this.sendMessage(
          "What should I wear for that weather?",
        );
        console.log(`Bot: ${followup}\n`);

        // Third message
        const final = await this.sendMessage("Thanks for the advice!");
        console.log(`Bot: ${final}\n`);

        return {
          greeting,
          followup,
          final,
        };
      },
    );
  }

  /**
   * Send a single message - this is a task within the workflow
   */
  @traceloop.task({ name: "send_message" })
  private async sendMessage(userMessage: string): Promise<string> {
    console.log(`User: ${userMessage}`);

    const completion = await openai.chat.completions.create({
      messages: [{ role: "user", content: userMessage }],
      model: "gpt-3.5-turbo",
    });

    return completion.choices[0].message.content || "No response";
  }
}

/**
 * Simulate a customer service scenario with multiple customers
 */
async function customerServiceDemo() {
  return traceloop.withWorkflow(
    { name: "customer_service_scenario" },
    async () => {
      console.log("\n=== Customer Service Scenario ===\n");

      // Customer 1
      traceloop.Associations.set([
        [traceloop.AssociationProperty.CUSTOMER_ID, "cust-001"],
        [traceloop.AssociationProperty.USER_ID, "agent-alice"],
      ]);

      const customer1Response = await openai.chat.completions.create({
        messages: [
          {
            role: "user",
            content: "I need help with my order #12345",
          },
        ],
        model: "gpt-3.5-turbo",
      });

      console.log("Customer 1 (cust-001):");
      console.log(
        `Response: ${customer1Response.choices[0].message.content}\n`,
      );

      // Customer 2 - Update associations for new customer
      traceloop.Associations.set([
        [traceloop.AssociationProperty.CUSTOMER_ID, "cust-002"],
        [traceloop.AssociationProperty.USER_ID, "agent-bob"],
      ]);

      const customer2Response = await openai.chat.completions.create({
        messages: [
          {
            role: "user",
            content: "How do I return an item?",
          },
        ],
        model: "gpt-3.5-turbo",
      });

      console.log("Customer 2 (cust-002):");
      console.log(
        `Response: ${customer2Response.choices[0].message.content}\n`,
      );
    },
  );
}

/**
 * Main execution
 */
async function main() {
console.log("============================================");
console.log("Traceloop Associations API Demo");
console.log("============================================");

try {
// Example 1: Multi-turn chatbot conversation with custom properties
const chatbot = new ChatbotWithAssociations(
"conv-abc-123", // conversation_id
"user-alice-456", // user_id
"session-xyz-789", // session_id
);

await chatbot.handleConversation();

// Example 2: Customer service with multiple customers
await customerServiceDemo();

console.log("\n=== Demo Complete ===");
console.log(
"Check your Traceloop dashboard to see the associations attached to traces!",
);
console.log(
"You can filter and search by conversation_id, user_id, session_id, customer_id, or custom properties like chat_subject.",
);
} catch (error) {
console.error("Error running demo:", error);
process.exit(1);
}
}

// Run the demo
main();