Run Pipe Agent Chat with LLM API Keys

This example demonstrates how to run a pipe agent chat while passing your own LLM API key to the pipe at run time via the `llmKey` parameter.


Run Pipe Agent Chat with LLM API Keys Example


import 'dotenv/config';
import {getRunner, Langbase} from 'langbase';

const langbase = new Langbase({
	apiKey: process.env.LANGBASE_API_KEY!,
});

async function main() {
	await createSummaryAgent();

	// Get a readable stream from the pipe run.
	const {stream, threadId, rawResponse} = await langbase.pipes.run({
		stream: true,
		name: 'summary-agent',
		rawResponse: true,
		messages: [
			{
				role: 'user',
				content: 'Who is an AI Engineer?',
			},
		],
		llmKey: process.env.LLM_KEY!, // Your LLM API key
	});

	// Convert the stream to a stream runner.
	const runner = getRunner(stream);

	// Method 1: Using event listeners
	runner.on('connect', () => {
		console.log('Stream started.\n');
	});

	runner.on('content', content => {
		process.stdout.write(content);
	});

	runner.on('end', () => {
		console.log('\nStream ended.');
	});

	runner.on('error', error => {
		console.error('Error:', error);
	});
}

/**
 * Creates the summary agent pipe if it doesn't already exist.
 *
 * Because `upsert: true` is set, the call creates the private 'summary-agent'
 * pipe when it is missing and updates it when it already exists. The pipe is
 * configured with a system message that sets it up as a helpful summarization
 * assistant.
 *
 * @async
 * @returns {Promise<void>} A promise that resolves when the operation is complete.
 * Any error encountered during creation is caught and logged.
 */
async function createSummaryAgent() {
	try {
		await langbase.pipes.create({
			name: 'summary-agent',
			upsert: true,
			status: 'private',
			messages: [
				{
					role: 'system',
					content:
						'You are a helpful assistant that helps users summarize text.',
				},
			],
		});
	} catch (error) {
		console.error('Error creating summary agent:', error);
	}
}

main();
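
Both keys are read from the environment. As a minimal sketch (assuming a standard Node.js setup with dotenv), you could add a fail-fast check like the one below before constructing the Langbase client, so a missing key surfaces as a clear error instead of a failed API call. The check itself is illustrative and not part of the Langbase SDK.

import 'dotenv/config';

// Illustrative guard: verify the environment variables used in the example above.
const requiredEnvVars = ['LANGBASE_API_KEY', 'LLM_KEY'];

for (const name of requiredEnvVars) {
	if (!process.env[name]) {
		// Fail fast with a descriptive message rather than sending an undefined key.
		throw new Error(`Missing required environment variable: ${name}`);
	}
}

With the keys in place, the script can be executed with any TypeScript runner, for example npx tsx index.ts (the file name is an assumption; use whatever you named the example file).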