Summarization workflow
This example demonstrates how to create a workflow that summarizes text input in a single step with automatic retries and exponential backoff. The same Workflow primitive can also fan out independent steps in parallel; a sketch of that pattern follows the example.
Summarization workflow example
import 'dotenv/config';
import { Langbase, Workflow } from 'langbase';

async function processText({ input }: { input: string }) {
  // Initialize Langbase
  const langbase = new Langbase({
    apiKey: process.env.LANGBASE_API_KEY!,
  });

  // Create workflow with debug mode
  const workflow = new Workflow({
    debug: true,
  });

  try {
    // Define a single step with retries: up to 2 retries, starting at a
    // 1000 ms delay and backing off exponentially
    const response = await workflow.step({
      id: 'process_text',
      retries: {
        limit: 2,
        delay: 1000,
        backoff: 'exponential',
      },
      run: async () => {
        const result = await langbase.agent.run({
          model: 'openai:gpt-4o',
          instructions:
            'Summarize the following text in a single paragraph. Be concise but capture the key information.',
          apiKey: process.env.LLM_API_KEY!,
          input: [{ role: 'user', content: input }],
          stream: false,
        });

        return result.output;
      },
    });

    // Return the result
    return {
      response,
    };
  } catch (error) {
    console.error('Workflow step failed:', error);
    throw error;
  }
}
async function main() {
  const sampleText = `
    Langbase is the most powerful serverless AI platform for building AI agents with memory.
    Build, deploy, and scale AI agents with tools and memory (RAG). Simple AI primitives
    with a world-class developer experience without using any frameworks.

    Compared to complex AI frameworks, Langbase is serverless and the first composable
    AI platform. Build AI agents without any bloated frameworks. You write the logic,
    we handle the logistics.

    Langbase offers AI Pipes (serverless agents with tools), AI Memory (serverless RAG),
    and AI Studio (developer platform). The platform is 30-50x less expensive than
    competitors, supports 250+ LLM models, and enables collaboration among team members.
  `;

  const results = await processText({ input: sampleText });
  console.log(JSON.stringify(results, null, 2));
}

main();
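The example above runs a single step. If you want parallel processing, one possible extension is to fan out one workflow step per input and wait for them together with Promise.all. This is a minimal sketch, not part of the documented example: the processTexts helper, the generated step ids, the omission of the retries block, and the assumption that independent steps can execute concurrently are all illustrative.

// Hypothetical sketch: summarize several independent texts in parallel.
// Reuses the same imports and environment variables as processText above.
async function processTexts({ inputs }: { inputs: string[] }) {
  const langbase = new Langbase({
    apiKey: process.env.LANGBASE_API_KEY!,
  });
  const workflow = new Workflow({ debug: true });

  // One workflow step per input, awaited together.
  const summaries = await Promise.all(
    inputs.map((text, index) =>
      workflow.step({
        id: `summarize_${index}`,
        run: async () => {
          const result = await langbase.agent.run({
            model: 'openai:gpt-4o',
            instructions: 'Summarize the following text in a single paragraph.',
            apiKey: process.env.LLM_API_KEY!,
            input: [{ role: 'user', content: text }],
            stream: false,
          });
          return result.output;
        },
      })
    )
  );

  return { summaries };
}

Each step keeps its own id, so debug output stays attributable to the step that produced it, and the same retries block from the single-step example can be added per step if needed.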