// Create a simple chatflow with text input and LLM response
/**
 * Create and register a simple two-node chatflow: a text-input node wired
 * into an LLM chain node.
 *
 * @param {string} name - Display name for the new chatflow.
 * @param {string} category - Category label for the chatflow.
 * @param {string} apiKey - API key forwarded to the LLM node's data.
 * @param {object} [options] - Optional overrides (backward-compatible addition).
 * @param {string} [options.greeting] - Initial text shown by the input node.
 * @param {string} [options.model] - LLM model identifier.
 * @param {number} [options.temperature] - Sampling temperature for the LLM.
 * @returns {Promise<object>} The created chatflow, as returned by the API.
 */
async function createSimpleChatbot(name, category, apiKey, options = {}) {
  const {
    greeting = "Hello! How can I help you today?",
    model = "gpt-3.5-turbo",
    temperature = 0.7,
  } = options;

  const chatflowData = {
    name,
    category,
    isPublic: true,
    nodes: [
      {
        id: "textInput",
        type: "textInput",
        position: { x: 100, y: 200 },
        data: { text: greeting },
      },
      {
        id: "llmChain",
        type: "llmChain",
        position: { x: 400, y: 200 },
        data: { model, apiKey, temperature },
      },
    ],
    edges: [
      {
        id: "edge-1",
        source: "textInput",
        target: "llmChain",
        sourceHandle: "output",
        targetHandle: "input",
      },
    ],
  };

  // `return await` kept so this frame shows up in async stack traces.
  return await client.flowise.createChatflow(chatflowData);
}
// Update chatflow with a new node
/**
 * Add a buffer-memory node to an existing chatflow and wire it between the
 * "textInput" and "llmChain" nodes.
 *
 * NOTE(review): assumes the target chatflow contains nodes with ids
 * "textInput" and "llmChain" (as created by createSimpleChatbot) — confirm
 * before using on other flow shapes; edges to missing nodes would dangle.
 *
 * @param {string} chatflowId - Id of the chatflow to update.
 * @returns {Promise<object>} The updated chatflow, as returned by the API.
 * @throws {Error} If the chatflow already contains a node with id "memoryNode".
 */
async function addMemoryToExistingChatflow(chatflowId) {
  // Get the existing chatflow definition.
  const chatflow = await client.flowise.getChatflow(chatflowId);

  // Guard against double-insertion: running this twice would otherwise
  // create a duplicate node id plus duplicate edges.
  const existingNodes = chatflow.nodes ?? [];
  if (existingNodes.some((node) => node.id === "memoryNode")) {
    throw new Error(`Chatflow ${chatflowId} already has a memoryNode`);
  }

  // The new buffer-memory node.
  const memoryNode = {
    id: "memoryNode",
    type: "bufferMemory",
    position: { x: 250, y: 300 },
    data: { memoryKey: "chat_history" },
  };

  // Edges routing the conversation through the memory node.
  const newEdges = [
    {
      id: "edge-to-memory",
      source: "textInput",
      target: "memoryNode",
      sourceHandle: "output",
      targetHandle: "input",
    },
    {
      id: "edge-from-memory",
      source: "memoryNode",
      target: "llmChain",
      sourceHandle: "output",
      targetHandle: "memory",
    },
  ];

  // Build an updated copy rather than mutating the fetched object in place
  // (the original mixed `push` mutation with spread copying).
  const updatedChatflow = {
    ...chatflow,
    nodes: [...existingNodes, memoryNode],
    edges: [...(chatflow.edges ?? []), ...newEdges],
  };
  return await client.flowise.updateChatflow(chatflowId, updatedChatflow);
}