branch.messages is a Pile[RoledMessage] containing all messages in the conversation. Access it to review what has been sent and received.
# Number of messages
len(branch.messages)

# Iterate all messages
for msg in branch.messages:
    print(f"[{msg.role}] {msg.content[:100]}")

# Get the last message
last = branch.messages[-1]

# Access by UUID
msg = branch.messages[some_uuid]
from lionagi.protocols.messages import (
    System,             # system prompt
    Instruction,        # user instruction
    AssistantResponse,  # LLM response
    ActionRequest,      # tool call from LLM
    ActionResponse,     # tool result back to LLM
)

# Check message type
from lionagi.protocols.messages import AssistantResponse

if isinstance(branch.messages[-1], AssistantResponse):
    response = branch.messages[-1]
    print(response.content)         # response text
    print(response.model_response)  # raw provider response dict
branch.logs is a Pile[Log] containing API call logs, tool invocations, and other activity records.
# Number of log entrieslen(branch.logs)# Iterate logsforloginbranch.logs:print(log.content)# dict with event details# Dump logs to filebranch.dump_logs(persist_path="./debug_logs.json",clear=False)# Async versionawaitbranch.adump_logs(persist_path="./debug_logs.json",clear=False)
# Save branch state to dict
state = branch.to_dict()
# state contains: messages, logs, chat_model, parse_model, system, log_config, metadata

# Restore from dict
restored = Branch.from_dict(state)
# restored has the same messages, models, and configuration

# Save to JSON file
import json

with open("branch_state.json", "w") as f:
    json.dump(state, f, default=str)

# Load from JSON file
with open("branch_state.json") as f:
    data = json.load(f)
restored = Branch.from_dict(data)
clone() creates a new Branch with the same messages, system prompt, tools, and model configuration. Use it to explore alternative conversation paths without affecting the original.
# Synchronous clonealt_branch=branch.clone()# Async clone (acquires message lock)alt_branch=awaitbranch.aclone()# Clone with a specific sender IDalt_branch=branch.clone(sender=some_id)
# Original conversationawaitbranch.communicate("Analyze this code for bugs")# Fork and try a different approachalt=branch.clone()awaitalt.communicate("Now focus specifically on security vulnerabilities")# Compare resultsoriginal_response=branch.messages[-1].contentalternative_response=alt.messages[-1].content
session = Session()
session.include_branches([branch])

# Split creates a clone and adds it to the session
forked = session.split(branch)
# forked is now managed by the session alongside the original
# Get the full message sequence that was sent
for msg in branch.messages:
    print(f"Role: {msg.role}")
    print(f"Content: {msg.content}")
    print(f"Created: {msg.created_at}")
    print("---")
# Logs contain raw API call information
for log in branch.logs:
    content = log.content
    if isinstance(content, dict):
        # Check for API payload
        if "payload" in content:
            print(f"Request: {content['payload']}")
        # Check for response
        if "response" in content:
            print(f"Response: {content['response']}")
from lionagi import iModel

# Upgrade to a more capable model for complex tasks
branch.chat_model = iModel(provider="openai", model="gpt-4.1")

# Use a faster model for simple follow-ups
branch.chat_model = iModel(provider="openai", model="gpt-4.1-mini")
# Expensive model for conversation, cheap model for parsing
branch = Branch(
    chat_model=iModel(provider="anthropic", model="claude-sonnet-4-20250514"),
    parse_model=iModel(provider="openai", model="gpt-4.1-mini"),
)
# Use a specific model for just this callresult=awaitbranch.communicate("Complex analysis requiring high capability",chat_model=iModel(provider="openai",model="gpt-4.1"),)