Memory is Ziet’s built-in durable storage system that persists data across actions, agent steps, and even failures. It provides:
Durable storage - Data persists even if agent steps fail
Key-value storage - Store and retrieve by unique keys
Semantic search - Find data by meaning, not just exact matches
Metadata filtering - Tag and filter entries
Automatic scoping - Data is isolated per run by default
Cross-agent sharing - Agents can hand off and share memory
Important: Memory is durable and survives failures. If an agent step fails, memory is preserved and the workflow can resume from where it left off. No database setup required - just import and use.
from ziet import memory

# Persist a value under a unique key (the key is the first argument).
memory.add(key="best_flight", value={"price": 350})

# Read the value back by its key.
cached = memory.get("best_flight")  # -> {"price": 350}

# Semantic lookup: match entries by meaning, not exact key text.
matches = memory.search("cheapest flight option")

# Delete the entry once it is no longer needed.
memory.remove(key="best_flight")
from ziet import Action, memory


@Action(
    id="fetch_user",
    name="Fetch User",
    description="Fetch user data from database",
)
def fetch_user(user_id: str) -> None:
    """Load a user record and cache it in memory for later actions."""
    record = database.get_user(user_id)
    # Cache under a per-user key so sibling actions can reuse it.
    memory.add(key=f"user_{user_id}", value=record)


@Action(
    id="send_notification",
    name="Send Notification",
    description="Send notification email",
)
def send_notification(user_id: str, message: str) -> None:
    """Email *message* to the user, populating the cache on a miss."""
    record = memory.get(f"user_{user_id}")
    if not record:
        # Cache miss: fetch into memory, then read the fresh entry back.
        fetch_user(user_id)
        record = memory.get(f"user_{user_id}")
    outcome = send_email(record["email"], message)
    memory.add(key="notification_sent", value=outcome)
from ziet import Action, memory


@Action(
    id="process_data",
    name="Process Data",
    description="Process data with temporary storage",
)
def process_data(data: dict) -> None:
    """Run an expensive computation, using memory as scratch space."""
    # Stash the raw input; add() returns an entry id we can remove by.
    scratch_id = memory.add(key="temp_working_data", value=data)

    outcome = expensive_computation(data)

    # Keep the final result, then drop the temporary working entry.
    memory.add(key="final_result", value=outcome)
    memory.remove(entry_id=scratch_id)
from ziet import Agent, Action, memory


@Action(
    id="risky_operation",
    name="Risky Operation",
    description="Operation that might fail",
)
def risky_operation() -> None:
    """Attempt a failure-prone computation and persist its result."""
    outcome = dangerous_computation()
    memory.add(key="operation_result", value=outcome)


@Agent(
    id="resilient_agent",
    name="ResilientAgent",
    instructions="""
    1. Store progress in memory with key 'progress'
    2. Execute risky_operation action
    3. If it fails, memory is preserved and workflow can retry from last step

    Memory is durable - even if operations fail, stored data persists.
    """,
    actions=["risky_operation"],
)
class ResilientAgent:
    pass


# Even if risky_operation fails, memory.get("progress") is still available.
from ziet import Action, Agent, memory
from ziet.integrations import google, apify, openai


@Action(
    id="search_flights",
    name="Search Flights",
    description="Search Google for flights",
)
def search_flights(origin: str, dest: str) -> None:
    """Run a Google search for flights and cache the raw results."""
    hits = google.search(f"flights {origin} to {dest}")
    memory.add(key="search_results", value=hits)


@Action(
    id="scrape_prices",
    name="Scrape Prices",
    description="Scrape prices from flight websites",
)
def scrape_prices() -> None:
    """Scrape the top search hits, tagging each entry for later lookup."""
    hits = memory.get("search_results")
    for url in hits[:3]:  # only the top 3 results
        page = apify.scrape(url)
        memory.add(
            key=f"scraped_{url}",
            value=page,
            metadata={"type": "scrape"},
        )


@Action(
    id="summarize_options",
    name="Summarize Options",
    description="Generate AI summary of flight options",
)
def summarize_options() -> None:
    """Combine cached search and scrape data into an AI-written summary."""
    search_results = memory.get("search_results")
    # Metadata filter pulls back every entry tagged by scrape_prices.
    tagged = memory.search(metadata={"type": "scrape"})
    scraped_data = [entry["value"] for entry in tagged]

    prompt = f"Summarize: {search_results}, {scraped_data}"
    summary = openai.chat([{"role": "user", "content": prompt}])
    memory.add(key="final_summary", value=summary)


@Agent(
    id="flight_agent",
    name="FlightAgent",
    instructions="""
    1. Use search_flights action to search for flights
    2. Use scrape_prices action to get detailed pricing
    3. Use summarize_options action to generate summary

    All data is stored in memory and can be retrieved:
    - 'search_results': raw search results
    - 'scraped_*': scraped data for each URL
    - 'final_summary': AI-generated summary
    """,
    actions=["search_flights", "scrape_prices", "summarize_options"],
)
class FlightAgent:
    pass