Lightweight local SQLite database for agent state, caching, and small datasets. Zero-config, file-based, works everywhere. Use this skill when the user needs a quick local database, key-value storage, caching, queuing, or lightweight persistence. Also trigger for "sqlite", "local database", "cache", "key-value store", "lightweight storage", or when a task needs simple persistence without a full database server.
Lightweight local SQLite database for caching, state management, and data persistence. Zero-config, file-based, works everywhere.
Use for:
- Agent state, conversation memory, and key-value storage
- Caching API responses and computed results
- Task queues, metrics, and other small local datasets
NOT for:
- Large datasets or heavy write concurrency across many processes
- Shared, multi-user access that needs a real database server (see postgres-advanced below)
db = create_database("cache.db")
Creates cache.db in the current directory. File-based, no server needed.
# Store value
set_key(db, "user_123_prefs", {"theme": "dark", "lang": "en"})
# Retrieve value
prefs = get_key(db, "user_123_prefs")
# Delete value
delete_key(db, "user_123_prefs")
# Check if key exists
exists(db, "user_123_prefs")
# Execute SQL
results = query(db, "SELECT * FROM cache WHERE key LIKE ?", ["%user_%"])
# Execute with no results (e.g., prune entries older than 24 hours)
import time
execute(db, "DELETE FROM cache WHERE created < ?", [time.time() - 86400])
# Get single row
row = query_one(db, "SELECT * FROM cache WHERE key = ?", ["settings"])
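As the later examples suggest, query returns a list of rows whose columns are accessed by name; a quick sketch of iterating results:
# Iterate rows; columns are accessed by name
for row in results:
    print(row["key"], row["value"])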
Table: cache
- key (TEXT, PRIMARY KEY)
- value (JSON)
- created (UNIX timestamp)
- expires (UNIX timestamp, optional)
Use for: Settings, preferences, user data, cached API responses.
db = create_database("cache.db", schema="key-value")
set_key(db, "api_response_weather", {"temp": 72, "condition": "sunny"})
Table: metrics
- timestamp (UNIX timestamp)
- metric_name (TEXT)
- value (FLOAT)
- tags (JSON)
Use for: Performance metrics, event logs, sensor data.
db = create_database("metrics.db", schema="time-series")
insert_metric(db, "cpu_usage", 65.2, tags={"host": "server1"})
# Query last hour
recent = query_metrics_since(db, time.time() - 3600)
Table: queue
- id (INTEGER, PRIMARY KEY)
- task (JSON)
- status (TEXT: pending, processing, done, failed)
- created (UNIX timestamp)
- retry_count (INTEGER)
Use for: Task queues, job processing, async work.
db = create_database("jobs.db", schema="queue")
# Enqueue task
enqueue(db, {"type": "email", "to": "[email protected]"})
# Get next pending task
task = dequeue(db)
# Mark done
mark_done(db, task["id"])
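The status column drives the task lifecycle (pending, processing, done, failed). Internally, dequeue presumably claims the oldest pending row, roughly like this sketch built from the documented SQL helpers (hypothetical; the real implementation is built in):
# Hypothetical sketch of what dequeue does internally:
# claim the oldest pending task and mark it processing
row = query_one(db, "SELECT * FROM queue WHERE status = 'pending' ORDER BY id LIMIT 1")
if row:
    execute(db, "UPDATE queue SET status = 'processing' WHERE id = ?", [row["id"]])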
Table: cache_ttl
- key (TEXT, PRIMARY KEY)
- value (JSON)
- created (UNIX timestamp)
- ttl_seconds (INTEGER)
- expires_at (UNIX timestamp, computed)
Use for: Session data, temporary results, cache with auto-expiry.
db = create_database("sessions.db", schema="cache-ttl")
# Store with 1-hour expiry
cache_with_ttl(db, "session_abc", {"user_id": 123}, ttl_seconds=3600)
# Expired entries are automatically ignored
session = get_key(db, "session_abc") # Returns None if expired
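Expiry is just a timestamp comparison against expires_at, so the same filter works in raw SQL if you bypass get_key (a sketch using the documented helpers and the cache_ttl schema):
import time
# Only return the value if it has not expired yet
row = query_one(db, "SELECT value FROM cache_ttl WHERE key = ? AND expires_at > ?",
                ["session_abc", time.time()])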
db = create_database("agent_memory.db", schema="key-value")
# Store conversation context
set_key(db, "conversation_state", {
"user_id": 123,
"step": 2,
"data": {"name": "Alice", "email": "[email protected]"}
})
# Retrieve later
state = get_key(db, "conversation_state")
print(f"User: {state['data']['name']}")
db = create_database("perf.db", schema="time-series")
import time
# Log metrics periodically
for i in range(10):
    insert_metric(db, "request_time_ms", 150 + i*5)
    time.sleep(1)
# Analyze
avg = query(db, "SELECT AVG(value) as avg FROM metrics WHERE metric_name = ?", ["request_time_ms"])
print(f"Average response: {avg[0]['avg']:.2f}ms")
db = create_database("tasks.db", schema="queue")
# Add tasks
enqueue(db, {"type": "scrape", "url": "example.com/page1"})
enqueue(db, {"type": "scrape", "url": "example.com/page2"})
# Process
while True:
    task = dequeue(db)
    if not task:
        break
    try:
        result = scrape_url(task["url"])
        mark_done(db, task["id"])
    except Exception as e:
        mark_failed(db, task["id"], error=str(e))
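The retry_count column supports simple retry policies. Here is a sketch that requeues failed tasks for up to three attempts via the documented execute helper (the threshold and the UPDATE statement are assumptions, not built-in API):
MAX_RETRIES = 3  # assumed policy, not part of the skill API
# Requeue failed tasks that still have retries left
execute(db,
        "UPDATE queue SET status = 'pending', retry_count = retry_count + 1 "
        "WHERE status = 'failed' AND retry_count < ?",
        [MAX_RETRIES])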
db = create_database("api_cache.db", schema="cache-ttl")
def get_weather(city):
    cache_key = f"weather_{city}"
    # Check cache first
    cached = get_key(db, cache_key)
    if cached:
        return cached
    # Fetch from API
    response = fetch_weather_api(city)
    # Cache for 1 hour
    cache_with_ttl(db, cache_key, response, ttl_seconds=3600)
    return response
# First call: fetches from API
print(get_weather("NYC"))
# Second call (within 1 hour): returns cached
print(get_weather("NYC"))
Write-ahead logging (WAL) is enabled by default for better performance:
db = create_database("data.db", wal_mode=True)
Benefits:
- Readers don't block writers (and vice versa), so concurrent reads stay fast
- Fewer disk syncs per transaction, so writes are faster
- Better crash resilience than the default rollback journal
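Under the hood this corresponds to the standard SQLite pragma, shown here for reference (create_database presumably issues it for you):
# Equivalent raw SQLite statement (normally handled by create_database)
execute(db, "PRAGMA journal_mode=WAL")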
size = get_database_size(db)
print(f"Database: {size / 1024:.2f} KB")
clear_cache(db) # Delete all key-value entries
export_to_json(db, "backup.json")
import_from_json(db, "backup.json")
vacuum_database(db) # Reduces file size
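Expired TTL entries are skipped on read but still occupy disk space; purging them before vacuuming reclaims it (a sketch using the documented helpers and the cache_ttl schema):
import time
# Delete rows whose expiry has passed, then compact the file
execute(db, "DELETE FROM cache_ttl WHERE expires_at < ?", [time.time()])
vacuum_database(db)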
Pre-built indexes for common queries:
# Time-series queries are indexed on timestamp
db = create_database("metrics.db", schema="time-series")
# These are fast:
query_metrics_since(db, start_timestamp)
query_metrics_range(db, start, end)
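If you frequently filter on other columns, you can add your own index through the SQL helper (a sketch; the column name comes from the time-series schema above):
# Speed up per-metric lookups like the AVG query in the examples
execute(db, "CREATE INDEX IF NOT EXISTS idx_metrics_name ON metrics(metric_name)")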
Use sqlite-quick for:
- Single-process, local persistence: agent state, caches, queues, metrics
- Zero-setup storage for small to medium datasets
Upgrade to postgres-advanced when:
- Multiple processes or machines need concurrent write access
- The dataset outgrows a single local file or needs server-side features such as replication, roles, or rich indexing