SQLite database patterns for Cubby — migrations, queries, transactions, FTS5 search, dynamic updates
Use when adding or modifying database code in backend/internal/db/.
// NOTE(review): these query-string pragmas (_journal_mode, _busy_timeout, _foreign_keys)
// are the mattn/go-sqlite3 DSN style, but the driver name "sqlite" is normally registered
// by modernc.org/sqlite, which expects _pragma=journal_mode(WAL) etc. instead — confirm
// which driver backend/internal/db imports, or the pragmas may be silently ignored.
db, err := sql.Open("sqlite", path+"?_journal_mode=WAL&_busy_timeout=5000&_foreign_keys=ON")
Always enable: WAL mode, busy timeout, foreign keys.
Embedded SQL files in internal/db/migrations/. Naming: NNN_description.up.sql / .down.sql.
-- 002_add_labels.up.sql
-- Adds the labels lookup table. IF NOT EXISTS keeps the migration
-- idempotent if it is ever re-applied against an existing database.
CREATE TABLE IF NOT EXISTS labels (
id INTEGER PRIMARY KEY AUTOINCREMENT,
-- Label names are unique; inserts of a duplicate name fail with a constraint error.
name TEXT NOT NULL UNIQUE,
-- Default swatch color (hex); UI may override per label.
color TEXT NOT NULL DEFAULT '#6b7280',
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
Register new migrations in db.go:
migrations := []string{"001_initial", "002_add_labels"}
All queries live on the Queries struct. Follow the verb convention:
| Method | Signature |
|---|---|
| List | func (q *Queries) ListThings() ([]models.Thing, error) |
| Get | func (q *Queries) GetThing(id int64) (*models.Thing, error) |
| Create | func (q *Queries) CreateThing(req models.CreateThingRequest) (*models.Thing, error) |
| Update | func (q *Queries) UpdateThing(id int64, req models.UpdateThingRequest) (*models.Thing, error) |
| Delete | func (q *Queries) DeleteThing(id int64) error |
Return nil, nil — not found is not an error:
// Not-found is modeled as (nil, nil), not as an error, so handlers can map
// it directly to a 404. Use errors.Is rather than ==, so the sentinel still
// matches if a layer in between wraps the error with fmt.Errorf("...: %w", err).
if errors.Is(err, sql.ErrNoRows) {
return nil, nil
}
tx, err := q.db.Begin()
if err != nil {
return fmt.Errorf("begin tx: %w", err)
}
// Rollback after a successful Commit is a harmless no-op, so this one defer
// covers every early-return/error path without extra bookkeeping.
defer tx.Rollback()
// ... operations on tx ...
return tx.Commit()
// Build the SET clause from the pointer-typed request fields:
// a nil field means "leave this column unchanged".
sets := []string{}
args := []any{}
if req.Name != nil {
sets = append(sets, "name = ?")
args = append(args, *req.Name)
}
// No fields set — skip the UPDATE entirely and return the current row.
if len(sets) == 0 {
return q.GetThing(id) // no changes
}
// Any real change also bumps updated_at.
sets = append(sets, "updated_at = ?")
args = append(args, time.Now().UTC())
// id binds to the trailing WHERE placeholder, so it must be appended last.
args = append(args, id)
// Only fixed column fragments from the whitelist above are interpolated;
// every value travels through a ? placeholder, so no injection risk here.
query := fmt.Sprintf("UPDATE things SET %s WHERE id = ?", strings.Join(sets, ", "))
When adding a searchable entity, create an FTS5 virtual table with triggers:
-- External-content FTS5 index over things(name, description): row text lives
-- only in the things table; the FTS table stores just the token index.
CREATE VIRTUAL TABLE IF NOT EXISTS things_fts USING fts5(
name, description,
content='things',
content_rowid='id'
);
-- Keep FTS in sync
-- Inserting into an external-content table indexes the new row's text.
CREATE TRIGGER things_ai AFTER INSERT ON things BEGIN
INSERT INTO things_fts(rowid, name, description) VALUES (new.id, new.name, new.description);
END;
-- Deletions use the special 'delete' command row; the OLD column values must
-- be supplied so FTS5 can locate and remove the indexed tokens.
CREATE TRIGGER things_ad AFTER DELETE ON things BEGIN
INSERT INTO things_fts(things_fts, rowid, name, description) VALUES ('delete', old.id, old.name, old.description);
END;
-- Updates are a delete of the old text followed by an insert of the new.
CREATE TRIGGER things_au AFTER UPDATE ON things BEGIN
INSERT INTO things_fts(things_fts, rowid, name, description) VALUES ('delete', old.id, old.name, old.description);
INSERT INTO things_fts(rowid, name, description) VALUES (new.id, new.name, new.description);
END;
Always defer rows.Close() immediately. Check rows.Err() after the loop:
rows, err := q.db.Query(query, args...)
if err != nil {
return nil, fmt.Errorf("list things: %w", err)
}
// Close immediately after the error check so the connection is released
// on every return path.
defer rows.Close()
var things []models.Thing
for rows.Next() {
var t models.Thing
if err := rows.Scan(&t.ID, &t.Name, ...); err != nil {
return nil, fmt.Errorf("scan thing: %w", err)
}
things = append(things, t)
}
// rows.Err() surfaces iteration errors that rows.Next() swallowed
// (e.g. a connection dropped mid-scan); never skip it.
return things, rows.Err()
Avoid: `SELECT *` — always list columns explicitly. Avoid `interface{}` for args — use `[]any`. Never skip checking `rows.Err()` after the iteration loop.