Use this skill when the user asks about "Firestore", "database queries", "indexes", "batch operations", "pagination", "TTL", "write limits", or any Firestore-related work. Provides Firestore query optimization, indexing, and best practices.
// ❌ BAD: Fetch all, filter in JS
// (reads — and bills — every document in the collection, then discards most of them client-side)
const all = await customersRef.get();
const active = all.docs.filter(d => d.data().status === 'active');
// ✅ GOOD: Filter in query
// (filters run server-side; only matching docs are read, and limit() caps the worst case)
const active = await customersRef
.where('status', '==', 'active')
.where('shopId', '==', shopId)
.limit(100)
.get();
// ❌ BAD: Read in loop (N reads)
// (each await is a sequential round-trip: N ids → N network calls)
for (const id of customerIds) {
const doc = await customerRef.doc(id).get();
}
// ✅ GOOD: Batch read (1 operation)
// (getAll fetches all document refs in a single RPC)
const docs = await firestore.getAll(
...customerIds.map(id => customerRef.doc(id))
);
// ❌ BAD: Uses .size
// (compares a computed count against 0 just to ask "is it empty?")
if (snapshot.size === 0) { }
// ✅ GOOD: Uses .empty (fast)
// (.empty states the intent directly)
if (snapshot.empty) { }
// Commit writes in chunks of 500 (Firestore's per-batch operation limit).
// NOTE: a WriteBatch cannot be reused after commit() — the original code
// created ONE batch outside the loop and committed it on every iteration,
// which throws as soon as there is a second chunk. Create a fresh batch
// per chunk instead.
const BATCH_SIZE = 500;
for (let i = 0; i < items.length; i += BATCH_SIZE) {
  const batch = firestore.batch(); // new batch for every chunk
  const chunk = items.slice(i, i + BATCH_SIZE);
  chunk.forEach(item => {
    batch.set(collectionRef.doc(item.id), item);
  });
  await batch.commit();
}
If firestore-indexes/ folder exists, always add indexes there (not directly to firestore.indexes.json):
firestore-indexes/
├── build.js # Merge all → firestore.indexes.json
├── split.js # Split into collection files
├── customers.json # Indexes for customers
└── {collection}.json # One file per collection
Add new indexes to `firestore-indexes/{collection}.json`, then run `yarn firestore:build` to regenerate `firestore.indexes.json`.

| Command | Description |
|---|---|
| yarn firestore:build | Merge into firestore.indexes.json |
| yarn firestore:split | Split into collection files |
| Query Pattern | Index Needed? |
|---|---|
| Single field where() | NO (auto) |
| where() + orderBy() different fields | YES |
| Multiple inequality where() | YES |
When a repository uses paginateQuery or supports sortable grids/lists, create indexes for BOTH ASC and DESC directions:
// firestore-indexes/{collection}.json
{
"indexes": [
{
"collectionGroup": "trustBadges",
"queryScope": "COLLECTION",
"fields": [
{"fieldPath": "shopId", "order": "ASCENDING"},
{"fieldPath": "order", "order": "ASCENDING"}
]
},
{
"collectionGroup": "trustBadges",
"queryScope": "COLLECTION",
"fields": [
{"fieldPath": "shopId", "order": "ASCENDING"},
{"fieldPath": "order", "order": "DESCENDING"}
]
}
]
}
Why both directions?
The paginateQuery helper uses startAfter/endBefore cursors and may sort in either direction; if the index for one direction is missing, that query fails at runtime with a FAILED_PRECONDITION error.

Field overrides — use for large fields you don't query (skips indexing their contents):
{
"fieldOverrides": [
{
"collectionGroup": "webhookLogs",
"fieldPath": "body",
"indexes": []
}
]
}
Automatically delete old documents without cron jobs:
Add an expireAt field in the repository:

/** TTL duration in milliseconds (90 days) */
const TTL_MS = 90 * 24 * 60 * 60 * 1000; // 90 days

// Returns the Date at which a document created at `now` should expire
// (consumed by Firestore's TTL policy via the expireAt field).
function getExpireAt(now) {
  const expiryMillis = now.getTime() + TTL_MS;
  return new Date(expiryMillis);
}
// Creates a notification document stamped with createdAt and a TTL
// expireAt field so Firestore can garbage-collect it automatically.
export async function createNotification({shopId, data}) {
  const createdAt = new Date();
  const payload = {
    ...data,
    shopId,
    createdAt,
    expireAt: getExpireAt(createdAt) // TTL field
  };
  return collection.add(payload);
}
// firestore-indexes/{collection}.json
{
"indexes": [...],
"fieldOverrides": [
{
"collectionGroup": "salePopNotifications",
"fieldPath": "expireAt",
"ttl": true,
"indexes": []
}
]
}
yarn firestore:build # Merge index files
firebase deploy --only firestore:indexes
Notes:
Limit: 1 write per document per second
// ❌ BAD: Multiple writes to same doc
// (hot-spots a single document; Firestore sustains ~1 write/sec per doc)
await shopRef.doc(shopId).update({ lastSyncAt: new Date() });
// ✅ GOOD: Write to separate collection (append-only, no per-doc contention)
await shopUpdatesRef.add({
  shopId,
  lastSyncAt: new Date(),
  // Field renamed from `expiredAt` to `expireAt` for consistency with the
  // TTL section above — the TTL fieldOverride targets `expireAt`, so a
  // differently-named field would never be cleaned up.
  expireAt: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000)
});
ONE repository = ONE collection
const customersRef = firestore.collection('customers');

// Fetch every customer document belonging to the given shop.
export const getByShop = (shopId) => {
  return customersRef.where('shopId', '==', shopId).get();
};

// Merge `data` into a customer doc and refresh its updatedAt stamp.
export const update = (id, data) => {
  const payload = {...data, updatedAt: new Date()};
  return customersRef.doc(id).update(payload);
};
Import standardized utilities from repositories/helper.js:
import {
prepareDoc,
paginateQuery,
getOrderBy,
getByIds,
batchCreate,
batchUpdate,
batchDelete,
getDocsInChunks
} from './helper';
| Function | Purpose | Use Case |
|---|---|---|
| prepareDoc({doc}) | Format document with date conversion | All read operations |
| paginateQuery({queriedRef, collection, query}) | Cursor-based pagination with hasPre/hasNext | List endpoints |
| getOrderBy(sortType) | Parse "field_direction" string | Sortable lists |
| getByIds({collection, ids, filters}) | Batch fetch by IDs (handles 10-item 'in' limit) | Bulk lookups |
| batchCreate/Update/Delete | Chunked batch operations (500 limit) | Bulk mutations |
| getDocsInChunks({collection, shopId}) | Recursive fetch for large datasets | Exports, migrations |
// List endpoint: shop-scoped, optionally filtered and sorted, paginated.
// Returns {data, count, total, pageInfo: {hasPre, hasNext, totalPage}}.
export async function getItemList({shopId, query = {}}) {
  const {order, status} = query;

  // shopId scoping is mandatory on every tenant query.
  let ref = collection.where('shopId', '==', shopId);

  // Optional filters.
  if (status) {
    ref = ref.where('status', '==', status);
  }

  // Sorting ("field_direction" string parsed by getOrderBy).
  const {sortField, direction} = getOrderBy(order);
  ref = ref.orderBy(sortField, direction);

  return paginateQuery({queriedRef: ref, collection, query});
}
// Fetch multiple documents by ID with shopId filter
// (getByIds chunks the ids to respect Firestore's 10-item 'in' limit)
const items = await getByIds({
collection,
ids: ['id1', 'id2', 'id3'],
filters: {shopId} // Security: always include shopId
});
// Fetch by custom field
// (idField presumably switches the lookup from document ID to a field match,
//  and selectFields limits the returned projection — confirm in helper.js)
const items = await getByIds({
collection,
ids: ['SKU001', 'SKU002'],
idField: 'sku',
selectFields: ['name', 'price']
});
For new repositories, use packages/functions/src/repositories/sampleRepository.js as a template.
Includes patterns for:
# Copy and rename for new collection
cp packages/functions/src/repositories/sampleRepository.js \
packages/functions/src/repositories/myFeatureRepository.js
Key patterns in template:
// Ownership validation
// Ownership validation: fetch a doc and return it only when it belongs
// to shopId; returns null (and logs) on a cross-tenant access attempt.
export async function getById(id, shopId) {
  const snapshot = await collection.doc(id).get();
  if (!snapshot.exists) {
    return null;
  }

  const record = prepareDoc({doc: snapshot});
  if (record.shopId === shopId) {
    return record;
  }

  console.error(`Unauthorized access: ${shopId} tried to access ${id}`);
  return null;
}
// Update with ownership check
// Update with ownership check: reuses getById so missing docs and
// cross-tenant docs are both rejected with the same error shape.
export async function updateById(id, shopId, data) {
  const owned = await getById(id, shopId);
  if (owned === null) {
    return {success: false, error: 'Not found or access denied'};
  }
  // ... update logic
}
For maintaining counters, averages, and breakdowns that must stay consistent with related data. Common uses: rating averages, point totals, inventory counts, statistics.
// Collection: {feature}Aggregates (e.g., productAggregates, orderAggregates)
{
shopId: string,
resourceId: string, // ID of parent resource
average: number, // Calculated: sum / total
total: number, // Counter
breakdown: { // Per-category counters (optional)
category1: number,
category2: number
},
updatedAt: Timestamp
}
Use composite keys for aggregates scoped to multiple fields:
// Composite document key: one aggregate doc per (shopId, resourceId) pair.
function getDocId(shopId, resourceId) {
  return [shopId, resourceId].join('_');
}
// Address an aggregate doc via its composite (shopId, resourceId) key.
const docRef = collection.doc(getDocId(shopId, resourceId));
/**
 * Atomically folds one sample of `value` into the aggregate for
 * (shopId, resourceId), optionally bumping a per-category counter.
 * Runs inside a transaction so concurrent increments cannot lose counts.
 */
export async function incrementAggregate(shopId, resourceId, value, category = null) {
  const docRef = collection.doc(getDocId(shopId, resourceId));

  await firestore.runTransaction(async transaction => {
    // Transactions require all reads before any writes.
    const snapshot = await transaction.get(docRef);

    if (!snapshot.exists) {
      // First sample: seed the aggregate from this value alone.
      const seed = {
        shopId,
        resourceId,
        average: value,
        total: 1,
        updatedAt: FieldValue.serverTimestamp()
      };
      if (category) {
        seed.breakdown = {[category]: 1};
      }
      transaction.set(docRef, seed);
      return;
    }

    const current = snapshot.data();
    const nextTotal = current.total + 1;
    // Weighted running average: (old sum + new value) / new count.
    const nextAverage = (current.average * current.total + value) / nextTotal;

    const updates = {
      total: nextTotal,
      average: Math.round(nextAverage * 10) / 10, // keep one decimal place
      updatedAt: FieldValue.serverTimestamp()
    };
    if (category) {
      updates[`breakdown.${category}`] = FieldValue.increment(1);
    }
    transaction.update(docRef, updates);
  });
}
/**
 * Atomically removes one sample of `value` from the aggregate for
 * (shopId, resourceId), optionally decrementing a per-category counter.
 * No-ops when the aggregate doc does not exist.
 */
export async function decrementAggregate(shopId, resourceId, value, category = null) {
  const docRef = collection.doc(getDocId(shopId, resourceId));

  await firestore.runTransaction(async transaction => {
    const snapshot = await transaction.get(docRef);
    if (!snapshot.exists) {
      return; // nothing to decrement
    }

    const current = snapshot.data();
    const nextTotal = Math.max(0, current.total - 1);

    let updates;
    if (nextTotal === 0) {
      // Last sample removed: reset counters.
      updates = {
        total: 0,
        average: 0,
        updatedAt: FieldValue.serverTimestamp()
      };
    } else {
      // Remove this sample's weight from the running average.
      const remainingSum = current.average * current.total - value;
      updates = {
        total: nextTotal,
        average: Math.round((remainingSum / nextTotal) * 10) / 10,
        updatedAt: FieldValue.serverTimestamp()
      };
    }

    if (category) {
      updates[`breakdown.${category}`] = FieldValue.increment(-1);
    }
    transaction.update(docRef, updates);
  });
}
// Bulk read for collection/list pages: one getAll() RPC, keyed by
// resourceId, with zeroed defaults for aggregates that don't exist yet.
export async function getAggregatesBulk(shopId, resourceIds) {
  if (resourceIds.length === 0) {
    return {};
  }

  const refs = resourceIds.map(id => collection.doc(getDocId(shopId, id)));
  const snapshots = await firestore.getAll(...refs);

  return Object.fromEntries(
    snapshots.map((snap, index) => {
      const resourceId = resourceIds[index];
      const data = snap.exists ? snap.data() : getDefaultAggregate(shopId, resourceId);
      return [resourceId, data];
    })
  );
}
// Zeroed aggregate shape used when no doc exists yet for (shopId, resourceId).
function getDefaultAggregate(shopId, resourceId) {
  return {
    shopId,
    resourceId,
    average: 0,
    total: 0,
    breakdown: {}
  };
}
// For data repair or after bulk imports
export async function recalculateAggregate(shopId, resourceId, items, valueField = 'value') {
const breakdown = {};
let totalValue = 0;
items.forEach(item => {
const val = item[valueField];
totalValue += val;
if (item.category) {
breakdown[item.category] = (breakdown[item.category] || 0) + 1;
}
});
const total = items.length;
const average = total > 0 ? Math.round((totalValue / total) * 10) / 10 : 0;
await collection.doc(getDocId(shopId, resourceId)).set({
shopId,
resourceId,
average,
total,
breakdown,
updatedAt: FieldValue.serverTimestamp()
});
}
- Use transactions for increment/decrement (consistency)
- Handle non-existent docs (initialize defaults)
- Use FieldValue.increment() for counters
- Recalculate averages in transaction
- Round averages to 1 decimal place
- Provide bulk read for collection/list pages
- Include recalculate function for data repair