Update document index entries #3132


Merged (14 commits) on Nov 23, 2021

File: IndexEntry.java
@@ -14,44 +14,43 @@

package com.google.firebase.firestore.index;

/**
* Represents an index entry saved by the SDK in the local storage. Temporary placeholder, since
* we'll probably serialize the indexValue right away rather than store it.
*/
// TODO(indexing)
public class IndexEntry {
private final int indexId;
private final byte[] arrayValue;
private final byte[] directionalValue;
private final String uid;
private final String documentName;

public IndexEntry(
int indexId, byte[] arrayValue, byte[] directionalValue, String uid, String documentName) {
this.indexId = indexId;
this.arrayValue = arrayValue;
this.directionalValue = directionalValue;
this.uid = uid;
this.documentName = documentName;
}
import static com.google.firebase.firestore.util.Util.compareByteArrays;
import static com.google.firebase.firestore.util.Util.nullSafeCompare;

public int getIndexId() {
return indexId;
}
import com.google.auto.value.AutoValue;
import com.google.firebase.firestore.model.DocumentKey;
import com.google.firebase.firestore.util.Util;

public byte[] getArrayValue() {
return arrayValue;
}
/** Represents an index entry saved by the SDK in its local storage. */
@AutoValue
public abstract class IndexEntry implements Comparable<IndexEntry> {
Review comment from the PR author:
Made this an AutoValue so that we get hashCode/equals. I am also planning to keep this class, so I removed the TODO that said it was a placeholder.

cc @thebrianchen


public byte[] getDirectionalValue() {
return directionalValue;
public static IndexEntry create(
int indexId, DocumentKey documentKey, byte[] arrayValue, byte[] directionalValue) {
return new AutoValue_IndexEntry(indexId, documentKey, arrayValue, directionalValue);
}

public String getUid() {
return uid;
}
public abstract int getIndexId();

public abstract DocumentKey getDocumentKey();

@SuppressWarnings("mutable")
public abstract byte[] getArrayValue();

@SuppressWarnings("mutable")
public abstract byte[] getDirectionalValue();

@Override
public int compareTo(IndexEntry other) {
int cmp = Integer.compare(getIndexId(), other.getIndexId());
if (cmp != 0) return cmp;

cmp = getDocumentKey().compareTo(other.getDocumentKey());
if (cmp != 0) return cmp;

cmp = compareByteArrays(getDirectionalValue(), other.getDirectionalValue());
if (cmp != 0) return cmp;

public String getDocumentName() {
return documentName;
return nullSafeCompare(getArrayValue(), other.getArrayValue(), Util::compareByteArrays);
}
}
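
To illustrate the review comment above: because the class is now an @AutoValue that also implements Comparable, index entries get value-based equality and a deterministic ordering. A minimal usage sketch, assuming DocumentKey.fromPathString and the AutoValue-generated equals/hashCode; the class and values below are invented and not part of the PR:

import com.google.firebase.firestore.index.IndexEntry;
import com.google.firebase.firestore.model.DocumentKey;

import java.util.SortedSet;
import java.util.TreeSet;

class IndexEntryUsageSketch {
  static void demo() {
    DocumentKey key = DocumentKey.fromPathString("coll/doc");
    IndexEntry a = IndexEntry.create(1, key, new byte[] {7}, new byte[] {1, 2});
    IndexEntry b = IndexEntry.create(1, key, new byte[] {7}, new byte[] {1, 2});

    // AutoValue generates equals()/hashCode() over all four properties (byte arrays are
    // compared by content), so a and b are equal by value despite being distinct instances.
    boolean sameValue = a.equals(b) && a.hashCode() == b.hashCode();

    // compareTo() orders by index id, then document key, then the encoded byte arrays,
    // which lets a TreeSet de-duplicate identical entries.
    SortedSet<IndexEntry> entries = new TreeSet<>();
    entries.add(a);
    entries.add(b);
    // sameValue == true, entries.size() == 1
  }
}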

File: IndexManager.java
@@ -52,9 +52,6 @@ public interface IndexManager {
*/
List<ResourcePath> getCollectionParents(String collectionId);

/** Updates the index entries for the given document. */
void handleDocumentChange(@Nullable Document oldDocument, @Nullable Document newDocument);
Review comment from the PR author:
All updates will be handled by updateIndexEntries.
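
A rough sketch of the caller-side effect of this change (the caller below is hypothetical, not taken from this PR): code that previously indexed one new document through handleDocumentChange now hands it to updateIndexEntries as a one-element collection.

// Hypothetical caller, shown as if it lived next to IndexManager.
import com.google.firebase.firestore.model.Document;

import java.util.Collections;

class SingleDocumentIndexingSketch {
  private final IndexManager indexManager;

  SingleDocumentIndexingSketch(IndexManager indexManager) {
    this.indexManager = indexManager;
  }

  void indexNewDocument(Document newDocument) {
    // Previously: indexManager.handleDocumentChange(/* oldDocument= */ null, newDocument);
    indexManager.updateIndexEntries(Collections.singletonList(newDocument));
  }
}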


/**
* Adds a field path index.
*
@@ -100,9 +97,6 @@ public interface IndexManager {
*/
void updateCollectionGroup(String collectionGroup, SnapshotVersion readTime);

/**
* Updates the index entries for the provided documents and corresponding field indexes until the
* cap is reached.
*/
/** Updates the index entries for the provided documents. */
void updateIndexEntries(Collection<Document> documents);
}

File: MemoryIndexManager.java
@@ -50,11 +50,6 @@ public List<ResourcePath> getCollectionParents(String collectionId) {
return collectionParentsIndex.getEntries(collectionId);
}

@Override
public void handleDocumentChange(@Nullable Document oldDocument, @Nullable Document newDocument) {
// Field indices are not supported with memory persistence.
}

@Override
public void addFieldIndex(FieldIndex index) {
// Field indices are not supported with memory persistence.

File: SQLiteIndexManager.java
@@ -17,6 +17,7 @@
import static com.google.firebase.firestore.model.Values.isArray;
import static com.google.firebase.firestore.util.Assert.fail;
import static com.google.firebase.firestore.util.Assert.hardAssert;
import static com.google.firebase.firestore.util.Util.diffCollections;
import static com.google.firebase.firestore.util.Util.repeatSequence;
import static java.lang.Math.max;

@@ -30,6 +31,7 @@
import com.google.firebase.firestore.index.DirectionalIndexByteEncoder;
import com.google.firebase.firestore.index.FirestoreIndexValueWriter;
import com.google.firebase.firestore.index.IndexByteEncoder;
import com.google.firebase.firestore.index.IndexEntry;
import com.google.firebase.firestore.model.Document;
import com.google.firebase.firestore.model.DocumentKey;
import com.google.firebase.firestore.model.FieldIndex;
@@ -52,6 +54,8 @@
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;

/** A persisted implementation of IndexManager. */
final class SQLiteIndexManager implements IndexManager {
@@ -220,11 +224,30 @@ public void updateIndexEntries(Collection<Document> documents) {
for (Document document : documents) {
Collection<FieldIndex> fieldIndexes = getFieldIndexes(document.getKey().getCollectionGroup());
for (FieldIndex fieldIndex : fieldIndexes) {
writeEntries(document, fieldIndex);
SortedSet<IndexEntry> existingEntries =
getExistingIndexEntries(document.getKey(), fieldIndex);
SortedSet<IndexEntry> newEntries = computeIndexEntries(document, fieldIndex);
if (!existingEntries.equals(newEntries)) {
updateEntries(document, existingEntries, newEntries);
}
}
}
}

/**
* Updates the index entries for the provided document by deleting entries that are no longer
* referenced in {@code newEntries} and adding entries that are not yet present.
*/
private void updateEntries(
Document document, SortedSet<IndexEntry> existingEntries, SortedSet<IndexEntry> newEntries) {
Logger.debug(TAG, "Updating index entries for document '%s'", document.getKey());
diffCollections(
existingEntries,
newEntries,
entry -> addIndexEntry(document, entry),
entry -> deleteIndexEntry(document, entry));
}
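
Util.diffCollections itself is not shown in this diff. A minimal sketch of the contract updateEntries relies on, under the assumption that it reports elements present only in the second set to the add callback and elements present only in the first set to the remove callback (the helper name and the use of java.util.function.Consumer here are illustrative):

// Assumed contract of Util.diffCollections, sketched for illustration only.
private static <T> void diffCollectionsSketch(
    SortedSet<T> existing,
    SortedSet<T> updated,
    java.util.function.Consumer<T> onAdd,
    java.util.function.Consumer<T> onRemove) {
  // Entries present only in the updated set are new and need to be inserted.
  for (T entry : updated) {
    if (!existing.contains(entry)) {
      onAdd.accept(entry);
    }
  }
  // Entries present only in the existing set are stale and need to be deleted.
  for (T entry : existing) {
    if (!updated.contains(entry)) {
      onRemove.accept(entry);
    }
  }
}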

@Override
public Collection<FieldIndex> getFieldIndexes(String collectionGroup) {
hardAssert(started, "IndexManager not started");
@@ -264,70 +287,74 @@ private void memoizeIndex(FieldIndex fieldIndex) {
Math.max(memoizedMaxSequenceNumber, fieldIndex.getIndexState().getSequenceNumber());
}

/** Persists the index entries for the given document. */
private void writeEntries(Document document, FieldIndex fieldIndex) {
/** Creates the index entries for the given document. */
private SortedSet<IndexEntry> computeIndexEntries(Document document, FieldIndex fieldIndex) {
SortedSet<IndexEntry> result = new TreeSet<>();

@Nullable byte[] directionalValue = encodeDirectionalElements(fieldIndex, document);
if (directionalValue == null) {
return;
return result;
}

@Nullable FieldIndex.Segment arraySegment = fieldIndex.getArraySegment();
if (arraySegment != null) {
Value value = document.getField(arraySegment.getFieldPath());
if (!isArray(value)) {
return;
}

for (Value arrayValue : value.getArrayValue().getValuesList()) {
addSingleEntry(
document, fieldIndex.getIndexId(), encodeSingleElement(arrayValue), directionalValue);
if (isArray(value)) {
for (Value arrayValue : value.getArrayValue().getValuesList()) {
result.add(
IndexEntry.create(
fieldIndex.getIndexId(),
document.getKey(),
encodeSingleElement(arrayValue),
directionalValue));
}
}
} else {
addSingleEntry(document, fieldIndex.getIndexId(), /* arrayValue= */ null, directionalValue);
result.add(
IndexEntry.create(
fieldIndex.getIndexId(), document.getKey(), new byte[] {}, directionalValue));
}
}

@Override
public void handleDocumentChange(@Nullable Document oldDocument, @Nullable Document newDocument) {
hardAssert(started, "IndexManager not started");
hardAssert(oldDocument == null, "Support for updating documents is not yet available");
hardAssert(newDocument != null, "Support for removing documents is not yet available");

DocumentKey documentKey = newDocument.getKey();
Collection<FieldIndex> fieldIndices = getFieldIndexes(documentKey.getCollectionGroup());
addIndexEntry(newDocument, fieldIndices);
}

/**
* Writes index entries for the field indexes that apply to the provided document.
*
* @param document The provided document to index.
* @param fieldIndexes A list of field indexes to apply.
*/
private void addIndexEntry(Document document, Collection<FieldIndex> fieldIndexes) {
for (FieldIndex fieldIndex : fieldIndexes) {
writeEntries(document, fieldIndex);
}
return result;
}
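
A worked illustration of the two branches above, with an invented document and index:

// Hypothetical example (document, index id, and encoded bytes are invented):
//
//   document "coll/doc1" with fields { tags: ["a", "b"], priority: 7 }
//   field index 1 with an array segment on "tags" and an ordered segment on "priority"
//
// computeIndexEntries returns two entries that share the same directional value and
// differ only in the encoded array element:
//
//   IndexEntry.create(1, key("coll/doc1"), encodeSingleElement("a"), directionalBytes)
//   IndexEntry.create(1, key("coll/doc1"), encodeSingleElement("b"), directionalBytes)
//
// An index without an array segment takes the else branch and produces a single entry
// whose array value is the empty byte array.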

/** Adds a single index entry into the index entries table. */
private void addSingleEntry(
Document document, int indexId, @Nullable Object arrayValue, Object directionalValue) {
if (Logger.isDebugEnabled()) {
Logger.debug(
TAG, "Adding index values for document '%s' to index '%s'", document.getKey(), indexId);
}

private void addIndexEntry(Document document, IndexEntry indexEntry) {
db.execute(
"INSERT INTO index_entries (index_id, uid, array_value, directional_value, document_name) "
+ "VALUES(?, ?, ?, ?, ?)",
indexId,
indexEntry.getIndexId(),
uid,
arrayValue,
directionalValue,
indexEntry.getArrayValue(),
indexEntry.getDirectionalValue(),
document.getKey().toString());
}

private void deleteIndexEntry(Document document, IndexEntry indexEntry) {
db.execute(
"DELETE FROM index_entries WHERE index_id = ? AND uid = ? AND array_value = ? "
+ "AND directional_value = ? AND document_name = ?",
indexEntry.getIndexId(),
uid,
indexEntry.getArrayValue(),
indexEntry.getDirectionalValue(),
document.getKey().toString());
}

private SortedSet<IndexEntry> getExistingIndexEntries(
DocumentKey documentKey, FieldIndex fieldIndex) {
SortedSet<IndexEntry> results = new TreeSet<>();
db.query(
"SELECT array_value, directional_value FROM index_entries "
+ "WHERE index_id = ? AND document_name = ? AND uid = ?")
.binding(fieldIndex.getIndexId(), documentKey.toString(), uid)
.forEach(
row ->
results.add(
IndexEntry.create(
fieldIndex.getIndexId(), documentKey, row.getBlob(0), row.getBlob(1))));
return results;
}

@Override
public Set<DocumentKey> getDocumentsMatchingTarget(FieldIndex fieldIndex, Target target) {
hardAssert(started, "IndexManager not started");

File: SQLiteSchema.java
@@ -377,23 +377,23 @@ private void createFieldIndex() {
// Per index per user state to track the backfill state for each index
db.execSQL(
"CREATE TABLE index_state ("
+ "uid TEXT, "
+ "index_id INTEGER, "
+ "uid TEXT, "
+ "sequence_number INTEGER, " // Specifies the order of updates
+ "read_time_seconds INTEGER, " // Read time of last processed document
+ "read_time_nanos INTEGER, "
+ "PRIMARY KEY (uid, index_id))");
+ "PRIMARY KEY (index_id, uid))");

// The index entry table stores the encoded entries for all fields.
// The table only has a single primary index. `array_value` should be set for all queries.
db.execSQL(
"CREATE TABLE index_entries ("
+ "uid TEXT, " // user id
+ "index_id INTEGER, " // The index_id of the field index creating this entry
+ "uid TEXT, "
+ "array_value BLOB, " // index values for ArrayContains/ArrayContainsAny
+ "directional_value BLOB, " // index values for equality and inequalities
+ "document_name TEXT, "
+ "PRIMARY KEY (uid, index_id, array_value, directional_value, document_name))");
+ "PRIMARY KEY (index_id, uid, array_value, directional_value, document_name))");

db.execSQL(
"CREATE INDEX read_time ON remote_documents(read_time_seconds, read_time_nanos)");