 
 import static com.google.firebase.firestore.util.Assert.fail;
 
+import android.database.Cursor;
 import androidx.annotation.Nullable;
 import com.google.firebase.firestore.auth.User;
 import com.google.firebase.firestore.model.DocumentKey;
 import com.google.firebase.firestore.model.ResourcePath;
 import com.google.firebase.firestore.model.mutation.Mutation;
 import com.google.firebase.firestore.model.mutation.Overlay;
+import com.google.firebase.firestore.util.BackgroundQueue;
+import com.google.firebase.firestore.util.Executors;
 import com.google.firestore.v1.Write;
 import com.google.protobuf.InvalidProtocolBufferException;
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
+import java.util.SortedSet;
+import java.util.concurrent.Executor;
 
 public class SQLiteDocumentOverlayCache implements DocumentOverlayCache {
   private final SQLitePersistence db;
@@ -47,7 +55,47 @@ public Overlay getOverlay(DocumentKey key) {
             "SELECT overlay_mutation, largest_batch_id FROM document_overlays "
                 + "WHERE uid = ? AND collection_path = ? AND document_id = ?")
         .binding(uid, collectionPath, documentId)
-        .firstValue(this::decodeOverlay);
+        .firstValue(row -> this.decodeOverlay(row.getBlob(0), row.getInt(1)));
+  }
+
+  @Override
+  public Map<DocumentKey, Overlay> getOverlays(SortedSet<DocumentKey> keys) {
+    Map<DocumentKey, Overlay> result = new HashMap<>();
+
+    ResourcePath currentCollectionPath = ResourcePath.EMPTY;
+    List<Object> currentDocumentIds = new ArrayList<>();
+    for (DocumentKey key : keys) {
+      if (!currentCollectionPath.equals(key.getCollectionPath())) {
+        processSingleCollection(result, currentCollectionPath, currentDocumentIds);
+        currentDocumentIds = new ArrayList<>();
+      }
+      currentCollectionPath = key.getCollectionPath();
+      currentDocumentIds.add(key.getDocumentId());
+    }
+
+    processSingleCollection(result, currentCollectionPath, currentDocumentIds);
+    return result;
+  }
+
+  /** Reads the overlays for the documents in a single collection. */
+  private void processSingleCollection(
+      Map<DocumentKey, Overlay> result, ResourcePath collectionPath, List<Object> documentIds) {
+    SQLitePersistence.LongQuery longQuery =
+        new SQLitePersistence.LongQuery(
+            db,
+            "SELECT overlay_mutation, largest_batch_id FROM document_overlays "
+                + "WHERE uid = ? AND collection_path = ? AND document_id IN (",
+            Arrays.asList(uid, EncodedPath.encode(collectionPath)),
+            documentIds,
+            ")");
+
+    BackgroundQueue backgroundQueue = new BackgroundQueue();
+    while (longQuery.hasMoreSubqueries()) {
+      longQuery
+          .performNextSubquery()
+          .forEach(row -> processOverlaysInBackground(backgroundQueue, result, row));
+    }
+    backgroundQueue.drain();
   }
 
   private void saveOverlay(int largestBatchId, DocumentKey key, @Nullable Mutation mutation) {
@@ -83,46 +131,45 @@ public void removeOverlaysForBatchId(int batchId) {
 
   @Override
   public Map<DocumentKey, Overlay> getOverlays(ResourcePath collection, int sinceBatchId) {
-    String collectionPath = EncodedPath.encode(collection);
-
     Map<DocumentKey, Overlay> result = new HashMap<>();
+    BackgroundQueue backgroundQueue = new BackgroundQueue();
     db.query(
             "SELECT overlay_mutation, largest_batch_id FROM document_overlays "
                 + "WHERE uid = ? AND collection_path = ? AND largest_batch_id > ?")
-        .binding(uid, collectionPath, sinceBatchId)
-        .forEach(
-            row -> {
-              Overlay overlay = decodeOverlay(row);
-              result.put(overlay.getKey(), overlay);
-            });
-
+        .binding(uid, EncodedPath.encode(collection), sinceBatchId)
+        .forEach(row -> processOverlaysInBackground(backgroundQueue, result, row));
+    backgroundQueue.drain();
     return result;
   }
 
   @Override
   public Map<DocumentKey, Overlay> getOverlays(
       String collectionGroup, int sinceBatchId, int count) {
     Map<DocumentKey, Overlay> result = new HashMap<>();
-    Overlay[] lastOverlay = new Overlay[] {null};
+    String[] lastCollectionPath = new String[] {null};
+    String[] lastDocumentPath = new String[] {null};
+    int[] lastLargestBatchId = new int[] {0};
 
+    BackgroundQueue backgroundQueue = new BackgroundQueue();
     db.query(
-            "SELECT overlay_mutation, largest_batch_id FROM document_overlays "
+            "SELECT overlay_mutation, largest_batch_id, collection_path, document_id "
+                + " FROM document_overlays "
                 + "WHERE uid = ? AND collection_group = ? AND largest_batch_id > ? "
                 + "ORDER BY largest_batch_id, collection_path, document_id LIMIT ?")
         .binding(uid, collectionGroup, sinceBatchId, count)
         .forEach(
             row -> {
-              lastOverlay[0] = decodeOverlay(row);
-              result.put(lastOverlay[0].getKey(), lastOverlay[0]);
+              lastLargestBatchId[0] = row.getInt(1);
+              lastCollectionPath[0] = row.getString(2);
+              lastDocumentPath[0] = row.getString(3);
+              processOverlaysInBackground(backgroundQueue, result, row);
             });
 
-    if (lastOverlay[0] == null) {
+    if (lastCollectionPath[0] == null) {
       return result;
     }
 
     // Finish batch
-    DocumentKey key = lastOverlay[0].getKey();
-    String encodedCollectionPath = EncodedPath.encode(key.getCollectionPath());
     db.query(
             "SELECT overlay_mutation, largest_batch_id FROM document_overlays "
                 + "WHERE uid = ? AND collection_group = ? "
@@ -131,23 +178,36 @@ public Map<DocumentKey, Overlay> getOverlays(
         .binding(
             uid,
             collectionGroup,
-            encodedCollectionPath,
-            encodedCollectionPath,
-            key.getDocumentId(),
-            lastOverlay[0].getLargestBatchId())
-        .forEach(
-            row -> {
-              Overlay overlay = decodeOverlay(row);
-              result.put(overlay.getKey(), overlay);
-            });
+            lastCollectionPath[0],
+            lastCollectionPath[0],
+            lastDocumentPath[0],
+            lastLargestBatchId[0])
+        .forEach(row -> processOverlaysInBackground(backgroundQueue, result, row));
 
+    backgroundQueue.drain();
     return result;
   }
 
-  private Overlay decodeOverlay(android.database.Cursor row) {
+  private void processOverlaysInBackground(
+      BackgroundQueue backgroundQueue, Map<DocumentKey, Overlay> results, Cursor row) {
+    byte[] rawMutation = row.getBlob(0);
+    int largestBatchId = row.getInt(1);
+
+    // Since scheduling background tasks incurs overhead, we only dispatch to a
+    // background thread if there are still some documents remaining.
+    Executor executor = row.isLast() ? Executors.DIRECT_EXECUTOR : backgroundQueue;
+    executor.execute(
+        () -> {
+          Overlay document = decodeOverlay(rawMutation, largestBatchId);
+          synchronized (results) {
+            results.put(document.getKey(), document);
+          }
+        });
+  }
+
+  private Overlay decodeOverlay(byte[] rawMutation, int largestBatchId) {
     try {
-      Write write = Write.parseFrom(row.getBlob(0));
-      int largestBatchId = row.getInt(1);
+      Write write = Write.parseFrom(rawMutation);
       Mutation mutation = serializer.decodeMutation(write);
       return Overlay.create(largestBatchId, mutation);
     } catch (InvalidProtocolBufferException e) {
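
A minimal standalone sketch of the two techniques the patch relies on: grouping a sorted set of document keys into one "document_id IN (...)" query per collection, and decoding each row on a background executor that is drained before the result map is returned. The names below (BatchedOverlayReadSketch, Key, decode, processCollection, fakeDb) are hypothetical stand-ins, not Firestore APIs, and a fixed thread pool with shutdown/awaitTermination only approximates BackgroundQueue.drain().

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

// Hypothetical, self-contained sketch: batch reads per collection, decode rows off-thread.
final class BatchedOverlayReadSketch {

  /** Stand-in for DocumentKey: a collection path plus a document id, ordered by collection. */
  static final class Key implements Comparable<Key> {
    final String collectionPath;
    final String documentId;

    Key(String collectionPath, String documentId) {
      this.collectionPath = collectionPath;
      this.documentId = documentId;
    }

    @Override
    public int compareTo(Key other) {
      int cmp = collectionPath.compareTo(other.collectionPath);
      return cmp != 0 ? cmp : documentId.compareTo(other.documentId);
    }
  }

  /** Stand-in for the proto decoding that the real code offloads to BackgroundQueue. */
  static String decode(byte[] rawMutation) {
    return new String(rawMutation);
  }

  static Map<Key, String> getOverlays(SortedSet<Key> keys, Map<Key, byte[]> fakeDb)
      throws InterruptedException {
    // A synchronized map plays the role of the `synchronized (results)` block in the patch.
    Map<Key, String> results = Collections.synchronizedMap(new HashMap<Key, String>());
    ExecutorService backgroundQueue = Executors.newFixedThreadPool(4);

    // Keys are sorted, so keys of the same collection are contiguous; flush a batch whenever
    // the collection path changes, mirroring getOverlays(SortedSet<DocumentKey>) above.
    String currentCollection = null;
    List<Key> currentBatch = new ArrayList<>();
    for (Key key : keys) {
      if (currentCollection != null && !currentCollection.equals(key.collectionPath)) {
        processCollection(currentBatch, fakeDb, results, backgroundQueue);
        currentBatch = new ArrayList<>();
      }
      currentCollection = key.collectionPath;
      currentBatch.add(key);
    }
    processCollection(currentBatch, fakeDb, results, backgroundQueue);

    // Rough equivalent of backgroundQueue.drain(): wait for every decode task to finish.
    backgroundQueue.shutdown();
    backgroundQueue.awaitTermination(1, TimeUnit.MINUTES);
    return results;
  }

  /** One batch = one collection; the real code issues "... WHERE document_id IN (?, ...)" here. */
  private static void processCollection(
      List<Key> batch, Map<Key, byte[]> fakeDb, Map<Key, String> results, ExecutorService executor) {
    for (Key key : batch) {
      byte[] row = fakeDb.get(key);
      if (row != null) {
        executor.execute(() -> results.put(key, decode(row)));
      }
    }
  }
}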