Commit d0f981e

Remove "can write very large batches" test. (#1510)
This reverts the test added in commit 20beaef but leaves the test re-ordering. The test took ~6s to run, causing some timeouts, and we're unlikely to regress large-batch support given that IndexedDb doesn't have inherent size limitations for us to work around.
1 parent 1620d6e commit d0f981e
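
The timeout mentioned in the commit message is the per-test budget of the Mocha-based integration suite. As context only, a slow test could instead be kept and given a larger budget. The sketch below is hypothetical and not part of this commit; it assumes Mocha's BDD globals are available, as they are in this suite, and the 10000 ms figure is an arbitrary placeholder.

// Hypothetical sketch, not part of this commit. A regular function (not an
// arrow function) is needed so that `this` is the Mocha test context.
it('can write very large batches', function () {
  this.timeout(10000); // placeholder: allow up to 10s instead of the default
  // ... build the ~3 MB batch and commit it, as in the removed test below ...
});

The commit chooses removal instead, on the grounds that large-batch support is unlikely to regress on IndexedDb.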

File tree

1 file changed: +0 -42 lines changed

packages/firestore/test/integration/api/batch_writes.test.ts

Lines changed: 0 additions & 42 deletions
@@ -16,7 +16,6 @@

 import * as firestore from '@firebase/firestore-types';
 import { expect } from 'chai';
-import { AutoId } from '../../../src/util/misc';

 import { EventsAccumulator } from '../util/events_accumulator';
 import firebase from '../util/firebase_export';
@@ -350,45 +349,4 @@ apiDescribe('Database batch writes', persistence => {
       });
     });
   });
-
-  it('can write very large batches', () => {
-    // On Android, SQLite Cursors are limited reading no more than 2 MB per row
-    // (despite being able to write very large values). This test verifies that
-    // the local MutationQueue is not subject to this limitation.
-
-    // Create a map containing nearly 1 MB of data. Note that if you use 1024
-    // below this will create a document larger than 1 MB, which will be
-    // rejected by the backend as too large.
-    let kb = 'a';
-    while (kb.length < 1000) {
-      kb += kb;
-    }
-    kb = kb.substr(0, 1000);
-    const values = {};
-    for (let i = 0; i < 1000; i++) {
-      values[AutoId.newId()] = kb;
-    }
-
-    return integrationHelpers.withTestCollection(
-      persistence,
-      {},
-      async collection => {
-        const doc = collection.doc('a');
-        const batch = doc.firestore.batch();
-
-        // Write a batch containing 3 copies of the data, creating a ~3 MB
-        // batch. Writing to the same document in a batch is allowed and so long
-        // as the net size of the document is under 1 MB the batch is allowed.
-        batch.set(doc, values);
-        for (let i = 0; i < 2; i++) {
-          batch.update(doc, values);
-        }
-
-        await batch.commit();
-
-        const snap = await doc.get();
-        expect(snap.data()).to.deep.equal(values);
-      }
-    );
-  });
 });
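
For reference, the pattern the deleted test exercised can be reproduced outside the test harness. The sketch below is hypothetical and not part of this commit; it uses the namespaced (v8-style) Web API that the test file uses, and the app config, collection name, and field names are placeholders.

// Hypothetical, standalone sketch; not part of this commit.
import firebase from 'firebase/app';
import 'firebase/firestore';

async function writeVeryLargeBatch(): Promise<void> {
  // Placeholder config; a real app supplies its own project settings.
  firebase.initializeApp({ projectId: 'demo-project' });
  const db = firebase.firestore();

  // Build roughly 1 KB of string data by repeated doubling, as the test did.
  let kb = 'a';
  while (kb.length < 1000) {
    kb += kb;
  }
  kb = kb.substr(0, 1000);

  // Nearly 1 MB of field data: 1000 fields of ~1 KB each. (The test used
  // AutoId.newId() for field names; numbered names are used here instead.)
  const values: { [field: string]: string } = {};
  for (let i = 0; i < 1000; i++) {
    values[`field${i}`] = kb;
  }

  // Three writes to the same document in one batch: ~3 MB of mutations in
  // total, while the resulting document itself stays under the 1 MB limit.
  const docRef = db.collection('widgets').doc('a');
  const batch = db.batch();
  batch.set(docRef, values);
  batch.update(docRef, values);
  batch.update(docRef, values);
  await batch.commit();
}

On the Web SDK this only exercises normal batch behavior; the removed test existed to guard against the Android SQLite 2 MB cursor-row limit described in its comment, which has no IndexedDb counterpart.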
