
Commit 760cde4

[bq] 0.2 introduce integration tests for writers
1 parent 7946573 commit 760cde4

File tree

10 files changed: +360 -54 lines

spring-batch-bigquery/pom.xml

Lines changed: 26 additions & 0 deletions
@@ -51,6 +51,7 @@
 
         <!-- Dependent on Spring Batch core -->
         <java.version>17</java.version>
+        <logback.version>1.4.5</logback.version>
     </properties>
 
     <dependencies>
@@ -96,6 +97,25 @@
             <version>4.9.0</version>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-core</artifactId>
+            <version>${logback.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-classic</artifactId>
+            <version>${logback.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+            <version>2.0.6</version>
+            <scope>test</scope>
+        </dependency>
+
 
     </dependencies>
 
@@ -117,6 +137,12 @@
             <groupId>org.apache.maven.plugins</groupId>
             <artifactId>maven-surefire-plugin</artifactId>
             <version>2.22.2</version>
+            <configuration>
+                <includes>
+                    <!-- Integration tests are omitted because they are designed to be run locally -->
+                    <include>**/unit/**</include>
+                </includes>
+            </configuration>
         </plugin>
 
         <!-- Generate javadoc and source jars -->

spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/package-info.java

Lines changed: 3 additions & 0 deletions
@@ -32,6 +32,9 @@
  * Take into account that BigQuery has rate limits, and it is very easy to exceed those in a concurrent environment.
  * @see <a href="https://cloud.google.com/bigquery/quotas">BigQuery Quotas &amp; Limits</a>
  *
+ * Also worth mentioning that you should ensure the ordering of the fields in the DTO that you are going to send to BigQuery.
+ * In case of CSV/JSON and Jackson, consider using {@link com.fasterxml.jackson.annotation.JsonPropertyOrder}.
+ *
  * @author Volodymyr Perebykivskyi
  * @since 0.1.0
  * @see <a href="https://cloud.google.com/bigquery/">Google BigQuery</a>
spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/integration/package-info.java

Lines changed: 11 additions & 0 deletions (new file)

/**
 * In order to launch these tests you should provide a way to authorize to Google BigQuery.
 * A simple way is to create a service account, store the credentials as a JSON file and provide an environment variable.
 * Example: GOOGLE_APPLICATION_CREDENTIALS=/home/dgray/Downloads/bq-key.json
 * @see <a href="https://cloud.google.com/bigquery/docs/quickstarts/quickstart-client-libraries#before-you-begin">Authentication</a>
 *
 * Test names should follow this pattern: test1, test2, testN.
 * So later in BigQuery you will see the generated table name: csv_test1, csv_test2, csv_testN.
 * This way it will be easier to trace errors in BigQuery.
 */
package org.springframework.batch.extensions.bigquery.integration;
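For reference, a minimal sketch (the class name and key-file path are hypothetical, not part of this commit) of the two usual ways to obtain the BigQuery client these tests rely on:

import com.google.auth.oauth2.GoogleCredentials;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;

import java.io.FileInputStream;
import java.io.IOException;

// Hypothetical helper, shown only to illustrate authentication options.
public class BigQueryClientFactory {

    // Relies on GOOGLE_APPLICATION_CREDENTIALS, exactly as the integration tests below do.
    public static BigQuery defaultClient() {
        return BigQueryOptions.getDefaultInstance().getService();
    }

    // Alternative sketch: load the service account key explicitly from a path.
    public static BigQuery fromKeyFile(String keyFilePath) throws IOException {
        GoogleCredentials credentials = GoogleCredentials.fromStream(new FileInputStream(keyFilePath));
        return BigQueryOptions.newBuilder()
                .setCredentials(credentials)
                .build()
                .getService();
    }

}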
spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/integration/writer/BigQueryCsvItemWriterTest.java

Lines changed: 85 additions & 0 deletions (new file)

/*
 * Copyright 2002-2022 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.batch.extensions.bigquery.integration.writer;

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.Dataset;
import com.google.cloud.bigquery.FormatOptions;
import com.google.cloud.bigquery.JobId;
import com.google.cloud.bigquery.Table;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableResult;
import org.apache.commons.lang3.math.NumberUtils;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import org.springframework.batch.extensions.bigquery.integration.writer.base.BaseBigQueryItemWriterTest;
import org.springframework.batch.extensions.bigquery.writer.BigQueryCsvItemWriter;
import org.springframework.batch.extensions.bigquery.writer.builder.BigQueryCsvItemWriterBuilder;
import org.springframework.batch.item.Chunk;

import java.util.concurrent.atomic.AtomicReference;

@Tag("csv")
public class BigQueryCsvItemWriterTest extends BaseBigQueryItemWriterTest {

    @Test
    void test1(TestInfo testInfo) throws Exception {
        AtomicReference<JobId> jobId = new AtomicReference<>();

        BigQueryCsvItemWriter<PersonDto> writer = new BigQueryCsvItemWriterBuilder<PersonDto>()
                .bigQuery(bigQuery)
                .writeChannelConfig(generateConfiguration(testInfo, FormatOptions.csv()))
                .jobConsumer(j -> jobId.set(j.getJobId()))
                .build();

        writer.afterPropertiesSet();

        Chunk<PersonDto> chunk = Chunk.of(new PersonDto("Volodymyr", 27), new PersonDto("Oleksandra", 26));
        writer.write(chunk);

        waitForJobToFinish(jobId.get());

        Dataset dataset = bigQuery.getDataset(DATASET);
        Table table = bigQuery.getTable(TableId.of(DATASET, getTableName(testInfo)));
        TableId tableId = table.getTableId();
        TableResult tableResult = bigQuery.listTableData(tableId, BigQuery.TableDataListOption.pageSize(2L));

        Assertions.assertNotNull(dataset.getDatasetId());
        Assertions.assertNotNull(tableId);
        Assertions.assertEquals(chunk.size(), tableResult.getTotalRows());

        tableResult
                .getValues()
                .forEach(field -> {
                    Assertions.assertTrue(
                            chunk.getItems().stream().map(PersonDto::name).anyMatch(name -> field.get(NumberUtils.INTEGER_ZERO).getStringValue().equals(name))
                    );

                    boolean ageCondition = chunk
                            .getItems()
                            .stream()
                            .map(PersonDto::age)
                            .map(Long::valueOf)
                            .anyMatch(age -> age.compareTo(field.get(NumberUtils.INTEGER_ONE).getLongValue()) == NumberUtils.INTEGER_ZERO);

                    Assertions.assertTrue(ageCondition);
                });
    }

}
spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/integration/writer/BigQueryJsonItemWriterTest.java

Lines changed: 85 additions & 0 deletions (new file)

/*
 * Copyright 2002-2022 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.batch.extensions.bigquery.integration.writer;

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.Dataset;
import com.google.cloud.bigquery.FormatOptions;
import com.google.cloud.bigquery.JobId;
import com.google.cloud.bigquery.Table;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableResult;
import org.apache.commons.lang3.math.NumberUtils;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import org.springframework.batch.extensions.bigquery.integration.writer.base.BaseBigQueryItemWriterTest;
import org.springframework.batch.extensions.bigquery.writer.BigQueryJsonItemWriter;
import org.springframework.batch.extensions.bigquery.writer.builder.BigQueryJsonItemWriterBuilder;
import org.springframework.batch.item.Chunk;

import java.util.concurrent.atomic.AtomicReference;

@Tag("json")
public class BigQueryJsonItemWriterTest extends BaseBigQueryItemWriterTest {

    @Test
    void test1(TestInfo testInfo) throws Exception {
        AtomicReference<JobId> jobId = new AtomicReference<>();

        BigQueryJsonItemWriter<PersonDto> writer = new BigQueryJsonItemWriterBuilder<PersonDto>()
                .bigQuery(bigQuery)
                .writeChannelConfig(generateConfiguration(testInfo, FormatOptions.json()))
                .jobConsumer(j -> jobId.set(j.getJobId()))
                .build();

        writer.afterPropertiesSet();

        Chunk<PersonDto> chunk = Chunk.of(new PersonDto("Viktor", 57), new PersonDto("Nina", 57));
        writer.write(chunk);

        waitForJobToFinish(jobId.get());

        Dataset dataset = bigQuery.getDataset(DATASET);
        Table table = bigQuery.getTable(TableId.of(DATASET, getTableName(testInfo)));
        TableId tableId = table.getTableId();
        TableResult tableResult = bigQuery.listTableData(tableId, BigQuery.TableDataListOption.pageSize(2L));

        Assertions.assertNotNull(dataset.getDatasetId());
        Assertions.assertNotNull(tableId);
        Assertions.assertEquals(chunk.size(), tableResult.getTotalRows());

        tableResult
                .getValues()
                .forEach(field -> {
                    Assertions.assertTrue(
                            chunk.getItems().stream().map(PersonDto::name).anyMatch(name -> field.get(NumberUtils.INTEGER_ZERO).getStringValue().equals(name))
                    );

                    boolean ageCondition = chunk
                            .getItems()
                            .stream()
                            .map(PersonDto::age)
                            .map(Long::valueOf)
                            .anyMatch(age -> age.compareTo(field.get(NumberUtils.INTEGER_ONE).getLongValue()) == NumberUtils.INTEGER_ZERO);

                    Assertions.assertTrue(ageCondition);
                });
    }

}
spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/integration/writer/base/BaseBigQueryItemWriterTest.java

Lines changed: 103 additions & 0 deletions (new file)

/*
 * Copyright 2002-2022 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.batch.extensions.bigquery.integration.writer.base;

import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.DatasetInfo;
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.FormatOptions;
import com.google.cloud.bigquery.JobId;
import com.google.cloud.bigquery.JobStatus;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.StandardSQLTypeName;
import com.google.cloud.bigquery.StandardTableDefinition;
import com.google.cloud.bigquery.TableDefinition;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableInfo;
import com.google.cloud.bigquery.WriteChannelConfiguration;
import org.apache.commons.lang3.BooleanUtils;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.TestInfo;

import java.lang.reflect.Method;
import java.util.Objects;

public abstract class BaseBigQueryItemWriterTest {

    protected static final String DATASET = "spring_extensions";

    protected final BigQuery bigQuery = BigQueryOptions.getDefaultInstance().getService();

    private static final String TABLE_PATTERN = "%s_%s";

    @BeforeEach
    void prepareTest(TestInfo testInfo) {
        if (Objects.isNull(bigQuery.getDataset(DATASET))) {
            bigQuery.create(DatasetInfo.of(DATASET));
        }

        if (Objects.isNull(bigQuery.getTable(DATASET, getTableName(testInfo)))) {
            TableDefinition tableDefinition = StandardTableDefinition.of(PersonDto.getBigQuerySchema());
            bigQuery.create(TableInfo.of(TableId.of(DATASET, getTableName(testInfo)), tableDefinition));
        }
    }

    @AfterEach
    void cleanupTest(TestInfo testInfo) {
        bigQuery.delete(TableId.of(DATASET, getTableName(testInfo)));
    }

    protected String getTableName(TestInfo testInfo) {
        return String.format(
                TABLE_PATTERN,
                testInfo.getTags().stream().findFirst().orElseThrow(),
                testInfo.getTestMethod().map(Method::getName).orElseThrow()
        );
    }

    protected WriteChannelConfiguration generateConfiguration(TestInfo testInfo, FormatOptions formatOptions) {
        return WriteChannelConfiguration
                .newBuilder(TableId.of(DATASET, getTableName(testInfo)))
                .setSchema(PersonDto.getBigQuerySchema())
                .setAutodetect(false)
                .setFormatOptions(formatOptions)
                .build();
    }

    protected void waitForJobToFinish(JobId jobId) {
        JobStatus status = bigQuery.getJob(jobId).getStatus();

        while (BooleanUtils.isFalse(JobStatus.State.DONE.equals(status.getState()))) {
            status = bigQuery.getJob(jobId).getStatus();
        }
    }

    @JsonPropertyOrder(value = {"name", "age"})
    public record PersonDto(String name, Integer age) {

        public static Schema getBigQuerySchema() {
            Field nameField = Field.newBuilder("name", StandardSQLTypeName.STRING).build();
            Field ageField = Field.newBuilder("age", StandardSQLTypeName.INT64).build();
            return Schema.of(nameField, ageField);
        }

    }

}
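The waitForJobToFinish helper above polls the job status in a tight loop. As a non-authoritative alternative, a minimal sketch using the client library's blocking Job#waitFor (assuming the same bigQuery field and an import of com.google.cloud.bigquery.Job):

    // Sketch only, not part of this commit: same intent as waitForJobToFinish above,
    // but delegating the polling to the BigQuery client.
    protected void waitForJobToFinish(JobId jobId) throws InterruptedException {
        Job job = bigQuery.getJob(jobId);
        if (job != null) {
            // waitFor() blocks until the job reaches the DONE state.
            job.waitFor();
        }
    }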
spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/base/AbstractBigQueryTest.java

Lines changed: 24 additions & 0 deletions (new file)

package org.springframework.batch.extensions.bigquery.unit.base;

import com.google.cloud.bigquery.BigQuery;
import org.mockito.Mockito;

public abstract class AbstractBigQueryTest {

    protected BigQuery prepareMockedBigQuery() {
        BigQuery mockedBigQuery = Mockito.mock(BigQuery.class);

        Mockito
                .when(mockedBigQuery.getTable(Mockito.any()))
                .thenReturn(null);

        Mockito
                .when(mockedBigQuery.getDataset(Mockito.anyString()))
                .thenReturn(null);

        return mockedBigQuery;
    }

    public record PersonDto(String name) {}

}
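A minimal sketch (hypothetical, not part of this commit) of how a unit test might consume this base class; it exercises only the stubbed mock and never calls a real BigQuery endpoint:

package org.springframework.batch.extensions.bigquery.unit;

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.TableId;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.springframework.batch.extensions.bigquery.unit.base.AbstractBigQueryTest;

// Hypothetical example: verifies the stubbed behaviour of the mocked client.
class MockedBigQueryTest extends AbstractBigQueryTest {

    @Test
    void mockedClientReturnsNoDatasetOrTable() {
        BigQuery bigQuery = prepareMockedBigQuery();

        // Both lookups were stubbed to return null in AbstractBigQueryTest.
        Assertions.assertNull(bigQuery.getDataset("spring_extensions"));
        Assertions.assertNull(bigQuery.getTable(TableId.of("spring_extensions", "csv_test1")));
    }

}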
