Skip to content

Commit f1fc918

Browse files
[bigtable] Update region tags for reads and filters (#3148)
* Update read region tags * Update region tags for filters * Move region tags in filters * Move region tags out of class * Update tags for java * change generic to print * update java bigtable filters region tags * make into one sample * untrack hbase-test * change function to code * lint everything
1 parent a3453bf commit f1fc918

File tree

19 files changed

+144
-221
lines changed

19 files changed

+144
-221
lines changed

bigtable/beam/helloworld/pom.xml

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,16 @@
2323
<artifactId>helloworld</artifactId>
2424
<version>1.0-SNAPSHOT</version>
2525

26+
<!--
27+
The parent pom defines common style checks and testing strategies for our samples.
28+
Removing or replacing it should not affect the execution of the samples in any way.
29+
-->
30+
<parent>
31+
<groupId>com.google.cloud.samples</groupId>
32+
<artifactId>shared-configuration</artifactId>
33+
<version>1.0.18</version>
34+
</parent>
35+
2636
<properties>
2737
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
2838
<maven.compiler.source>1.8</maven.compiler.source>

bigtable/beam/keyviz-art/pom.xml

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,15 @@
2323
<artifactId>keyviz-art</artifactId>
2424
<version>1.0-SNAPSHOT</version>
2525

26+
<!--
27+
The parent pom defines common style checks and testing strategies for our samples.
28+
Removing or replacing it should not affect the execution of the samples in any way.
29+
-->
30+
<parent>
31+
<groupId>com.google.cloud.samples</groupId>
32+
<artifactId>shared-configuration</artifactId>
33+
<version>1.0.18</version>
34+
</parent>
2635

2736
<properties>
2837
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>

bigtable/beam/keyviz-art/src/test/java/KeyVizArtTest.java

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -18,15 +18,15 @@
1818
import static org.junit.Assert.assertEquals;
1919
import static org.junit.Assert.assertNotNull;
2020

21-
import keyviz.ReadData.ReadDataOptions;
22-
import keyviz.ReadData.ReadFromTableFn;
2321
import com.google.cloud.bigtable.beam.CloudBigtableTableConfiguration;
2422
import com.google.cloud.bigtable.hbase.BigtableConfiguration;
2523
import java.io.ByteArrayOutputStream;
2624
import java.io.IOException;
2725
import java.io.PrintStream;
2826
import java.util.UUID;
2927
import keyviz.LoadData;
28+
import keyviz.ReadData.ReadDataOptions;
29+
import keyviz.ReadData.ReadFromTableFn;
3030
import org.apache.beam.sdk.Pipeline;
3131
import org.apache.beam.sdk.options.PipelineOptionsFactory;
3232
import org.apache.beam.sdk.transforms.Create;
@@ -143,7 +143,7 @@ public void testWriteAndRead() {
143143
.build();
144144

145145
// Initiates a new pipeline every second
146-
p.apply(Create.of(1l))
146+
p.apply(Create.of(1L))
147147
.apply(ParDo.of(new ReadFromTableFn(bigtableTableConfig, options)));
148148
p.run().waitUntilFinish();
149149

@@ -160,7 +160,7 @@ public void testWriteAndRead() {
160160
p = Pipeline.create(options);
161161

162162
// Initiates a new pipeline every second
163-
p.apply(Create.of(1l))
163+
p.apply(Create.of(1L))
164164
.apply(ParDo.of(new ReadFromTableFn(bigtableTableConfig, options)));
165165
p.run().waitUntilFinish();
166166

bigtable/cassandra-migration-codelab/pom.xml

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,16 @@
2323
<artifactId>bigtable</artifactId>
2424
<version>1.0-SNAPSHOT</version>
2525

26+
<!--
27+
The parent pom defines common style checks and testing strategies for our samples.
28+
Removing or replacing it should not affect the execution of the samples in any way.
29+
-->
30+
<parent>
31+
<groupId>com.google.cloud.samples</groupId>
32+
<artifactId>shared-configuration</artifactId>
33+
<version>1.0.18</version>
34+
</parent>
35+
2636
<properties>
2737
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
2838
<maven.compiler.source>1.8</maven.compiler.source>

bigtable/cassandra-migration-codelab/src/main/java/CassandraMigrationCodelab.java

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -26,9 +26,10 @@
2626
import com.google.protobuf.ByteString;
2727

2828
public class CassandraMigrationCodelab {
29+
2930
private BigtableDataClient dataClient;
3031
private String tableId;
31-
private final String COLUMN_FAMILY_NAME = "stats_summary";
32+
private static final String COLUMN_FAMILY_NAME = "stats_summary";
3233

3334
public CassandraMigrationCodelab(String projectId, String instanceId, String tableId) {
3435
this.tableId = tableId;
@@ -59,7 +60,7 @@ public void write() {
5960
long timestamp = (long) 1556712000 * 1000; // Timestamp of June 1, 2019 12:00
6061

6162
String rowKey = "phone#4c410523#20190501";
62-
ByteString one = ByteString.copyFrom(new byte[] {0, 0, 0, 0, 0, 0, 0, 1});
63+
ByteString one = ByteString.copyFrom(new byte[]{0, 0, 0, 0, 0, 0, 0, 1});
6364

6465
RowMutation rowMutation =
6566
RowMutation.create(tableId, rowKey)
@@ -122,7 +123,7 @@ public void update2() {
122123

123124
String rowKey = "phone#4c410523#20190501";
124125

125-
ByteString zero = ByteString.copyFrom(new byte[] {0, 0, 0, 0, 0, 0, 0, 0});
126+
ByteString zero = ByteString.copyFrom(new byte[]{0, 0, 0, 0, 0, 0, 0, 0});
126127

127128
RowMutation rowMutation =
128129
RowMutation.create(tableId, rowKey)

bigtable/cassandra-migration-codelab/src/test/java/CassandraMigrationCodelabTest.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,8 +27,9 @@
2727
import org.junit.Test;
2828

2929
public class CassandraMigrationCodelabTest {
30+
3031
private static final String INSTANCE_ENV = "BIGTABLE_TESTING_INSTANCE";
31-
private final String COLUMN_FAMILY_NAME = "stats_summary";
32+
private static final String COLUMN_FAMILY_NAME = "stats_summary";
3233
private static final String TABLE_PREFIX = "cass-";
3334

3435
private static final String TABLE_ID =

bigtable/hbase/snippets/pom.xml

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,16 @@
2424
<version>1.0-SNAPSHOT</version>
2525
<name>docs-samples</name>
2626

27+
<!--
28+
The parent pom defines common style checks and testing strategies for our samples.
29+
Removing or replacing it should not affect the execution of the samples in any way.
30+
-->
31+
<parent>
32+
<groupId>com.google.cloud.samples</groupId>
33+
<artifactId>shared-configuration</artifactId>
34+
<version>1.0.18</version>
35+
</parent>
36+
2737
<properties>
2838
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
2939
<maven.compiler.source>1.8</maven.compiler.source>

bigtable/hbase/snippets/src/main/java/com/example/bigtable/Filters.java

Lines changed: 6 additions & 57 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,8 @@
1616

1717
package com.example.bigtable;
1818

19+
// [START bigtable_filters_print]
20+
1921
import static com.google.cloud.bigtable.data.v2.models.Filters.FILTERS;
2022

2123
import com.google.api.gax.rpc.ServerStream;
@@ -52,37 +54,10 @@
5254
import org.apache.hadoop.hbase.filter.ValueFilter;
5355
import org.apache.hadoop.hbase.util.Bytes;
5456

55-
// [START bigtable_filters_limit_row_sample]
56-
// [START bigtable_filters_limit_row_regex]
57-
// [START bigtable_filters_limit_cells_per_col]
58-
// [START bigtable_filters_limit_cells_per_row]
59-
// [START bigtable_filters_limit_cells_per_row_offset]
60-
// [START bigtable_filters_limit_col_family_regex]
61-
// [START bigtable_filters_limit_col_qualifier_regex]
62-
// [START bigtable_filters_limit_col_range]
63-
// [START bigtable_filters_limit_value_range]
64-
// [START bigtable_filters_limit_value_regex]
65-
// [START bigtable_filters_limit_timestamp_range]
66-
// [START bigtable_filters_limit_block_all]
67-
// [START bigtable_filters_composing_chain]
68-
// [START bigtable_filters_composing_interleave]
69-
7057
public class Filters {
71-
// [END bigtable_filters_limit_row_sample]
72-
// [END bigtable_filters_limit_row_regex]
73-
// [END bigtable_filters_limit_cells_per_col]
74-
// [END bigtable_filters_limit_cells_per_row]
75-
// [END bigtable_filters_limit_cells_per_row_offset]
76-
// [END bigtable_filters_limit_col_family_regex]
77-
// [END bigtable_filters_limit_col_qualifier_regex]
78-
// [END bigtable_filters_limit_col_range]
79-
// [END bigtable_filters_limit_value_range]
80-
// [END bigtable_filters_limit_value_regex]
81-
// [END bigtable_filters_limit_timestamp_range]
82-
// [END bigtable_filters_limit_block_all]
83-
// [END bigtable_filters_composing_chain]
84-
// [END bigtable_filters_composing_interleave]
8558

59+
// Write your code here.
60+
// [START_EXCLUDE]
8661
// [START bigtable_filters_limit_row_sample]
8762
public static void filterLimitRowSample() {
8863
// TODO(developer): Replace these variables before running the sample.
@@ -362,21 +337,8 @@ public static void filterComposingInterleave(
362337
readWithFilter(projectId, instanceId, tableId, scan);
363338
}
364339
// [END bigtable_filters_composing_interleave]
340+
// [END_EXCLUDE]
365341

366-
// [START bigtable_filters_limit_row_sample]
367-
// [START bigtable_filters_limit_row_regex]
368-
// [START bigtable_filters_limit_cells_per_col]
369-
// [START bigtable_filters_limit_cells_per_row]
370-
// [START bigtable_filters_limit_cells_per_row_offset]
371-
// [START bigtable_filters_limit_col_family_regex]
372-
// [START bigtable_filters_limit_col_qualifier_regex]
373-
// [START bigtable_filters_limit_col_range]
374-
// [START bigtable_filters_limit_value_range]
375-
// [START bigtable_filters_limit_value_regex]
376-
// [START bigtable_filters_limit_timestamp_range]
377-
// [START bigtable_filters_limit_block_all]
378-
// [START bigtable_filters_composing_chain]
379-
// [START bigtable_filters_composing_interleave]
380342
public static void readWithFilter(
381343
String projectId, String instanceId, String tableId, Scan scan) {
382344
// Initialize client that will be used to send requests. This client only needs to be created
@@ -414,17 +376,4 @@ private static void printRow(Result row) {
414376
System.out.println();
415377
}
416378
}
417-
// [END bigtable_filters_limit_row_sample]
418-
// [END bigtable_filters_limit_row_regex]
419-
// [END bigtable_filters_limit_cells_per_col]
420-
// [END bigtable_filters_limit_cells_per_row]
421-
// [END bigtable_filters_limit_cells_per_row_offset]
422-
// [END bigtable_filters_limit_col_family_regex]
423-
// [END bigtable_filters_limit_col_qualifier_regex]
424-
// [END bigtable_filters_limit_col_range]
425-
// [END bigtable_filters_limit_value_range]
426-
// [END bigtable_filters_limit_value_regex]
427-
// [END bigtable_filters_limit_timestamp_range]
428-
// [END bigtable_filters_limit_block_all]
429-
// [END bigtable_filters_composing_chain]
430-
// [END bigtable_filters_composing_interleave]
379+
// [END bigtable_filters_print]

bigtable/hbase/snippets/src/main/java/com/example/bigtable/Reads.java

Lines changed: 8 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -16,13 +16,7 @@
1616

1717
package com.example.bigtable;
1818

19-
// [START bigtable_reads_row]
20-
// [START bigtable_reads_row_partial]
21-
// [START bigtable_reads_rows]
22-
// [START bigtable_reads_row_range]
23-
// [START bigtable_reads_row_ranges]
24-
// [START bigtable_reads_prefix]
25-
// [START bigtable_reads_filter]
19+
// [START bigtable_reads_print]
2620

2721
import com.google.cloud.bigtable.hbase.BigtableConfiguration;
2822
import java.io.IOException;
@@ -44,15 +38,11 @@
4438
import org.apache.hadoop.hbase.filter.ValueFilter;
4539
import org.apache.hadoop.hbase.util.Bytes;
4640

41+
4742
public class Reads {
48-
// [END bigtable_reads_row]
49-
// [END bigtable_reads_row_partial]
50-
// [END bigtable_reads_rows]
51-
// [END bigtable_reads_row_range]
52-
// [END bigtable_reads_row_ranges]
53-
// [END bigtable_reads_prefix]
54-
// [END bigtable_reads_filter]
5543

44+
// Write your code here.
45+
// [START_EXCLUDE]
5646
// [START bigtable_reads_row]
5747
public static void readRow() {
5848
// TODO(developer): Replace these variables before running the sample.
@@ -79,6 +69,8 @@ public static void readRow(String projectId, String instanceId, String tableId)
7969
"Unable to initialize service client, as a network error occurred: \n" + e.toString());
8070
}
8171
}
72+
// [END bigtable_reads_row]
73+
8274
// [START bigtable_reads_row_partial]
8375
public static void readRowPartial() {
8476
// TODO(developer): Replace these variables before running the sample.
@@ -276,14 +268,8 @@ public static void readFilter(String projectId, String instanceId, String tableI
276268
}
277269
}
278270
// [END bigtable_reads_filter]
271+
// [END_EXCLUDE]
279272

280-
// [START bigtable_reads_row]
281-
// [START bigtable_reads_row_partial]
282-
// [START bigtable_reads_rows]
283-
// [START bigtable_reads_row_range]
284-
// [START bigtable_reads_row_ranges]
285-
// [START bigtable_reads_prefix]
286-
// [START bigtable_reads_filter]
287273
private static void printRow(Result row) {
288274
System.out.printf("Reading data for %s%n", Bytes.toString(row.rawCells()[0].getRowArray()));
289275
String colFamily = "";
@@ -302,10 +288,4 @@ private static void printRow(Result row) {
302288
System.out.println();
303289
}
304290
}
305-
// [END bigtable_reads_row]
306-
// [END bigtable_reads_row_partial]
307-
// [END bigtable_reads_rows]
308-
// [END bigtable_reads_row_range]
309-
// [END bigtable_reads_row_ranges]
310-
// [END bigtable_reads_prefix]
311-
// [END bigtable_reads_filter]
291+
// [END bigtable_reads_print]

bigtable/hbase/snippets/src/main/java/com/example/bigtable/WriteBatch.java

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,7 @@
2828
import org.apache.hadoop.hbase.util.Bytes;
2929

3030
public class WriteBatch {
31+
3132
private static final byte[] COLUMN_FAMILY_NAME = Bytes.toBytes("stats_summary");
3233

3334
public static void writeBatch(String projectId, String instanceId, String tableId) {
@@ -36,9 +37,9 @@ public static void writeBatch(String projectId, String instanceId, String tableI
3637
// String tableId = "mobile-time-series";
3738

3839
try (Connection connection = BigtableConfiguration.connect(projectId, instanceId)) {
39-
Table table = connection.getTable(TableName.valueOf(Bytes.toBytes(tableId)));
40+
final Table table = connection.getTable(TableName.valueOf(Bytes.toBytes(tableId)));
4041
long timestamp = System.currentTimeMillis();
41-
byte[] one = new byte[] {0, 0, 0, 0, 0, 0, 0, 1};
42+
byte[] one = new byte[]{0, 0, 0, 0, 0, 0, 0, 1};
4243

4344
List<Put> puts = new ArrayList<Put>();
4445
puts.add(new Put(Bytes.toBytes("tablet#a0b81f74#20190501")));

bigtable/hbase/snippets/src/main/java/com/example/bigtable/WriteConditionally.java

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,7 @@
2828
import org.apache.hadoop.hbase.util.Bytes;
2929

3030
public class WriteConditionally {
31+
3132
private static final byte[] COLUMN_FAMILY_NAME = Bytes.toBytes("stats_summary");
3233

3334
public static void writeConditionally(String projectId, String instanceId, String tableId) {

bigtable/hbase/snippets/src/main/java/com/example/bigtable/WriteIncrement.java

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@
2525
import org.apache.hadoop.hbase.util.Bytes;
2626

2727
public class WriteIncrement {
28+
2829
private static final byte[] COLUMN_FAMILY_NAME = Bytes.toBytes("stats_summary");
2930

3031
public static void writeIncrement(String projectId, String instanceId, String tableId) {

bigtable/hbase/snippets/src/main/java/com/example/bigtable/WriteSimple.java

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@
2626
import org.apache.hadoop.hbase.util.Bytes;
2727

2828
public class WriteSimple {
29+
2930
private static final byte[] COLUMN_FAMILY_NAME = Bytes.toBytes("stats_summary");
3031

3132
public static void writeSimple(String projectId, String instanceId, String tableId) {
@@ -34,9 +35,9 @@ public static void writeSimple(String projectId, String instanceId, String table
3435
// String tableId = "mobile-time-series";
3536

3637
try (Connection connection = BigtableConfiguration.connect(projectId, instanceId)) {
37-
Table table = connection.getTable(TableName.valueOf(Bytes.toBytes(tableId)));
38+
final Table table = connection.getTable(TableName.valueOf(Bytes.toBytes(tableId)));
3839
long timestamp = System.currentTimeMillis();
39-
byte[] one = new byte[] {0, 0, 0, 0, 0, 0, 0, 1};
40+
byte[] one = new byte[]{0, 0, 0, 0, 0, 0, 0, 1};
4041

4142
String rowKey = "phone#4c410523#20190501";
4243
Put put = new Put(Bytes.toBytes(rowKey));

bigtable/snippets/pom.xml

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,16 @@
2424
<version>1.0-SNAPSHOT</version>
2525
<name>docs-samples</name>
2626

27+
<!--
28+
The parent pom defines common style checks and testing strategies for our samples.
29+
Removing or replacing it should not affect the execution of the samples in any way.
30+
-->
31+
<parent>
32+
<groupId>com.google.cloud.samples</groupId>
33+
<artifactId>shared-configuration</artifactId>
34+
<version>1.0.18</version>
35+
</parent>
36+
2737
<properties>
2838
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
2939
<maven.compiler.source>1.8</maven.compiler.source>

0 commit comments

Comments
 (0)