Skip to content

[bigtable] Update region tags for reads and filters #3148

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Merged
merged 11 commits into from
Jun 18, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions bigtable/beam/helloworld/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,16 @@
<artifactId>helloworld</artifactId>
<version>1.0-SNAPSHOT</version>

<!--
The parent pom defines common style checks and testing strategies for our samples.
Removing or replacing it should not affect the execution of the samples in any way.
-->
<parent>
<groupId>com.google.cloud.samples</groupId>
<artifactId>shared-configuration</artifactId>
<version>1.0.18</version>
</parent>

<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
Expand Down
9 changes: 9 additions & 0 deletions bigtable/beam/keyviz-art/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,15 @@
<artifactId>keyviz-art</artifactId>
<version>1.0-SNAPSHOT</version>

<!--
The parent pom defines common style checks and testing strategies for our samples.
Removing or replacing it should not affect the execution of the samples in any way.
-->
<parent>
<groupId>com.google.cloud.samples</groupId>
<artifactId>shared-configuration</artifactId>
<version>1.0.18</version>
</parent>

<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
Expand Down
8 changes: 4 additions & 4 deletions bigtable/beam/keyviz-art/src/test/java/KeyVizArtTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -18,15 +18,15 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;

import keyviz.ReadData.ReadDataOptions;
import keyviz.ReadData.ReadFromTableFn;
import com.google.cloud.bigtable.beam.CloudBigtableTableConfiguration;
import com.google.cloud.bigtable.hbase.BigtableConfiguration;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.UUID;
import keyviz.LoadData;
import keyviz.ReadData.ReadDataOptions;
import keyviz.ReadData.ReadFromTableFn;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;
Expand Down Expand Up @@ -143,7 +143,7 @@ public void testWriteAndRead() {
.build();

// Initiates a new pipeline every second
p.apply(Create.of(1l))
p.apply(Create.of(1L))
.apply(ParDo.of(new ReadFromTableFn(bigtableTableConfig, options)));
p.run().waitUntilFinish();

Expand All @@ -160,7 +160,7 @@ public void testWriteAndRead() {
p = Pipeline.create(options);

// Initiates a new pipeline every second
p.apply(Create.of(1l))
p.apply(Create.of(1L))
.apply(ParDo.of(new ReadFromTableFn(bigtableTableConfig, options)));
p.run().waitUntilFinish();

Expand Down
10 changes: 10 additions & 0 deletions bigtable/cassandra-migration-codelab/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,16 @@
<artifactId>bigtable</artifactId>
<version>1.0-SNAPSHOT</version>

<!--
The parent pom defines common style checks and testing strategies for our samples.
Removing or replacing it should not affect the execution of the samples in any way.
-->
<parent>
<groupId>com.google.cloud.samples</groupId>
<artifactId>shared-configuration</artifactId>
<version>1.0.18</version>
</parent>

<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,10 @@
import com.google.protobuf.ByteString;

public class CassandraMigrationCodelab {

private BigtableDataClient dataClient;
private String tableId;
private final String COLUMN_FAMILY_NAME = "stats_summary";
private static final String COLUMN_FAMILY_NAME = "stats_summary";

public CassandraMigrationCodelab(String projectId, String instanceId, String tableId) {
this.tableId = tableId;
Expand Down Expand Up @@ -59,7 +60,7 @@ public void write() {
long timestamp = (long) 1556712000 * 1000; // Timestamp of June 1, 2019 12:00

String rowKey = "phone#4c410523#20190501";
ByteString one = ByteString.copyFrom(new byte[] {0, 0, 0, 0, 0, 0, 0, 1});
ByteString one = ByteString.copyFrom(new byte[]{0, 0, 0, 0, 0, 0, 0, 1});

RowMutation rowMutation =
RowMutation.create(tableId, rowKey)
Expand Down Expand Up @@ -122,7 +123,7 @@ public void update2() {

String rowKey = "phone#4c410523#20190501";

ByteString zero = ByteString.copyFrom(new byte[] {0, 0, 0, 0, 0, 0, 0, 0});
ByteString zero = ByteString.copyFrom(new byte[]{0, 0, 0, 0, 0, 0, 0, 0});

RowMutation rowMutation =
RowMutation.create(tableId, rowKey)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,9 @@
import org.junit.Test;

public class CassandraMigrationCodelabTest {

private static final String INSTANCE_ENV = "BIGTABLE_TESTING_INSTANCE";
private final String COLUMN_FAMILY_NAME = "stats_summary";
private static final String COLUMN_FAMILY_NAME = "stats_summary";
private static final String TABLE_PREFIX = "cass-";

private static final String TABLE_ID =
Expand Down
10 changes: 10 additions & 0 deletions bigtable/hbase/snippets/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,16 @@
<version>1.0-SNAPSHOT</version>
<name>docs-samples</name>

<!--
The parent pom defines common style checks and testing strategies for our samples.
Removing or replacing it should not affect the execution of the samples in any way.
-->
<parent>
<groupId>com.google.cloud.samples</groupId>
<artifactId>shared-configuration</artifactId>
<version>1.0.18</version>
</parent>

<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@

package com.example.bigtable;

// [START bigtable_filters_print]

import static com.google.cloud.bigtable.data.v2.models.Filters.FILTERS;

import com.google.api.gax.rpc.ServerStream;
Expand Down Expand Up @@ -52,37 +54,10 @@
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

// [START bigtable_filters_limit_row_sample]
// [START bigtable_filters_limit_row_regex]
// [START bigtable_filters_limit_cells_per_col]
// [START bigtable_filters_limit_cells_per_row]
// [START bigtable_filters_limit_cells_per_row_offset]
// [START bigtable_filters_limit_col_family_regex]
// [START bigtable_filters_limit_col_qualifier_regex]
// [START bigtable_filters_limit_col_range]
// [START bigtable_filters_limit_value_range]
// [START bigtable_filters_limit_value_regex]
// [START bigtable_filters_limit_timestamp_range]
// [START bigtable_filters_limit_block_all]
// [START bigtable_filters_composing_chain]
// [START bigtable_filters_composing_interleave]

public class Filters {
// [END bigtable_filters_limit_row_sample]
// [END bigtable_filters_limit_row_regex]
// [END bigtable_filters_limit_cells_per_col]
// [END bigtable_filters_limit_cells_per_row]
// [END bigtable_filters_limit_cells_per_row_offset]
// [END bigtable_filters_limit_col_family_regex]
// [END bigtable_filters_limit_col_qualifier_regex]
// [END bigtable_filters_limit_col_range]
// [END bigtable_filters_limit_value_range]
// [END bigtable_filters_limit_value_regex]
// [END bigtable_filters_limit_timestamp_range]
// [END bigtable_filters_limit_block_all]
// [END bigtable_filters_composing_chain]
// [END bigtable_filters_composing_interleave]

// Write your code here.
// [START_EXCLUDE]
// [START bigtable_filters_limit_row_sample]
public static void filterLimitRowSample() {
// TODO(developer): Replace these variables before running the sample.
Expand Down Expand Up @@ -362,21 +337,8 @@ public static void filterComposingInterleave(
readWithFilter(projectId, instanceId, tableId, scan);
}
// [END bigtable_filters_composing_interleave]
// [END_EXCLUDE]

// [START bigtable_filters_limit_row_sample]
// [START bigtable_filters_limit_row_regex]
// [START bigtable_filters_limit_cells_per_col]
// [START bigtable_filters_limit_cells_per_row]
// [START bigtable_filters_limit_cells_per_row_offset]
// [START bigtable_filters_limit_col_family_regex]
// [START bigtable_filters_limit_col_qualifier_regex]
// [START bigtable_filters_limit_col_range]
// [START bigtable_filters_limit_value_range]
// [START bigtable_filters_limit_value_regex]
// [START bigtable_filters_limit_timestamp_range]
// [START bigtable_filters_limit_block_all]
// [START bigtable_filters_composing_chain]
// [START bigtable_filters_composing_interleave]
public static void readWithFilter(
String projectId, String instanceId, String tableId, Scan scan) {
// Initialize client that will be used to send requests. This client only needs to be created
Expand Down Expand Up @@ -414,17 +376,4 @@ private static void printRow(Result row) {
System.out.println();
}
}
// [END bigtable_filters_limit_row_sample]
// [END bigtable_filters_limit_row_regex]
// [END bigtable_filters_limit_cells_per_col]
// [END bigtable_filters_limit_cells_per_row]
// [END bigtable_filters_limit_cells_per_row_offset]
// [END bigtable_filters_limit_col_family_regex]
// [END bigtable_filters_limit_col_qualifier_regex]
// [END bigtable_filters_limit_col_range]
// [END bigtable_filters_limit_value_range]
// [END bigtable_filters_limit_value_regex]
// [END bigtable_filters_limit_timestamp_range]
// [END bigtable_filters_limit_block_all]
// [END bigtable_filters_composing_chain]
// [END bigtable_filters_composing_interleave]
// [END bigtable_filters_print]
Original file line number Diff line number Diff line change
Expand Up @@ -16,13 +16,7 @@

package com.example.bigtable;

// [START bigtable_reads_row]
// [START bigtable_reads_row_partial]
// [START bigtable_reads_rows]
// [START bigtable_reads_row_range]
// [START bigtable_reads_row_ranges]
// [START bigtable_reads_prefix]
// [START bigtable_reads_filter]
// [START bigtable_reads_print]

import com.google.cloud.bigtable.hbase.BigtableConfiguration;
import java.io.IOException;
Expand All @@ -44,15 +38,11 @@
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.util.Bytes;


public class Reads {
// [END bigtable_reads_row]
// [END bigtable_reads_row_partial]
// [END bigtable_reads_rows]
// [END bigtable_reads_row_range]
// [END bigtable_reads_row_ranges]
// [END bigtable_reads_prefix]
// [END bigtable_reads_filter]

// Write your code here.
// [START_EXCLUDE]
// [START bigtable_reads_row]
public static void readRow() {
// TODO(developer): Replace these variables before running the sample.
Expand All @@ -79,6 +69,8 @@ public static void readRow(String projectId, String instanceId, String tableId)
"Unable to initialize service client, as a network error occurred: \n" + e.toString());
}
}
// [END bigtable_reads_row]

// [START bigtable_reads_row_partial]
public static void readRowPartial() {
// TODO(developer): Replace these variables before running the sample.
Expand Down Expand Up @@ -276,14 +268,8 @@ public static void readFilter(String projectId, String instanceId, String tableI
}
}
// [END bigtable_reads_filter]
// [END_EXCLUDE]

// [START bigtable_reads_row]
// [START bigtable_reads_row_partial]
// [START bigtable_reads_rows]
// [START bigtable_reads_row_range]
// [START bigtable_reads_row_ranges]
// [START bigtable_reads_prefix]
// [START bigtable_reads_filter]
private static void printRow(Result row) {
System.out.printf("Reading data for %s%n", Bytes.toString(row.rawCells()[0].getRowArray()));
String colFamily = "";
Expand All @@ -302,10 +288,4 @@ private static void printRow(Result row) {
System.out.println();
}
}
// [END bigtable_reads_row]
// [END bigtable_reads_row_partial]
// [END bigtable_reads_rows]
// [END bigtable_reads_row_range]
// [END bigtable_reads_row_ranges]
// [END bigtable_reads_prefix]
// [END bigtable_reads_filter]
// [END bigtable_reads_print]
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
import org.apache.hadoop.hbase.util.Bytes;

public class WriteBatch {

private static final byte[] COLUMN_FAMILY_NAME = Bytes.toBytes("stats_summary");

public static void writeBatch(String projectId, String instanceId, String tableId) {
Expand All @@ -36,9 +37,9 @@ public static void writeBatch(String projectId, String instanceId, String tableI
// String tableId = "mobile-time-series";

try (Connection connection = BigtableConfiguration.connect(projectId, instanceId)) {
Table table = connection.getTable(TableName.valueOf(Bytes.toBytes(tableId)));
final Table table = connection.getTable(TableName.valueOf(Bytes.toBytes(tableId)));
long timestamp = System.currentTimeMillis();
byte[] one = new byte[] {0, 0, 0, 0, 0, 0, 0, 1};
byte[] one = new byte[]{0, 0, 0, 0, 0, 0, 0, 1};

List<Put> puts = new ArrayList<Put>();
puts.add(new Put(Bytes.toBytes("tablet#a0b81f74#20190501")));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
import org.apache.hadoop.hbase.util.Bytes;

public class WriteConditionally {

private static final byte[] COLUMN_FAMILY_NAME = Bytes.toBytes("stats_summary");

public static void writeConditionally(String projectId, String instanceId, String tableId) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import org.apache.hadoop.hbase.util.Bytes;

public class WriteIncrement {

private static final byte[] COLUMN_FAMILY_NAME = Bytes.toBytes("stats_summary");

public static void writeIncrement(String projectId, String instanceId, String tableId) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import org.apache.hadoop.hbase.util.Bytes;

public class WriteSimple {

private static final byte[] COLUMN_FAMILY_NAME = Bytes.toBytes("stats_summary");

public static void writeSimple(String projectId, String instanceId, String tableId) {
Expand All @@ -34,9 +35,9 @@ public static void writeSimple(String projectId, String instanceId, String table
// String tableId = "mobile-time-series";

try (Connection connection = BigtableConfiguration.connect(projectId, instanceId)) {
Table table = connection.getTable(TableName.valueOf(Bytes.toBytes(tableId)));
final Table table = connection.getTable(TableName.valueOf(Bytes.toBytes(tableId)));
long timestamp = System.currentTimeMillis();
byte[] one = new byte[] {0, 0, 0, 0, 0, 0, 0, 1};
byte[] one = new byte[]{0, 0, 0, 0, 0, 0, 0, 1};

String rowKey = "phone#4c410523#20190501";
Put put = new Put(Bytes.toBytes(rowKey));
Expand Down
10 changes: 10 additions & 0 deletions bigtable/snippets/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,16 @@
<version>1.0-SNAPSHOT</version>
<name>docs-samples</name>

<!--
The parent pom defines common style checks and testing strategies for our samples.
Removing or replacing it should not affect the execution of the samples in any way.
-->
<parent>
<groupId>com.google.cloud.samples</groupId>
<artifactId>shared-configuration</artifactId>
<version>1.0.18</version>
</parent>

<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
Expand Down
Loading