Add min max timestamp to filter the row range as per https://hbase.apache.org/apidocs/org/apache/hadoop/hbase/client/Scan.html#setTimeRange-long-long- #1

Open · wants to merge 2 commits into main
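For context, Scan.setTimeRange(minStamp, maxStamp) (linked in the title above) restricts a scan to cells whose timestamps fall in the half-open range [minStamp, maxStamp). Below is a minimal standalone sketch of the Scan this PR builds at runtime; the class name and timestamp values are illustrative, not part of this change:

import java.io.IOException;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;

// Illustrative sketch, not part of this PR.
public class TimeRangeScanSketch {
  public static void main(String[] args) throws IOException {
    Scan scan = new Scan();
    scan.setCacheBlocks(false);
    // Keep only cells with timestamp t where minStamp <= t < maxStamp.
    // HBase cell timestamps are milliseconds since the epoch by default.
    scan.setTimeRange(1609459200000L, 1640995200000L); // calendar year 2021
    scan.setFilter(new FirstKeyOnlyFilter());
  }
}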
@@ -19,4 +19,15 @@ public interface CloudBigtableOptions extends DataflowPipelineOptions {

void setBigtableTableId(String bigtableTableId);

@Description("Provide the Start timestamp." )
String getStartTimestamp();


void setStartTimestamp(String startTimestamp);

@Description("Provide the End timestamp." )
String getEndTimestamp();

void setEndTimestamp(String endTimestamp);

}
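Beam derives command-line flag names from these JavaBean getters, so the two new options are supplied as --startTimestamp and --endTimestamp. A minimal sketch of that binding, assuming the CloudBigtableOptions interface above is on the classpath; the flag values are illustrative:

import org.apache.beam.sdk.options.PipelineOptionsFactory;

// Illustrative sketch: parses the new flags into the options interface added above.
public class OptionsParsingSketch {
  public static void main(String[] args) {
    String[] flags = {
        "--startTimestamp=1609459200000",
        "--endTimestamp=1640995200000"
    };
    CloudBigtableOptions options =
        PipelineOptionsFactory.fromArgs(flags).as(CloudBigtableOptions.class);
    System.out.println(options.getStartTimestamp()); // prints 1609459200000
  }
}

Validation (withValidation()) is skipped in this sketch because the required Bigtable project, instance, and table flags are omitted.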
@@ -15,6 +15,8 @@
*/
package com.google.cloud.bigtable.dataflow.example;

import java.io.IOException;

import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.Read;
import org.apache.beam.sdk.io.TextIO;
@@ -61,6 +63,8 @@ public class SourceRowCount {
public static long startTime;
public static long endTime;

/**
* Options needed for running the pipeline.
*/
public static interface CountOptions extends CloudBigtableOptions {

void setResultLocation(String resultLocation);
@@ -78,16 +82,32 @@ public void processElement(DoFn<Long, String>.ProcessContext context) throws Exception {
}
};

public static void main(String[] args) throws IOException {
CountOptions options =
PipelineOptionsFactory.fromArgs(args).withValidation().as(CountOptions.class);
String PROJECT_ID = options.getBigtableProjectId();
String INSTANCE_ID = options.getBigtableInstanceId();
String TABLE_ID = options.getBigtableTableId();

String START_TIME = options.getStartTimestamp();
String END_TIME = options.getEndTimestamp();
if (START_TIME != null && END_TIME != null) {
try {
startTime = Long.parseLong(START_TIME);
endTime = Long.parseLong(END_TIME);
} catch (NumberFormatException e) {
// Fail fast: a malformed timestamp should not fall through to a scan
// with an unintended time range.
System.err.println("Invalid timestamp format: " + e.getMessage());
System.exit(1);
}
}
// [START bigtable_dataflow_connector_scan_config]
Scan scan = new Scan();
scan.setCacheBlocks(false);

// Restrict the scan to cells whose timestamps fall in [startTime, endTime)
// when both bounds were supplied on the command line.
if (START_TIME != null && END_TIME != null) {
scan.setTimeRange(startTime, endTime);
}

scan.setFilter(new FirstKeyOnlyFilter());

// CloudBigtableTableConfiguration contains the project, zone, cluster and table to connect to.
@@ -100,6 +120,7 @@ public static void main(String[] args) {
.withScan(scan)
.build();

Pipeline p = Pipeline.create(options);

p.apply(Read.from(CloudBigtableIO.read(config)))
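With both flags present, the pipeline counts only rows that contain at least one cell in [startTimestamp, endTimestamp); with either flag absent, it scans the full time range as before. One possible tightening of the parse-and-apply logic above, collected into a single helper; the class and method names are hypothetical, not part of this PR:

import java.io.IOException;

import org.apache.hadoop.hbase.client.Scan;

// Hypothetical helper, not part of this PR.
public final class ScanTimeRanges {
  private ScanTimeRanges() {}

  static void applyTimeRange(Scan scan, String start, String end) throws IOException {
    if (start == null || end == null) {
      return; // No bounds supplied: leave the scan on the full time range.
    }
    try {
      scan.setTimeRange(Long.parseLong(start), Long.parseLong(end));
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("Invalid timestamp format: " + e.getMessage(), e);
    }
  }
}

Throwing here rather than printing and continuing keeps a malformed flag from silently producing a count over the wrong time range.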