package com.google.cloud.bigquery.samples;

import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.model.Job;
import com.google.api.services.bigquery.model.JobConfiguration;
import com.google.api.services.bigquery.model.JobConfigurationLoad;
import com.google.api.services.bigquery.model.TableReference;
import com.google.api.services.bigquery.model.TableSchema;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.Scanner;
/**
 * Command-line sample that loads CSV data from Google Cloud Storage into an
 * existing BigQuery dataset, creating or appending to the named table using a
 * user-supplied JSON schema file.
 */
public class LoadDataCSVSample extends BigqueryUtils {

  // [START main]
  /**
   * Prompts the user for the project, dataset, table, Cloud Storage path,
   * schema file, and polling interval, then runs a BigQuery load job and
   * blocks until it finishes.
   *
   * @param args command-line arguments (unused; all input is read from stdin)
   * @throws IOException if the schema file cannot be read or an API call fails
   * @throws InterruptedException if the thread is interrupted while polling
   */
  public static void main(String[] args) throws IOException, InterruptedException {
    final String projectId;
    final String datasetId;
    final String tableId;
    final String cloudStoragePath;
    final String sourceSchemaPath;
    final long interval;

    // try-with-resources closes the scanner even if reading input throws;
    // the original only closed it on the happy path.
    try (Scanner scanner = new Scanner(System.in)) {
      System.out.println("Enter your project id: ");
      projectId = scanner.nextLine();
      System.out.println("Enter your dataset id: ");
      datasetId = scanner.nextLine();
      System.out.println("Enter your table id: ");
      tableId = scanner.nextLine();
      System.out.println("Enter the Google Cloud Storage Path to the data you'd like to load: ");
      cloudStoragePath = scanner.nextLine();
      System.out.println("Enter the filepath to your schema: ");
      sourceSchemaPath = scanner.nextLine();
      System.out.println("Enter how often to check if your job is complete (milliseconds): ");
      interval = scanner.nextLong();
    }

    // The original leaked the FileReader (never closed) and read it in the
    // platform default charset; close it here and read explicitly as UTF-8.
    try (Reader schemaSource = new InputStreamReader(
        new FileInputStream(new File(sourceSchemaPath)), StandardCharsets.UTF_8)) {
      run(cloudStoragePath, projectId, datasetId, tableId, schemaSource, interval);
    }
  }
  // [END main]

  // [START run]
  /**
   * Starts a load job that imports the data at {@code cloudStoragePath} into
   * the table identified by {@code projectId}.{@code datasetId}.{@code tableId},
   * then polls the job every {@code interval} milliseconds until it completes.
   *
   * @param cloudStoragePath fully-qualified Cloud Storage URI of the source data
   * @param projectId project that owns the destination table and the job
   * @param datasetId dataset containing the destination table
   * @param tableId destination table id
   * @param schemaSource reader over the table schema (parsed by
   *     {@code loadSchema}, inherited from {@code BigqueryUtils}); the caller
   *     retains ownership and must close it
   * @param interval polling period in milliseconds
   * @throws IOException if an API call fails
   * @throws InterruptedException if the thread is interrupted while polling
   */
  public static void run(
      String cloudStoragePath,
      String projectId,
      String datasetId,
      String tableId,
      Reader schemaSource,
      long interval) throws IOException, InterruptedException {
    Bigquery bigquery = BigqueryServiceFactory.getService();
    Job loadJob = loadJob(
        bigquery,
        cloudStoragePath,
        new TableReference()
            .setProjectId(projectId)
            .setDatasetId(datasetId)
            .setTableId(tableId),
        loadSchema(schemaSource));
    // Renamed from get_job: Java locals use lowerCamelCase.
    Bigquery.Jobs.Get getJob = bigquery.jobs().get(
        loadJob.getJobReference().getProjectId(),
        loadJob.getJobReference().getJobId());
    pollJob(getJob, interval);
    System.out.println("Load is Done!");
  }
  // [END run]

  // [START load_job]
  /**
   * Inserts (starts) a BigQuery load job for a single Cloud Storage source URI.
   *
   * @param bigquery authorized BigQuery client
   * @param cloudStoragePath fully-qualified Cloud Storage URI of the source data
   * @param table destination table reference; its project id is also used to
   *     own the job
   * @param schema schema of the destination table
   * @return the inserted job, including the server-assigned job reference
   * @throws IOException if the insert request fails
   */
  public static Job loadJob(
      Bigquery bigquery,
      String cloudStoragePath,
      TableReference table,
      TableSchema schema) throws IOException {
    JobConfigurationLoad load = new JobConfigurationLoad()
        .setDestinationTable(table)
        .setSchema(schema)
        .setSourceUris(Collections.singletonList(cloudStoragePath));
    return bigquery.jobs().insert(table.getProjectId(),
        new Job().setConfiguration(new JobConfiguration().setLoad(load)))
        .execute();
  }
  // [END load_job]
}