Here is code taken from GitHub to load data from a file on Google Cloud Storage into a BigQuery table (https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/bigquery/cloud-client/load_data_from_gcs.py):
import argparse
import uuid
import pprint

from google.cloud import bigquery

dataset_name = 'test'
table_name = 'partition1'
source = "gs://a_eu/has/con/c-1489230000-8U3bTN.csv"
format = 'CSV'


def load_data_from_gcs(dataset_name, table_name, source):
    bigquery_client = bigquery.Client()
    dataset = bigquery_client.dataset(dataset_name)
    table = dataset.table(table_name)
    job_name = str(uuid.uuid4())

    job = bigquery_client.load_table_from_storage(
        job_name, table, source)
    job.source_format = format

    job.begin()
    print(job.begin)

    job.result()
    print(job.result)

    print('Loaded {} rows into {}:{}.'.format(
        job.output_rows, dataset_name, table_name))


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('dataset_name')
    parser.add_argument('table_name')
    parser.add_argument('source')

    args = parser.parse_args()

    load_data_from_gcs(
        args.dataset_name,
        args.table_name,
        args.source)

load_data_from_gcs(dataset_name, table_name, source)
When I run it, I get the following error:
usage: partition2.py [-h] dataset_name table_name source
partition2.py: error: too few arguments
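If I read the usage line correctly, argparse expects three positional arguments on the command line, so presumably the script would have to be invoked something like this (using the hard-coded values from the code above as placeholders):

python partition2.py test partition1 gs://a_eu/has/con/c-1489230000-8U3bTN.csv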
Could you please give me a hint as to what this problem is about? Thanks.