We are attempting to use the Node.js Google Cloud client libraries (@google-cloud/storage and @google-cloud/bigquery) to load data into BigQuery from Google Cloud Storage. Here is a snippet of the load code:
//init Google Cloud Storage
var gcs = require("@google-cloud/storage")({
  projectId: bq_projectId,
  keyFilename: "./" + keyfile
});

//init Google BigQuery
var bq = require("@google-cloud/bigquery")({
  projectId: bq_projectId,
  keyFilename: "./" + keyfile
});

//dataset IDs may only contain letters, numbers, and underscores
const datasetId = "my_dataset";
const tableId = "data_load";
const bucketName = "my-bucket"; //placeholder for the real bucket name
const fileName = "data-20170518-082238.csv";

bq
  .dataset(datasetId)
  .table(tableId)
  .import(gcs.bucket(bucketName).file(fileName), (err, job, apiResponse) => {
    if (err) {
      throw err;
    }
    console.log(apiResponse);
    console.log(`Job ${job.id} started.`);
  });
The job kicks off and returns a jobId; no error is thrown, and the apiResponse is:
{ kind: 'bigquery#job',
  etag: '"TcVKUQ2ft7DS9Q8U3noJdmpEDQ4/tFIJTWM2yuacXB5EvzWR1ffuKig"',
  id: 'my-project:job_FTO4_Jb5ctr2oEy2IsDSAUCWrgw',
  selfLink: 'https://www.googleapis.com/bigquery/v2/projects/my-project/jobs/job_FTO4_Jb5ctr2oEy2IsDSAUCWrgw',
  jobReference:
   { projectId: 'my-project',
     jobId: 'job_FTO4_Jb5ctr2oEy2IsDSAUCWrgw' },
  configuration:
   { load:
      { sourceUris: [Object],
        schema: [Object],
        destinationTable: [Object],
        sourceFormat: 'CSV' } },
  status: { state: 'RUNNING' },
  statistics: { creationTime: '1495151832686', startTime: '1495151833523' },
  user_email: 'service-account' }
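Note that status.state is RUNNING when the callback fires, so the apiResponse only reflects the job at creation time, not whether the load ultimately succeeded. A sketch of capturing the final state by listening on the returned job object (assuming the job returned by table.import() is an event emitter with complete and error events, as in this generation of @google-cloud/bigquery):

//sketch: wait for the load job to finish so load errors surface,
//instead of only logging the initial apiResponse
bq
  .dataset(datasetId)
  .table(tableId)
  .import(gcs.bucket(bucketName).file(fileName), (err, job) => {
    if (err) {
      throw err;
    }
    console.log(`Job ${job.id} started.`);

    //fires if the load job fails after it has been accepted
    job.on("error", (jobErr) => {
      console.error("Load job failed:", jobErr);
    });

    //fires once the job reaches state DONE; a failed load carries
    //metadata.status.errors and metadata.status.errorResult
    job.on("complete", (metadata) => {
      console.log("Final state:", metadata.status.state);
      console.log("Errors:", metadata.status.errors);
    });
  });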
The exact same file loads fine from the BigQuery interface into the specified table.
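For completeness, an already-submitted job can also be looked up later by the ID from the apiResponse and its final status inspected; a sketch, assuming bq.job() and getMetadata() behave as documented for this client version:

//look up the job from the apiResponse above by its ID and print its
//final status (status.errorResult / status.errors are populated when
//a load fails)
bq
  .job("job_FTO4_Jb5ctr2oEy2IsDSAUCWrgw")
  .getMetadata((err, metadata) => {
    if (err) {
      throw err;
    }
    console.log(metadata.status);
  });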