Similar approach to Mikhail's answer - but with more code:
With Google Cloud Functions you can automate BigQuery each time you receive a new file:
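Each new object in the bucket fires the function with that object's metadata. Roughly, the payload the code below reads from event.data looks like this (field names from the Cloud Storage object resource, values purely illustrative):

// Approximate shape of event.data for a Storage-triggered background function
{
  bucket: 'your-bucket',
  name: 'path/to/new-file.csv',
  contentType: 'text/csv',
  size: '1234',
  timeCreated: '2017-01-01T00:00:00.000Z'
}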
Codewise, declare the BigQuery client library as a dependency in package.json:
{
  "dependencies": {
    "@google-cloud/bigquery": "^0.9.6"
  }
}
And in index.js you can act on the new file in any appropriate way:
var BigQuery = require('@google-cloud/bigquery');
var bigQuery = BigQuery({ projectId: 'your-project-id' });

// Background function fired by the Cloud Storage trigger
exports.processFile = (event, callback) => {
  console.log('Processing: ' + JSON.stringify(event.data));
  query(event.data);
  callback();
};

function query(data) {
  // name of the new object, and its full gs:// path
  const filename = data.name.split('/').pop();
  const full_filename = `gs://${data.bucket}/${data.name}`;

  // if you want to run a query (e.g. one that references full_filename):
  const sqlQuery = '...';
  bigQuery.query({
    query: sqlQuery,
    useLegacySql: false
  });
}
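If you also want to do something with the results, or to load the new file itself into a table, here is a minimal sketch in the style of the same 0.9.6-era clients (the dataset, table, query and the handleFile() name are placeholders I introduce here; the load variant also assumes you add @google-cloud/storage to package.json, and note that in newer client versions table.import() is called table.load() and callback signatures may differ slightly):

var BigQuery = require('@google-cloud/bigquery');
var Storage = require('@google-cloud/storage');

var bigQuery = BigQuery({ projectId: 'your-project-id' });
var storage = Storage({ projectId: 'your-project-id' });

function handleFile(data) {
  // full gs:// path of the new object, handy to reference inside a query
  const full_filename = `gs://${data.bucket}/${data.name}`;

  // Option 1: run a query and log the rows it returns
  bigQuery.query({
    query: 'SELECT 1 AS example_column',  // placeholder query
    useLegacySql: false
  }, (err, rows) => {
    if (err) {
      console.error('Query failed:', err);
      return;
    }
    console.log('Rows: ' + JSON.stringify(rows));
  });

  // Option 2: start a load job that imports the new file into a (placeholder) table
  bigQuery
    .dataset('my_dataset')
    .table('my_table')
    .import(storage.bucket(data.bucket).file(data.name), (err, job) => {
      if (err) {
        console.error('Load failed:', err);
        return;
      }
      console.log('Started load job: ' + job.id);
    });
}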