If you were able to use gsutil, you could use du, like this:
gsutil du -sh gs://YOUR_BUCKET/YOUR_DIRECTORY
The -s flag gives you only the total size of the directory; if you remove it, you will also see the size of each file inside. The -h flag returns the size in a human-readable format (KiB, MiB, etc.); if it is not present, the size is displayed in bytes.
But since you mention you can't do it like that, you will have to use getMetadata, as already mentioned.
This simple script will print to the console the size, in bytes, of every file in the folder you choose. You can later modify it to add up those sizes (a sketch of that is included after the script).
var http = require("http");
// @google-cloud/storage v1.x API. Newer versions of the library are imported as
// `const {Storage} = require('@google-cloud/storage')` and created with `new Storage(...)`.
const Storage = require('@google-cloud/storage');

const projectId = "PROJECT-ID";
const bucketName = "BUCKET-NAME";

const storage = Storage({
  projectId: projectId,
});

// Only list objects whose names start with this prefix (the "folder").
const options = {
  prefix: "FOLDER-IN-BUCKET/"
};

http.createServer(function (request, response) {
  storage
    .bucket(bucketName)
    .getFiles(options)
    .then(results => {
      const files = results[0];
      // For each object in the folder, fetch its metadata and print its size and name.
      files.forEach(file => {
        storage
          .bucket(bucketName)
          .file(file.name)
          .getMetadata()
          .then(metadata_results => {
            const metadata = metadata_results[0];
            console.log(metadata.size); // size in bytes, returned as a string
            console.log(metadata.name);
          })
          .catch(metadata_err => {
            console.error(metadata_err);
          });
      });
    })
    .catch(err => {
      console.error(err);
    });

  response.writeHead(200, {'Content-Type': 'text/plain'});
  response.end('Hello World\n');
}).listen(8080);

console.log("Hello World");
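If what you actually need is the total size of the folder, here is a minimal sketch of how the script above can be modified to add up those sizes (getFolderSize is just an illustrative name; it reuses the storage, bucketName and options values defined above):

function getFolderSize() {
  return storage
    .bucket(bucketName)
    .getFiles(options)
    .then(results => {
      const files = results[0];
      // Fetch the metadata of every object in the folder in parallel.
      return Promise.all(files.map(file => file.getMetadata()));
    })
    .then(metadataResults => {
      // metadata.size is a string, so convert it to a number before adding.
      return metadataResults.reduce(
        (total, result) => total + Number(result[0].size),
        0
      );
    });
}

getFolderSize()
  .then(totalBytes => console.log('Total folder size in bytes: ' + totalBytes))
  .catch(err => console.error(err));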
Don't forget to add the dependency for Cloud Storage in your package.json.
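If it is not there yet, you can add it with npm install --save @google-cloud/storage, which will leave an entry like this in package.json (the version below is only an example, yours may differ):

"dependencies": {
  "@google-cloud/storage": "^1.5.1"
}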
You can also follow this suggestion: enable daily storage logs and read those logs from Node.
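For completeness, here is a rough sketch of what reading one of those logs from Node could look like. It assumes logging is already enabled and that the daily storage log is delivered to a LOGS-BUCKET-NAME bucket as a small CSV with a "bucket","storage_byte_hours" header; the log object name below is a placeholder:

storage
  .bucket("LOGS-BUCKET-NAME")
  .file("BUCKET-NAME_storage_2018_01_01_08_00_00_0000_v0") // placeholder name of a daily storage log
  .download()
  .then(data => {
    const contents = data[0].toString();
    // The second line of the CSV holds the values; storage_byte_hours is the second field.
    const values = contents.split('\n')[1].split(',');
    const storageByteHours = values[1].replace(/"/g, '');
    console.log('storage_byte_hours: ' + storageByteHours);
  })
  .catch(err => console.error(err));

Keep in mind that those logs report storage for the whole bucket, not for a single folder.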