如何使用Node.js将本地Avro文件上传到Google BigQuery

时间:2016-06-09 11:21:02

标签: node.js google-bigquery avro

我正在尝试使用Node.js将本地Avro文件上传到Google BigQuery:
// Authenticate against Google Cloud using a service-account key file.
var gcloud = require('gcloud')({
  keyFilename: './config/keyfile.json',
  projectId: 'my-project'
});

var bigquery = gcloud.bigquery();

// Handles to the target dataset and table.
var schoolsDataset = bigquery.dataset('my_dataset');
var schoolsTable = schoolsDataset.table('person_data');

// Start a load job for the local Avro file and log the job handle,
// then any error, exactly as returned by the callback.
schoolsTable.import('./examples/yob1900.avro', function (err, job) {
  console.log(job);
  console.log(err);
});

我收到了以下错误:

{ [ApiError: No schema specified on job or table.]
  code: 400,
  errors: 
   [ { domain: 'global',
       reason: 'invalid',
       message: 'No schema specified on job or table.' } ],
  response: undefined,
  message: 'No schema specified on job or table.' }

如何添加schema(模式)?('yob1900'文件是从BigQuery文档下载的:https://cloud.google.com/bigquery/loading-data#loading_json_files )

1 个答案:

答案 0 :(得分:0)

这里是答案:

// Authenticate against Google Cloud using a service-account key file.
var gcloud = require('gcloud')({
  keyFilename: '../config/keyfile.json',
  projectId: 'my-project'
});

// FIX: the original `fs = require('fs')` assigned to an implicit global,
// which throws in strict mode. Declare it properly. The unused
// `require('request')` has been removed.
var fs = require('fs');

var bigquery = gcloud.bigquery();

var dataset = bigquery.dataset('my_dataset');
var table = dataset.table('my_table');

// Declaring the source format answers the "No schema specified" error:
// Avro files are self-describing, so BigQuery derives the schema from
// the file once it knows the format is AVRO.
var metadata = {
  sourceFormat: 'AVRO'
};

fs.createReadStream('./yob1900.avro')
  .on('error', console.log)   // FIX: surface local file-read failures
  .pipe(table.createWriteStream(metadata))
  .on('error', console.log)   // FIX: surface upload/API failures
  .on('complete', function (job) {
    // The write stream's `complete` event yields a BigQuery load Job;
    // the job still runs server-side, so wait for its own completion.
    job
      .on('error', console.log)
      .on('complete', function (jobMetadata) {
        console.log('job completed', jobMetadata);
      });
  });