AdWords script export to BigQuery "Empty response"

Time: 2016-04-22 04:16:56

Tags: google-bigquery google-adwords

Using the AdWords script below to export to BigQuery, the call to BigQuery.Jobs.insert causes the script to terminate with "Empty response". What could be the reason the call gets no response?

    var ACCOUNTS = ['xxx','xxx']; 

    var CONFIG = {
      BIGQUERY_PROJECT_ID: 'xxx',
      BIGQUERY_DATASET_ID: 'xxx',

      // Truncate existing data, otherwise will append.
      TRUNCATE_EXISTING_DATASET: true,
      TRUNCATE_EXISTING_TABLES: true,

      // Back up reports to Google Drive.
      WRITE_DATA_TO_DRIVE: false,
      // Folder to put all the intermediate files.
      DRIVE_FOLDER: 'Adwords Big Query Test',

      // Default date range over which statistics fields are retrieved.
      DEFAULT_DATE_RANGE: '20140101,20140105',

      // Lists of reports and fields to retrieve from AdWords.
      REPORTS: [{NAME: 'KEYWORDS_PERFORMANCE_REPORT',
         CONDITIONS: 'WHERE Impressions>0',
         FIELDS: {'AccountDescriptiveName' : 'STRING',
                  'Date' : 'STRING',
                  'CampaignId' : 'STRING',
                  'CampaignName' : 'STRING',
                  'AdGroupId' : 'STRING',
                  'AdGroupName' : 'STRING',
                  'Id' : 'STRING',
                  'Criteria' : 'STRING',
                  'KeywordMatchType' : 'STRING',
                  'AdNetworkType1' : 'STRING',
                  'AdNetworkType2' : 'STRING',
                  'Device' : 'STRING',
                  'AveragePosition' : 'STRING',
                  'QualityScore' : 'STRING',
                  'CpcBid' : 'STRING',
                  'TopOfPageCpc' : 'STRING',
                  'Impressions' : 'STRING',
                  'Clicks' : 'STRING',
                  'ConvertedClicks' : 'STRING',
                  'Cost' : 'STRING',
                  'Conversions' : 'STRING'
                 }
        }],

      RECIPIENT_EMAILS: [
        'xxx',
      ]
    };

    function main() {
      createDataset();
      for (var i = 0; i < CONFIG.REPORTS.length; i++) {
        var reportConfig = CONFIG.REPORTS[i];
        createTable(reportConfig);
      }

      folder = getDriveFolder();

      // Get an account iterator.
      var accountIterator = MccApp.accounts().withIds(ACCOUNTS).withLimit(10).get();
      var jobIdMap = {};
      while (accountIterator.hasNext()) {
         // Get the current account.
         var account = accountIterator.next();

         // Select the child account.
         MccApp.select(account);

         // Run reports against child account.
         var accountJobIds = processReports(folder, account.getCustomerId());
         jobIdMap[account.getCustomerId()] = accountJobIds;
      }

      waitTillJobsComplete(jobIdMap);
      sendEmail(jobIdMap);
    }


    function createDataset() {
       if (datasetExists()) {
        if (CONFIG.TRUNCATE_EXISTING_DATASET) {
          BigQuery.Datasets.remove(CONFIG.BIGQUERY_PROJECT_ID,
            CONFIG.BIGQUERY_DATASET_ID, {'deleteContents' : true});
          Logger.log('Truncated dataset.');
        } else {
          Logger.log('Dataset %s already exists.  Will not recreate.',
           CONFIG.BIGQUERY_DATASET_ID);
          return;
        }
      }

      // Create new dataset.
      var dataSet = BigQuery.newDataset();
      dataSet.friendlyName = CONFIG.BIGQUERY_DATASET_ID;
      dataSet.datasetReference = BigQuery.newDatasetReference();
      dataSet.datasetReference.projectId = CONFIG.BIGQUERY_PROJECT_ID;
      dataSet.datasetReference.datasetId = CONFIG.BIGQUERY_DATASET_ID;

      dataSet = BigQuery.Datasets.insert(dataSet, CONFIG.BIGQUERY_PROJECT_ID);
      Logger.log('Created dataset with id %s.', dataSet.id);
    }

    /**
     * Checks if dataset already exists in project.
     *
     * @return {boolean} Returns true if dataset already exists.
     */
    function datasetExists() {
      // Get a list of all datasets in project.
      var datasets = BigQuery.Datasets.list(CONFIG.BIGQUERY_PROJECT_ID);
      var datasetExists = false;
      // Iterate through each dataset and check for an id match.
      if (datasets.datasets != null) {
        for (var i = 0; i < datasets.datasets.length; i++) {
          var dataset = datasets.datasets[i];
          if (dataset.datasetReference.datasetId == CONFIG.BIGQUERY_DATASET_ID) {
            datasetExists = true;
            break;
          }
        }
      }
      return datasetExists;
    }

    function createTable(reportConfig) {
      if (tableExists(reportConfig.NAME)) {
        if (CONFIG.TRUNCATE_EXISTING_TABLES) {
          BigQuery.Tables.remove(CONFIG.BIGQUERY_PROJECT_ID,
              CONFIG.BIGQUERY_DATASET_ID, reportConfig.NAME);
      Logger.log('Truncated table %s.', reportConfig.NAME);
        } else {
          Logger.log('Table %s already exists.  Will not recreate.',
              reportConfig.NAME);
          return;
        }
      }

      // Create new table.
      var table = BigQuery.newTable();
      var schema = BigQuery.newTableSchema();
      var bigQueryFields = [];

      // Add account column to table.
      var accountFieldSchema = BigQuery.newTableFieldSchema();
      accountFieldSchema.description = 'AccountId';
      accountFieldSchema.name = 'AccountId';
      accountFieldSchema.type = 'STRING';
      bigQueryFields.push(accountFieldSchema);

      // Add each field to table schema.
      var fieldNames = Object.keys(reportConfig.FIELDS);
      for (var i = 0; i < fieldNames.length; i++) {
        var fieldName = fieldNames[i];
        var bigQueryFieldSchema = BigQuery.newTableFieldSchema();
        bigQueryFieldSchema.description = fieldName;
        bigQueryFieldSchema.name = fieldName;
        bigQueryFieldSchema.type = reportConfig.FIELDS[fieldName];

        bigQueryFields.push(bigQueryFieldSchema);
      }

      schema.fields = bigQueryFields;
      table.schema = schema;
      table.friendlyName = reportConfig.NAME;

      table.tableReference = BigQuery.newTableReference();
      table.tableReference.datasetId = CONFIG.BIGQUERY_DATASET_ID;
      table.tableReference.projectId = CONFIG.BIGQUERY_PROJECT_ID;
      table.tableReference.tableId = reportConfig.NAME;

      table = BigQuery.Tables.insert(table, CONFIG.BIGQUERY_PROJECT_ID,
          CONFIG.BIGQUERY_DATASET_ID);

      Logger.log('Created table with id %s.', table.id);
    }

    function tableExists(tableId) {
      // Get a list of all tables in the dataset.
      var tables = BigQuery.Tables.list(CONFIG.BIGQUERY_PROJECT_ID,
          CONFIG.BIGQUERY_DATASET_ID);
      var tableExists = false;
      // Iterate through each table and check for an id match.
      if (tables.tables != null) {
        for (var i = 0; i < tables.tables.length; i++) {
          var table = tables.tables[i];
          if (table.tableReference.tableId == tableId) {
            tableExists = true;
            break;
          }
        }
      }
      return tableExists;
    }

    function processReports(folder, accountId) {
      var jobIds = [];

      // Iterate over each report type.
      for (var i = 0; i < CONFIG.REPORTS.length; i++) {
        var reportConfig = CONFIG.REPORTS[i];
        Logger.log('Running report %s for account %s', reportConfig.NAME,
            accountId);
        // Get data as csv
        var csvData = retrieveAdwordsReport(reportConfig, accountId);

        // If configured, back up data.
        if (CONFIG.WRITE_DATA_TO_DRIVE) {
          var fileName = reportConfig.NAME + '_' + accountId;
          folder.createFile(fileName, csvData, MimeType.CSV);
          Logger.log('Exported data to Drive folder ' +
                 CONFIG.DRIVE_FOLDER + ' for report ' + fileName);
        }

        // Convert to Blob format.
        var blobData = Utilities.newBlob(csvData, 'application/octet-stream');
        // Load data
        var jobId = loadDataToBigquery(reportConfig, blobData);
        jobIds.push(jobId);
      }
      return jobIds;
    }

    function retrieveAdwordsReport(reportConfig, accountId) {
      var fieldNames = Object.keys(reportConfig.FIELDS);
      var report = AdWordsApp.report(
        'SELECT ' + fieldNames.join(',') +
        ' FROM ' + reportConfig.NAME + ' ' + reportConfig.CONDITIONS +
        ' DURING ' + CONFIG.DEFAULT_DATE_RANGE);
      var rows = report.rows();
      var csvRows = [];
      // Header row
      csvRows.push('AccountId,'+fieldNames.join(','));

      // Iterate over each row.
      while (rows.hasNext()) {
        var row = rows.next();
        var csvRow = [];
        csvRow.push(accountId);

        for (var i = 0; i < fieldNames.length; i++) {
          var fieldName = fieldNames[i];
          var fieldValue = row[fieldName].toString();
          var fieldType = reportConfig.FIELDS[fieldName];
          /* Strip off % and perform any other formatting here.
          if ((fieldType == 'FLOAT' || fieldType == 'INTEGER') &&
              fieldValue.charAt(fieldValue.length - 1) == '%') {
            fieldValue = fieldValue.substring(0, fieldValue.length - 1);
          }*/
          // Add double quotes to any string values.
          if (fieldType == 'STRING') {
            fieldValue = fieldValue.replace(',', ''); //Handle fields with comma in value returned
            fieldValue = fieldValue.replace('"', ''); //Handle fields with double quotes in value returned
            fieldValue = fieldValue.replace('+', ''); //Handle fields with "+" in value returned
            fieldValue = '"' + fieldValue + '"';
          }
          csvRow.push(fieldValue);
        }
        csvRows.push(csvRow.join(','));
      }
      Logger.log('Downloaded ' + reportConfig.NAME + ' for account ' + accountId +
          ' with ' + csvRows.length + ' rows.');
      return csvRows.join('\n');
    }

    function getDriveFolder() {
      var folders = DriveApp.getFoldersByName(CONFIG.DRIVE_FOLDER);
      // Assume first folder is the correct one.
      if (folders.hasNext()) {
       Logger.log('Folder name found.  Using existing folder.');
       return folders.next();
      }
      return DriveApp.createFolder(CONFIG.DRIVE_FOLDER);
    }

    function loadDataToBigquery(reportConfig, data) {
      function guid() {
        function s4() {
          return Math.floor((1 + Math.random()) * 0x10000)
            .toString(16)
            .substring(1);
        }
        return s4() + s4() + s4() + s4() + s4() + s4() + s4() + s4();
      }

      var makeId = guid();
      var job = {
        jobReference: {
          jobId: makeId
        },
        configuration: {
          load: {
            destinationTable: {
              projectId: CONFIG.BIGQUERY_PROJECT_ID,
              datasetId: CONFIG.BIGQUERY_DATASET_ID,
              tableId: reportConfig.NAME
            },
            skipLeadingRows: 1,
            ignoreUnknownValues: true,
            allowJaggedRows: true,
            allowLargeResults: true
          }
        }
      };

      var insertJob = BigQuery.Jobs.insert(job, CONFIG.BIGQUERY_PROJECT_ID, data);
      Logger.log('Load job started for %s. Check on the status of it here: ' +
          'https://bigquery.cloud.google.com/jobs/%s', reportConfig.NAME,
          CONFIG.BIGQUERY_PROJECT_ID);
      return job.jobReference.jobId;
    }

    function waitTillJobsComplete(jobIdMap) {
      var complete = false;
      var remainingJobs = [];
      var accountIds = Object.keys(jobIdMap);
      for (var i = 0; i < accountIds.length; i++){
        var accountJobIds = jobIdMap[accountIds[i]];
        remainingJobs.push.apply(remainingJobs, accountJobIds);
      }
      while (!complete) {
        if (AdWordsApp.getExecutionInfo().getRemainingTime() < 5){
          Logger.log('Script is about to timeout, jobs ' + remainingJobs.join(',') +
            ' are still incomplete.');
        }
        remainingJobs = getIncompleteJobs(remainingJobs);
        if (remainingJobs.length == 0) {
          complete = true;
        }
        if (!complete) {
          Logger.log(remainingJobs.length + ' jobs still being processed.');
          // Wait 5 seconds before checking status again.
          Utilities.sleep(5000);
        }
      }
      Logger.log('All jobs processed.');
    }

    function getIncompleteJobs(jobIds) {
      var remainingJobIds = [];
      for (var i = 0; i < jobIds.length; i++) {
        var jobId = jobIds[i];
        var getJob = BigQuery.Jobs.get(CONFIG.BIGQUERY_PROJECT_ID, jobId);
        if (getJob.status.state != 'DONE') {
          remainingJobIds.push(jobId);
        }
      }
      return remainingJobIds;
    }

It appears to be this call where the "Empty response" error is thrown:

    var insertJob = BigQuery.Jobs.insert(job, CONFIG.BIGQUERY_PROJECT_ID, data);

I have tried many tweaks, but the answer is not obvious to me. Thanks for your help!

3 Answers:

Answer 0 (score: 1)

I might be wrong, but I think the problem is with the jobId, because there is an issue with the guid() function: a missing "+" sign.

    function guid() {
      function s4() {
        return Math.floor((1 + Math.random()) * 0x10000)
          .toString(16)
          .substring(1);
      }
      return s4() + s4() + s4() + s4() + s4() s4() + s4() + s4();
    }

Why not use the jobId from the response instead, as below?

    var job = {
      configuration: {
        load: {
          destinationTable: {
            projectId: CONFIG.BIGQUERY_PROJECT_ID,
            datasetId: CONFIG.BIGQUERY_DATASET_ID,
            tableId: reportConfig.NAME
          },
          skipLeadingRows: 1,
          ignoreUnknownValues: true,
          allowJaggedRows: true,
          allowLargeResults: true
        }
      }
    };

    var insertJob = BigQuery.Jobs.insert(job, CONFIG.BIGQUERY_PROJECT_ID, data);
    Logger.log('Load job started for %s. Check on the status of it here: ' +
        'https://bigquery.cloud.google.com/jobs/%s', reportConfig.NAME,
        CONFIG.BIGQUERY_PROJECT_ID);
    return insertJob.jobReference.jobId;

In that case, I would suggest logging the jobId (makeId = guid()) and then checking the job status afterwards via the following link: https://cloud.google.com/bigquery/docs/reference/v2/jobs/get#try-it

Enter the ProjectId and JobId and you will at least see what is happening with your job!!
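For example, here is a minimal sketch (assuming the same CONFIG object and one of the job ids already collected in jobIdMap) that surfaces the load job's error details from inside the script instead of only its state:

    function logJobStatus(jobId) {
      // Fetch the job and log its state plus any error details BigQuery reports.
      var job = BigQuery.Jobs.get(CONFIG.BIGQUERY_PROJECT_ID, jobId);
      Logger.log('Job %s is %s.', jobId, job.status.state);
      if (job.status.errorResult) {
        Logger.log('Job failed: %s', job.status.errorResult.message);
      }
      if (job.status.errors) {
        for (var i = 0; i < job.status.errors.length; i++) {
          Logger.log('Load error: %s', job.status.errors[i].message);
        }
      }
    }

Called with one of the ids stored in jobIdMap, this shows the actual failure reason when the job finishes in an error state.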

Answer 1 (score: 0)

AdWords puts a dash (" - ") in for null values. If you define a report field as anything other than a STRING (e.g. FLOAT, INTEGER, etc.), the insert will fail because the dash cannot be converted to a float or an integer.

Try setting all of the fields to STRING and see if that resolves the issue.
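If typed columns are still wanted, one option, sketched below as a hypothetical helper that is not part of the original script, is to blank out the dash placeholder before the value is pushed onto csvRow in retrieveAdwordsReport; an empty CSV cell loads as NULL in a nullable column:

    // Hypothetical helper: drop the AdWords null placeholder (a dash) for
    // non-STRING columns so FLOAT/INTEGER fields load cleanly.
    function sanitizeFieldValue(fieldValue, fieldType) {
      if (fieldType != 'STRING' && fieldValue.replace(/[\s-]/g, '') == '') {
        return '';  // empty CSV cell -> NULL in BigQuery
      }
      return fieldValue;
    }

With that in place, csvRow.push(sanitizeFieldValue(fieldValue, fieldType)) would replace the plain csvRow.push(fieldValue) call.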

Answer 2 (score: 0)

Have you tried setting the WRITE_DATA_TO_DRIVE parameter to true to confirm that the report export succeeds? How large are the results? I get the same error when trying to insert more than about 10MB (roughly 25k rows, depending on the columns). If the file exported to Google Drive looks fine, you can add a condition to the while loop in retrieveAdwordsReport to limit the file size. There is also a post on https://groups.google.com/forum/#!forum/adwords-scripts mentioning an issue when the AdNetworkType columns are included: https://groups.google.com/forum/#!searchin/adwords-scripts/adnetworktype2%7Csort:relevance/adwords-scripts/yK57JHCt3Cw/Cl1SjFaQBQAJ

To limit the result size:

    var processedRows = 0;

    // Iterate over each row.
    while (rows.hasNext() && ++processedRows < 5000) {
      var row = rows.next();
      var csvRow = [];
      csvRow.push(accountId);

      if (processedRows % 1000 == 0) {
        Logger.log('Processed %s rows.', processedRows);
      }
      ...
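It may also help to confirm how close the payload actually is to that limit; a small diagnostic sketch (assuming it sits in processReports right after csvData is built, and that the roughly 10MB figure above applies):

    // Rough size check before handing the blob to BigQuery.Jobs.insert.
    var blobData = Utilities.newBlob(csvData, 'application/octet-stream');
    Logger.log('Report %s payload is about %s bytes.',
        reportConfig.NAME, blobData.getBytes().length);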