我们一直在研究Google App脚本,以基于查询结果自动生成Big Query中存储的不同Analytics实时数据集的报告。 基本上,每隔10分钟,我就会清空报表仪表板使用的表,触发对我们不同表的查询,将每一行流式传输到新表,并将结果写入仪表板表,依此类推。
问题在于,在该脚本的某些执行中,出现以下错误:
TypeError: Cannot read the property "length" from undefined.{
insertId: "c7sl56g1qhnngx"
resource: {
labels: {
function_name: "exportData"
invocation_type: "event"
我想说这发生在大约30%的执行中,所以我很难真正理解问题的根源。
完整代码可在下面找到。有人可以帮助我了解我们哪里做错了吗?
非常感谢!
安东尼
var jobReferences = [];
var executions = [];
var dataStudioExecutions = [];
// Defining BQ Target Table Details
var project = 'PROJECT';
var targetDataset = 'Analytics';
var table = 'realtime_rollup_data';
function exportData() {
// Datasets to iterate over
var datasets = [
['ID', "countryName "],
['ID', "countryName "],
['ID', "countryName "],
['ID', "countryName "],
['ID', "countryName "],
['ID', "countryName "],
['ID', "countryName "],
['ID', "countryName "],
['ID', "countryName "],
['ID', "countryName "],
['ID', "countryName "],
['ID', "countryName "],
['ID', "countryName "],
['ID', "countryName "],
['ID', "countryName "],
['ID', "countryName "],
['ID', "countryName "],
['ID', "countryName "],
['ID', "countryName "]
];
// Run the script from 8 AM to 2 PM
function officeHours() {
var nowH=new Date().getHours();
var nowD=new Date().getDay();
console.log('Hour : '+nowH)
if(nowH>14||nowH<8) {
return
}
function exportDataToTable() {
jobReferences.length = 0;
executions.length = 0;
// Emptying out the table while preserving the schema
var deleteDataQuery = 'SELECT * FROM `PROJECT.Analytics.realtime_rollup_data` LIMIT 0';
var deleteJob = {
'configuration': {
'query': {
'query': deleteDataQuery,
'useLegacySql': false,
'writeDisposition':'WRITE_TRUNCATE',
'destinationTable': {
'projectId': project,
'datasetId': targetDataset,
'tableId': table
}
}
}
};
var deleteQueryResults = BigQuery.Jobs.insert(deleteJob, project);
Utilities.sleep(200);
var deleteJobId = deleteQueryResults.jobReference.jobId;
var getDeleteJobStatus = BigQuery.Jobs.getQueryResults(project, deleteJobId).jobComplete;
var deleteJobStatus;
if (getDeleteJobStatus == true) {
deleteJobStatus = 'O.K.'
}
else {
deleteJobStatus = 'Echec'
};
console.log('Statut job suppression données : ' + deleteJobStatus);
executions.push('Statut job suppression données : ' + deleteJobStatus);
// Appending data from each rollup
datasets.forEach(function(value) {
var datasetId = value[0];
var countryName = value[1];
// Defining the queries
var query = 'SELECT TO_JSON_STRING(t,true)' +
'FROM (SELECT \"' + countryName + '\" as country,' +
' EXTRACT(HOUR FROM TIMESTAMP_SECONDS(visitStartTime) AT TIME ZONE "Europe/Paris") AS hour,' +
' (SELECT MAX(sourcePropertyInfo.sourcePropertyDisplayName)' +
' FROM UNNEST(hit.hits) AS hits) AS service,' +
' (SELECT MAX(CASE WHEN index = 1 THEN value END) FROM UNNEST(hit.customDimensions)) AS cd1,' +
' (SELECT MAX(CASE WHEN index = 2 THEN value END) FROM UNNEST(hit.customDimensions)) AS cd2,' +
' IFNULL(SUM(totals.visits),0) as sessions,' +
' IFNULL(SUM(totals.transactions),0) as transactions' +
' FROM \`PROJECT.' + datasetId + '.ga_realtime_view\` AS hit' +
' LEFT JOIN unnest(hit.customDimensions)' +
' WHERE value != "null" AND index = 1 AND EXTRACT(DAY FROM TIMESTAMP_SECONDS(visitStartTime) AT TIME ZONE "Europe/Paris") = EXTRACT(DAY FROM CURRENT_DATE())' +
' GROUP BY hour, cd1, cd2, service' +
' ORDER BY hour) as t';
var queryFrance = 'SELECT TO_JSON_STRING(t,true)' +
'FROM (SELECT \"' + countryName + '\" as country,' +
' EXTRACT(HOUR FROM TIMESTAMP_SECONDS(visitStartTime) AT TIME ZONE "Europe/Paris") AS hour,' +
' (SELECT MAX(sourcePropertyInfo.sourcePropertyDisplayName)' +
' FROM UNNEST(hit.hits) AS hits) AS service,' +
' (SELECT MAX(CASE WHEN index = 2 THEN value END) FROM UNNEST(hit.customDimensions)) AS cd2,' +
' (SELECT MAX(CASE WHEN index = 3 THEN value END) FROM UNNEST(hit.customDimensions)) AS cd3,' +
' IFNULL(SUM(totals.visits),0) as sessions,' +
' IFNULL(SUM(totals.transactions),0) as transactions' +
' FROM \`PROJECT.' + datasetId + '.ga_realtime_view\` AS hit' +
' LEFT JOIN unnest(hit.customDimensions)' +
' WHERE value != "null" AND index = 2 AND EXTRACT(DAY FROM TIMESTAMP_SECONDS(visitStartTime) AT TIME ZONE "Europe/Paris") = EXTRACT(DAY FROM CURRENT_DATE())' +
' GROUP BY hour, cd2, cd3, service' +
' ORDER BY hour) as t';
if (countryName == "FRA") {
// Job Definition for France (different Custom Dimension indexes)
var job = {
'configuration': {
'query': {
'query': queryFrance,
'useLegacySql': false
}
}
};
}
else {
// Job Definition Remaining countries
var job = {
'configuration': {
'query': {
'query': query,
'useLegacySql': false
}
}
}
};
// Job Execution
var queryResults = BigQuery.Jobs.insert(job, project);
var jobId = queryResults.jobReference.jobId;
jobReferences.push([countryName, jobId])
console.log(countryName + ' job ID : ' + jobId);
Utilities.sleep(2000);
var getResults = BigQuery.Jobs.getQueryResults(project, jobId);
var rows = getResults.rows;
console.log(countryName + ' : ' + rows.length);
var finalRows = [];
// Iterating over each row of the query results and constructing the resource parameter
for (i = 0; i < rows.length; i++) {
var values = JSON.parse(rows[i]["f"][0]["v"]);
var formattedRows = {
"json" : values
}
finalRows.push(formattedRows);
}
// Declaring the resource we will send to the API
var resource = {
"rows": finalRows
}
// Streaming insert
var insertRows = BigQuery.Tabledata.insertAll(resource, project, targetDataset, table)
Utilities.sleep(2000);
});
};
function showQueryResults() {
// Retrieving status for each query job and logging it
jobReferences.forEach(function(result) {
var countryName = result[0];
var jobId = result[1];
var getJobStatus = BigQuery.Jobs.getQueryResults(project, jobId).jobComplete;
var bytesProcessed = BigQuery.Jobs.getQueryResults(project, jobId).totalBytesProcessed;
var mbProcessed = Math.round(bytesProcessed/1048576);
var jobStatus;
if (getJobStatus == true) {
jobStatus = 'O.K.'
}
else {
jobStatus = 'Echec'
};
console.log('Statut job ' + countryName + ' : ' + jobStatus + ' (' + mbProcessed + ' Mb processed)');
executions.push('Statut job ' + countryName + ' : ' + jobStatus + ' (' + mbProcessed + ' Mb processed)');
});
};
function dataStudioTable() {
// Requesting the data from the streaming table and sending it to a new table
// Two reasons for that: 1) it will avoid users from having partial data while the requests are running
// 2) displaying data from the streaming table directly is unstable/incomplete as there are delays
var tableId = 'realtime_rollup_data_datastudio';
var dataStudioQuery = 'SELECT * FROM `wide-oasis-135923.Analytics.realtime_rollup_data`';
var dataStudioJob = {
'configuration': {
'query': {
'query': dataStudioQuery,
'useLegacySql': false,
'writeDisposition':'WRITE_TRUNCATE',
'destinationTable': {
'projectId': project,
'datasetId': targetDataset,
'tableId': tableId
}
}
}
};
var dataStudioQueryResults = BigQuery.Jobs.insert(dataStudioJob, project);
Utilities.sleep(200);
var dataStudioJobId = dataStudioQueryResults.jobReference.jobId;
var getDataStudioJobStatus = BigQuery.Jobs.getQueryResults(project, dataStudioJobId).jobComplete;
var dataStudioJobStatus;
if (getDataStudioJobStatus == true) {
dataStudioJobStatus = 'O.K.'
}
else {
dataStudioJobStatus = 'Echec'
};
console.log('Statut job écriture données table Data Studio : ' + dataStudioJobStatus);
dataStudioExecutions.push('Statut job écriture données table Data Studio : ' + dataStudioJobStatus);
};
try {
exportDataToTable();
showQueryResults();
dataStudioTable();
}
catch (err) {
var t = HtmlService.createTemplateFromFile('email');
t.error = err;
t.data = executions;
t.datasets = datasets;
t.datastudio = dataStudioExecutions;
var html = t.evaluate().getContent();
GmailApp.sendEmail('email1, email2',
'[Script Real time] Fail de l\'exécution',
'Test',
{
name: 'Google Apps Script',
htmlBody: html
});
console.error(err);
};
};
officeHours();
}