Getting BigQuery results only on the 3rd run

Time: 2017-07-05 14:18:32

Tags: python google-app-engine google-bigquery google-oauth2 oauth2client

I have created a GAE application in Python. As part of it, I need to query a BigQuery table. I wrote the code below, but it only succeeds on the third run (it has to be run three times before it works). I tried to debug it by adding print statements and traced the problem to the getCredentials function.

I don't see anything wrong with the getCredentials function itself.

When I print the credentials, the output below only appears on the third run; the first two runs print nothing.

Credentials output:

<oauth2client.contrib.appengine.AppAssertionCredentials object at 0x04CF3B90>
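
One thing worth trying is to retry the credential fetch inside a single request instead of re-running the handler. The sketch below is only an experiment, not code from this app: it assumes the early attempts raise a transient exception from GoogleCredentials.get_application_default(), and the helper name, retry count and delay are arbitrary.

import time
from config import config
from oauth2client.client import GoogleCredentials

def getCredentialsWithRetry(retries=3, delay=2):
    # Hypothetical helper: retry fetching application-default credentials a few
    # times within the same request before giving up.
    last_error = None
    for _ in range(retries):
        try:
            creds = GoogleCredentials.get_application_default()
            return creds.create_scoped(config._projectscopes)
        except Exception as e:
            last_error = e
            time.sleep(delay)
    raise last_error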

Calling function:

class _fbGetReport(webapp.RequestHandler):
    def get(self):
        cfg = appsettings()
        result = fbbqfuns._fbbqquery(cfg)
        _response = json.loads(json.dumps(result['stacktrace']))

        for _r in _response['rows']:
            self.response.write(_r['f'][0]['v'] + "," + _r['f'][1]['v'] + "," + _r['f'][2]['v'])
            fbutils._downloadReport(_r['f'][0]['v'], _r['f'][1]['v'], _r['f'][2]['v'], cfg)
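
For reference, the f/v indexing above follows the shape of a BigQuery tabledata.list response; the values below are made up to match the report_run_id, loaddate and description columns selected by the query.

# Illustrative only; real values come from the xyz:temp.fbrunlog table.
example_tabledata_response = {
    "rows": [
        {"f": [{"v": "123456"}, {"v": "2017-07-05"}, {"v": "daily report"}]}
    ]
}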

BigQuery functions:

import os, json, traceback, sys, uuid, time
import httplib2
from config import config
from google.appengine.runtime import apiproxy_errors
from google.appengine.api import app_identity, mail, urlfetch
from googleapiclient import discovery
from oauth2client.client import GoogleCredentials
from fbconfigsettings import appsettings


def getCredentials():
    # Debug print added while investigating; it only produces output on the third run.
    print GoogleCredentials.get_application_default()
    return GoogleCredentials.get_application_default().create_scoped(config._projectscopes)

def getScopeInstance(scope, scopeversion):
    urlfetch.set_default_fetch_deadline(50)
    credentials = getCredentials()
    print credentials
    return discovery.build(scope, scopeversion, credentials=credentials)


def _fbbqquery(cfg):
    _response = {"stacktrace":None,"error":None}
    try:
        #cfg=appsettings()
        _bq   = getScopeInstance("bigquery","v2")
        _qry  = "SELECT report_run_id,loaddate,description from [xyz:temp.fbrunlog] group by report_run_id,loaddate,description having sum(download)=0 limit 1" 
        job_data = {
            'jobReference': {
                'projectId': cfg._projectid,
                'jobId': str(uuid.uuid4())   # the BigQuery API expects 'jobId' here, not 'job_id'
            },
            'configuration': {
                'query': {
                    'query': _qry,
                    'timeoutMs': 60000
                },
                'allowLargeResults': True,
                'priority': 'BATCH'
            }
        }
        print job_data
        _jbslist = _bq.jobs()
        _jbq_response = _jbslist.insert(projectId=cfg._projectid,body=job_data).execute()
        print _jbq_response
        _jbslist = _bq.jobs()
        _job = None
        while True:
            _job = _jbslist.get(projectId=cfg._projectid,jobId=_jbq_response['jobReference']['jobId']).execute()
            if 'DONE' == _job['status']['state']:
                break
            time.sleep(10)

        print _job
        if _job:
            #getTableData:
            _tableid   = _job['configuration']['query']['destinationTable']['tableId']
            _datasetid = _job['configuration']['query']['destinationTable']['datasetId']
            _projectid = _job['configuration']['query']['destinationTable']['projectId']

            _tbldata = _bq.tabledata().list(projectId=_projectid,
                                            datasetId=_datasetid,
                                            tableId=_tableid).execute()
            #print _tbldata

        _response["stacktrace"] = _tbldata # _job


    except:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        # Record the formatted traceback so the caller can see what failed.
        _response["error"] = ''.join(traceback.format_exception(exc_type, exc_value, exc_traceback))

    finally:
        # Note: returning from finally also suppresses any exception raised above.
        return _response
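
Separately from the third-run symptom, the polling loop above waits forever, which can outlast an App Engine request deadline. A bounded version is sketched below; wait_for_job is a hypothetical helper and the attempt count and sleep interval are arbitrary.

import time

def wait_for_job(bq, project_id, job_id, attempts=12, interval=5):
    # Poll jobs().get() a limited number of times instead of looping forever,
    # so the surrounding request cannot block indefinitely.
    for _ in range(attempts):
        job = bq.jobs().get(projectId=project_id, jobId=job_id).execute()
        if job['status']['state'] == 'DONE':
            return job
        time.sleep(interval)
    return None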

0 Answers:
