I'm using google-cloud-pubsub with the Google Cloud Pub/Sub Emulator in Django. I'm trying to create a topic this way:
publisher = pubsub.PublisherClient()
topic_path = publisher.topic_path('my-project', 'my-new-topic')
topic = publisher.create_topic(topic_path)
topic.publish(topic, request.data['url'].encode('utf-8'))
But it gives me this exception:
StatusCode.PERMISSION_DENIED, User not authorized to perform this action.
I'd like to know whether any configuration is needed besides the PUBSUB_EMULATOR_HOST environment variable. Do I have to set up some permissions, even for the emulator? The tutorial doesn't explain any further.
Thanks in advance.
Answer 0 (score: 0)
4 years late :) but this should fix it:
$(gcloud beta emulators pubsub env-init)
Why: the command from the Google tutorial (gcloud beta emulators pubsub env-init) for some reason does not set the environment variable (echo $PUBSUB_EMULATOR_HOST prints nothing), so your Pub/Sub API calls go to the real service with a service account instead of the local emulator, which triggers the permission error. Set the variable manually, or evaluate the command's output as shown above.
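If evaluating that command's output is not possible (say, Django is launched by a process manager), the variable can also be set from Python before the client is created. A minimal sketch; localhost:8085 is an assumption (the emulator's default address), so adjust it to wherever your emulator actually listens:

import os

# Assumption: emulator running on its default address. This must be set
# before PublisherClient is constructed, or calls go to the real service.
os.environ['PUBSUB_EMULATOR_HOST'] = 'localhost:8085'

from google.cloud import pubsub

publisher = pubsub.PublisherClient()
topic_path = publisher.topic_path('my-project', 'my-new-topic')
topic = publisher.create_topic(topic_path)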
import pyodbc  # DBAPI driver behind the mssql+pyodbc engine
from sqlalchemy import create_engine
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from datetime import datetime
import os
import glob
import re
import csv
def rtn_date(dtStr):
    # Parse a 'dd/mm/yyyy' string from the CSV; blanks and bad values become None.
    if dtStr == '':
        return None
    else:
        try:
            rtnDate = datetime.strptime(dtStr, '%d/%m/%Y')
            return rtnDate
        except ValueError:
            return None
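# Example (hypothetical inputs, given the '%d/%m/%Y' format above):
#   rtn_date('31/12/2020')  -> datetime.datetime(2020, 12, 31, 0, 0)
#   rtn_date('')            -> None
#   rtn_date('not-a-date')  -> None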
log_file = r'XXXXXX.txt'
database = 'XXXXXXX'
driver_name = 'ODBC Driver 11 for SQL Server'

# Trusted (Windows auth) connection; the placeholder stands for the
# server/database part of the URL (or a DSN name). Spaces in the driver
# name are '+'-escaped inside the query string.
engine = create_engine('mssql+pyodbc://{0}?driver={1}&Trusted_Connection=yes'.format(
    database, driver_name.replace(' ', '+')))
connection = engine.connect()

# Reflect the existing table into a mapped class instead of declaring it by hand.
Base = automap_base()
Base.prepare(engine, reflect=True)
date_test_table = Base.classes.XXXXXXXXX

Session = sessionmaker(bind=engine)
session = Session()

# Folder containing all the CSV files to load.
folder = r'XXXXXXX'
os.chdir(folder)
extension = 'csv'
skipped = []
all_filenames = [i for i in glob.glob('*.{}'.format(extension))]
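# e.g. all_filenames -> ['prices_20200131.csv', 'month_end.csv'] (hypothetical
# names; digits in a filename mark it as a daily 'Normal' file below).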
for i in range(len(all_filenames)):
    try:
        # Filenames containing digits are daily ('Normal') extracts;
        # the rest are month-end extracts.
        if len(re.findall(r'\d+', str(all_filenames[i]))) > 0:
            strType = 'Normal'
        else:
            strType = 'MonthEnd'
        buffer = []
        with open(all_filenames[i], 'r') as csv_file:
            csv_reader = csv.reader(csv_file)
            firstline = True
            for row in csv_reader:
                if firstline:  # skip the header row
                    firstline = False
                    continue
                buffer.append({
                    'Date': row[0],
                    'ISIN': row[1],
                    'CUSIP': row[2],
                    'Ticker': row[3],
                    'Issuer': row[4],
                    'Coupon': row[5],
                    'Final_Maturity': row[6],
                    'Workout_date': row[7],
                    'Expected_Remaining_Life': row[8],
                    'Time_to_Maturity': row[9],
                    'Coupon_Frequency': row[10],
                    'Notional_Amount_Unconstrained': row[11],
                    'Notional_Amount_Constrained': row[12],
                    'PIK_Original_Amount_Issued': row[13],
                    'PIK_Factor': row[14],
                    'Redemption_Factor': row[15],
                    'Bid_Price': row[16],
                    'Ask_Price': row[17],
                    'Accrued_Interest': row[18],
                    'Coupon_Payment': row[19],
                    'Coupon_Adjustment': row[20],
                    'Ex_Dividend_Flag': row[21],
                    'Dirty_Price': row[22],
                    'Market_Value_Unconstrained': row[23],
                    'Market_Value_Constrained': row[24],
                    'Cash_Payment_Unconstrained': row[25],
                    'Cash_Payment_Constrained': row[26],
                    'Street_Yield_to_Maturity': row[27],
                    'Annual_Yield_to_Maturity': row[28],
                    'Semi_Annual_Yield_to_Maturity': row[29],
                    'Street_Yield_to_Worst': row[30],
                    'Annual_Yield_to_Worst': row[31],
                    'Semi_Annual_Yield_to_Worst': row[32],
                    'OAS_Street_Yield': row[33],
                    'OAS_Annual_Yield': row[34],
                    'OAS_Semi_Annual_Yield': row[35],
                    'Annual_Benchmark_Spread': row[36],
                    'Semi_Annual_Benchmark_Spread': row[37],
                    'Z_Spread': row[38],
                    'OAS_Spread': row[39],
                    'Asset_Swap_Margin': row[40],
                    'Simple_Margin': row[41],
                    'Discount_Margin': row[42],
                    'Duration_to_Maturity': row[43],
                    'Street_Modified_Duration_to_Maturity': row[44],
                    'Annual_Modified_Duration_to_Maturity': row[45],
                    'Semi_Annual_Modified_Duration_to_Maturity': row[46],
                    'Duration_to_Worst': row[47],
                    'Street_Modified_Duration_to_Worst': row[48],
                    'Annual_Modified_Duration_to_Worst': row[49],
                    'Semi_Annual_Modified_Duration_to_Worst': row[50],
                    'OAS_Duration': row[51],
                    'OAS_Modified_Duration': row[52],
                    'OAS_Annual_Modified_Duration': row[53],
                    'OAS_Semi_Annual_Modified_Duration': row[54],
                    'Spread_Duration': row[55],
                    'Z_Spread_Duration': row[56],
                    'Street_Convexity_to_Maturity': row[57],
                    'Annual_Convexity_to_Maturity': row[58],
                    'Semi_Annual_Convexity_to_Maturity': row[59],
                    'Street_Convexity_to_Worst': row[60],
                    'Annual_Convexity_to_Worst': row[61],
                    'Semi_Annual_Convexity_to_Worst': row[62],
                    'OAS_Convexity': row[63],
                    'Benchmark_ISIN': row[64],
                    'Daily_Return': row[65],
                    'Month_to_Date_Return': row[66],
                    'Quarter_to_Date_Return': row[67],
                    'Year_to_Date_Return': row[68],
                    'Daily_Excess_Return': row[69],
                    'Month_to_date_Excess_Return': row[70],
                    'Level_0': row[71],
                    'Level_1': row[72],
                    'Level_2': row[73],
                    'Level_3': row[74],
                    'Level_4': row[75],
                    'Level_5': row[76],
                    'Debt': row[77],
                    'Rating': row[78],
                    'Is_Callable': row[79],
                    'Is_Core_index': row[80],
                    'Is_Crossover': row[81],
                    'Is_Fixed_to_Float': row[82],
                    'Is_FRN': row[83],
                    'Is_Hybrid': row[84],
                    'Is_Perpetual': row[85],
                    'Is_PIK': row[86],
                    'Is_Sinking': row[87],
                    'Is_Zero_Coupon': row[88],
                    '_1_3_years': row[89],
                    '_1_5_years': row[90],
                    '_1_10_years': row[91],
                    '_3_5_years': row[92],
                    '_5_7_years': row[93],
                    '_5_10_years': row[94],
                    '_7_10_years': row[95],
                    '_5_years': row[96],
                    '_10_years': row[97],
                    'Source': str(all_filenames[i]),
                    'File_Type': strType
                })
        # One bulk insert and one commit per file, so a bad file only
        # loses its own rows.
        session.bulk_insert_mappings(date_test_table, buffer)
        session.commit()
    except Exception as e:
        print(e)
        skipped.append('{0}\n'.format(str(all_filenames[i])))
    print('processed {0} out of {1} files'.format(str(i + 1), str(len(all_filenames))))
session.close()

if len(skipped) > 0:
    with open(log_file, 'w') as skip_log:
        skip_log.writelines(skipped)
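One observation on the script: rtn_date is defined but never called, so the date columns are inserted as raw strings. If they should land in the table as real datetimes, the date-like keys could be routed through the helper. A hypothetical fragment (assuming row[0], row[6], and row[7] are the 'dd/mm/yyyy' fields the helper was written for):

# Hypothetical: convert date-like columns while building each row;
# the original script inserts the raw strings instead.
'Date': rtn_date(row[0]),
'Final_Maturity': rtn_date(row[6]),
'Workout_date': rtn_date(row[7]),
# ... remaining keys unchanged ...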