I am populating a Google Fusion Table and add about 2 entries to it.
I am using Google.Apis.Fusiontables.v2 1.10.0.50.
This worked fine for several months, but now I get a quota exceeded exception:
Google.Apis.Requests.RequestError Quota Exceeded [403] Errors [ Message[Quota Exceeded] Location[ - ] Reason[quotaExceeded] Domain[usageLimits] ]
This is not too surprising, since there is more data now.
However, when I look at the quotas at https://console.developers.google.com/apis/api/fusiontables/quotas, I do not see any limit being exceeded.
The global quota dashboard does not show any limit being hit either.
I thought it might be the 250 MB per table limit, but when I download the data source the CSV is only 40 MB. Then again, I believe that limit applies to reads, not writes?
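For reference, this is roughly how I look at the error details to figure out which quota is involved. It is only a sketch: DumpQuotaError is a made-up helper name, and it assumes the failure arrives as a Google.GoogleApiException (in my real code the error comes back via response.Exception of the resumable upload shown further down).

using System;
using Google;               // GoogleApiException
using Google.Apis.Requests; // SingleError

// Sketch: dump reason/domain/location of a quota failure so it can be matched
// against the limits shown in the developer console.
private static void DumpQuotaError(GoogleApiException ex)
{
    if (ex.Error == null || ex.Error.Errors == null)
    {
        Console.WriteLine(ex.Message);
        return;
    }

    foreach (SingleError error in ex.Error.Errors)
    {
        Console.WriteLine("Reason={0} Domain={1} Location={2} Message={3}",
            error.Reason, error.Domain, error.Location, error.Message);
    }
}

The code that writes and uploads the rows is below.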
private static async Task WriteItem()
{
    await WriteAsync("map_data", index.AllEdges<BusEdge<TId>>().Where(x => x.MinFrequentPriceInEur > 0), (busEdge, csvWriter) =>
    {
        WriteItem(busEdge, csvWriter);
        itemHandled();
    });
}
private static void WriteItem(BusEdge<TId> busEdge, CsvWriter csvWriter)
{
    csvWriter.WriteField(Source.Id.ToString());
    csvWriter.WriteField(sink.Id.ToString());
    csvWriter.WriteField(string.Format(CultureInfo.InvariantCulture, "{0} {1}",
        sink.Geolocation.Latitude,
        sink.Geolocation.Longitude));
    csvWriter.WriteField(sink.Population);
    foreach (Language language in Languages.AllLanguages)
    {
        csvWriter.WriteField(sink.DisplayName[language], true);
    }

    var priceWithDate =
        busEdge.Prices.GroupBy(x => x.Value.MinimumFrequentInEur)
            .OrderByDescending(x => x.Count()).SelectMany(x => x)
            .OrderBy(x => x.Key)
            .FirstOrDefault();
    double price = priceWithDate.Value.MinimumFrequentInEur;
    csvWriter.WriteField(Math.Round(price, 2));
    csvWriter.WriteField(Math.Round(CurrencyInfo.EUR.ConvertMoney(price, CurrencyInfo.GBP), 2));
    csvWriter.WriteField(Math.Round(CurrencyInfo.EUR.ConvertMoney(price, CurrencyInfo.HRK), 2));
    csvWriter.WriteField(Math.Round(CurrencyInfo.EUR.ConvertMoney(price, CurrencyInfo.PLN), 2));
    csvWriter.WriteField(priceWithDate.Key.ToString("s"));
}
public static async Task WriteAsync<T>(string tableName, IEnumerable<T> list, Action<T, CsvWriter> writerAction)
{
    using (FusiontablesService service = Connect())
    {
        var tables = service.Table.List().Execute();
        var tableId = tables.Items.Where(x => x.Name == tableName).Select(x => x.TableId).First();

        using (MemoryStream memoryStream = new MemoryStream())
        {
            bool hasWritten = false;
            Func<Task> uploadAsync = async () =>
            {
                if (memoryStream.Length > 0)
                {
                    memoryStream.Position = 0;
                    // The first chunk replaces the existing rows, later chunks are appended.
                    ResumableUpload<string> request =
                        hasWritten ?
                        (ResumableUpload<string>)service.Table.ImportRows(tableId, memoryStream, "application/octet-stream") :
                        (ResumableUpload<string>)service.Table.ReplaceRows(tableId, memoryStream, "application/octet-stream");
                    var response = await request.UploadAsync();
                    if (response.Exception != null)
                    {
                        throw response.Exception;
                    }

                    // Reset the buffer in place instead of assigning a new MemoryStream:
                    // the StreamWriter below keeps a reference to this instance, so a fresh
                    // stream would silently lose every row written after the first upload.
                    memoryStream.SetLength(0);
                    hasWritten = true;
                }
            };

            using (StreamWriter writer = new StreamWriter(memoryStream, Encoding.UTF8, 1024, true))
            using (CsvWriter csvWriter = new CsvWriter(writer, new CsvConfiguration { Delimiter = "," }))
            {
                foreach (T item in list)
                {
                    writerAction(item, csvWriter);
                    csvWriter.NextRecord();
                    writer.Flush();
                    if (memoryStream.Length >= Limit)
                    {
                        await uploadAsync();
                    }
                }
            }

            await uploadAsync();
        }
    }
}
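One improvement I have been considering myself is to retry with exponential backoff when the quota error occurs. Below is only a rough sketch of the idea: QuotaRetry, RetryOnQuotaErrorAsync, maxAttempts and the delay values are made-up names and numbers, and it assumes the error surfaces as a GoogleApiException with reason quotaExceeded or rateLimitExceeded.

using System;
using System.Linq;
using System.Threading.Tasks;
using Google;               // GoogleApiException
using Google.Apis.Requests; // SingleError

internal static class QuotaRetry
{
    // Retries an operation a few times with exponential backoff when the API
    // reports quotaExceeded / rateLimitExceeded; anything else is rethrown.
    public static async Task RetryOnQuotaErrorAsync(Func<Task> operation, int maxAttempts = 5)
    {
        for (int attempt = 1; ; attempt++)
        {
            try
            {
                await operation();
                return;
            }
            catch (GoogleApiException ex) when (attempt < maxAttempts && IsQuotaError(ex))
            {
                // Wait 2, 4, 8, 16, ... seconds before trying again.
                await Task.Delay(TimeSpan.FromSeconds(Math.Pow(2, attempt)));
            }
        }
    }

    private static bool IsQuotaError(GoogleApiException ex)
    {
        return ex.Error != null
            && ex.Error.Errors != null
            && ex.Error.Errors.Any(e => e.Reason == "quotaExceeded" || e.Reason == "rateLimitExceeded");
    }
}

I would call it as await RetryOnQuotaErrorAsync(uploadAsync); in the write loop, although that only helps if the quota is a rate limit and not a hard storage limit.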
Any suggestions on how to find out which concrete limit I am hitting? And any suggestions for improving the code?

Answer 0 (score: 0):
I emailed support and they told me they would look into it. I never heard anything back, but some time after that it worked again without any change on my side.
By now my data volume has grown by about 20% and it still works, so it looks like it was a Google bug.