我有一个http处理程序,我将每个请求存储到内存中的并发队列集合中。经过一段时间后,我将集合大量插入数据库。
这是一个坏主意吗?因为请求量很大,在我看来这似乎是更好的方法。
由于线程的原因,我确实观察到一些差异(收到的请求数与数据库中实际存储的条数不一致)。在刷新并发集合时,我会先对其加锁,批量插入其内容,然后清空集合,最后释放锁。
有更好的做法吗?或者你做过类似的事情?
答案 0 :(得分:2)
抱歉,但我会说这是一个坏主意。有以下问题:
我们编写了Web应用程序,在峰值负载下每秒写入1000行到SQL Server数据库。
首先尝试尽可能简单地编写应用程序,然后对其进行性能测试。
您可以插入数据库的速度取决于您的硬件,但您可以在程序中执行以下操作:
答案 1 :(得分:1)
我确实看到因线程而导致的一些差异
这里最简单的做法是使用两个队列并交替轮换:一个用于接收,一个用于插入。您只需要在接收时加锁,几乎不会产生争用。
答案 2 :(得分:1)
我做过与您描述的完全相同的事情,代码见下文。它是线程安全的,并且提供了一个 Flush 方法,用于把所有尚未写入的数据强制刷入数据库。一旦待写对象达到阈值数量,它就会把队列(在我的实现里是一个 List)交给另一个线程去保存。注意它使用 ManualResetEvent 来处理收尾时的数据刷新(WaitHandle.WaitAll 一次最多只能等待 64 个句柄,所以当待写的后台任务超过 64 个时会改用手动等待;不过除非您的数据库真的很慢,否则这种情况几乎不会发生)。这段代码曾处理过流入其中的数百万条记录(从内存写入约 2000 万行大约需要 5 分钟,但它与数据库运行在同一台服务器上,因此没有网络开销)。SQL Server 通过 SqlBulkCopy 对象配合 IDataReader 每秒可以轻松处理数千行,所以它应该扛得住您的请求负载(当然这取决于您写入的内容和您的数据库,但我认为这段代码能胜任!)。
此外,为了配合批量写入,我创建了一个 IDataReader 的最小实现来流式传输我的数据。您需要针对自己的需求参照下面的代码来实现它。
/// <summary>
/// Thread-safe staging buffer that accumulates records and hands them off
/// in batches to background thread-pool workers for bulk insertion.
/// Call <see cref="Flush"/> once queueing is finished to write the final
/// partial batch and wait for all outstanding saves to complete.
/// </summary>
public class DataImporter<T>
{
    public DataImporter(string tableName, string readerName)
    {
        _tableName = tableName;
        _readerName = readerName;
    }
    /// <summary>
    /// This is the size of our bulk staging list.
    /// </summary>
    /// <remarks>
    /// Note that the SqlBulkCopy object has a batch size property, which may not be the same as this value,
    /// so records may not be going into the database in sizes of this staging value.
    /// </remarks>
    private int _bulkStagingListSize = 20000;
    // Completion handles of in-flight background saves. Added on the queueing
    // thread and removed on thread-pool threads, so EVERY access is guarded by
    // _tasksLock (the original mutated this List from multiple threads with no
    // lock, which can corrupt it and lose records).
    private List<ManualResetEvent> _tasksWaiting = new List<ManualResetEvent>();
    private readonly object _tasksLock = new object();
    private string _tableName = String.Empty;
    private string _readerName = String.Empty;
    /// <summary>
    /// Queues one record. When the staging list reaches _bulkStagingListSize
    /// items it is dispatched to a background save and a fresh list is started.
    /// </summary>
    public void QueueForImport(T record)
    {
        lock (_listLock)
        {
            _items.Add(record);
            // ">=" so batches are exactly _bulkStagingListSize items
            // (the original ">" made every batch one item larger).
            if (_items.Count >= _bulkStagingListSize)
            {
                SaveItems(_items);
                _items = new List<T>();
            }
        }
    }
    /// <summary>
    /// This method should be called at the end of the queueing work to ensure to clear down our list,
    /// then blocks until every background save has signalled completion.
    /// </summary>
    public void Flush()
    {
        lock (_listLock)
        {
            if (_items.Count > 0)
            {
                SaveItems(_items);
                _items = new List<T>();
            }
        }
        // WaitHandle.WaitAll accepts at most 64 handles, so poll while more
        // than 64 saves are outstanding, then wait on the remainder. The wait
        // happens OUTSIDE the locks so workers can still remove their handles.
        while (true)
        {
            ManualResetEvent[] pending = null;
            lock (_tasksLock)
            {
                if (_tasksWaiting.Count == 0)
                {
                    return;
                }
                if (_tasksWaiting.Count <= 64)
                {
                    pending = _tasksWaiting.ToArray();
                }
            }
            if (pending == null)
            {
                Thread.Sleep(2000);
            }
            else
            {
                WaitHandle.WaitAll(pending);
                return;
            }
        }
    }
    /// <summary>
    /// Dispatches the given batch to a thread-pool worker. Must be called
    /// while holding _listLock so the batch handoff is atomic.
    /// </summary>
    private void SaveItems(List<T> items)
    {
        ManualResetEvent evt = new ManualResetEvent(false);
        lock (_tasksLock)
        {
            _tasksWaiting.Add(evt);
        }
        // Read the batch we were handed, not the live _items field: the
        // original used _items here, which only worked by accident because
        // the caller happened to swap the field afterwards.
        IDataReader reader = DataReaderFactory.GetReader<T>(_readerName, items);
        Tuple<ManualResetEvent, IDataReader> stateInfo = new Tuple<ManualResetEvent, IDataReader>(evt, reader);
        ThreadPool.QueueUserWorkItem(new WaitCallback(saveData), stateInfo);
    }
    /// <summary>
    /// Thread-pool callback: bulk-loads one batch, then removes and signals
    /// its completion handle so Flush can observe progress.
    /// </summary>
    private void saveData(object info)
    {
        using (new ActivityTimer("Saving bulk data to " + _tableName))
        {
            Tuple<ManualResetEvent, IDataReader> stateInfo = (Tuple<ManualResetEvent, IDataReader>)info;
            IDataReader r = stateInfo.Item2;
            try
            {
                Database.DataImportStagingDatabase.BulkLoadData(r, _tableName);
            }
            catch (Exception ex)
            {
                // Deliberately best-effort so one failed batch cannot kill the
                // worker — but swallowing silently loses data invisibly.
                // TODO(review): log 'ex' and surface the failed batch count.
            }
            finally
            {
                lock (_tasksLock)
                {
                    _tasksWaiting.Remove(stateInfo.Item1);
                }
                stateInfo.Item1.Set();
            }
        }
    }
    private object _listLock = new object();
    private List<T> _items = new List<T>();
}
上面用到的 DataReaderFactory 只是负责选出用于流式传输的正确 IDataReader 实现,如下所示:
/// <summary>
/// Selects the IDataReader implementation that streams the given record
/// list, keyed by the reader's type name. Returns null for unknown names.
/// </summary>
internal static class DataReaderFactory
{
    internal static IDataReader GetReader<T>(string typeName, List<T> items)
    {
        if (typeName == "ProductRecordDataReader")
        {
            return new ProductRecordDataReader(items as List<ProductRecord>) as IDataReader;
        }
        if (typeName == "RetailerPriceRecordDataReader")
        {
            return new RetailerPriceRecordDataReader(items as List<RetailerPriceRecord>) as IDataReader;
        }
        // No reader registered for this name.
        return null;
    }
}
我在这种情况下使用的数据读取器实现(此代码适用于任何数据读取器)如下所示:
/// <summary>
/// This class creates a data reader for ProductRecord data. This is used to stream the records
/// to the SqlBulkCopy object.
/// </summary>
/// <summary>
/// This class creates a data reader for ProductRecord data. This is used to stream the records
/// to the SqlBulkCopy object.
/// </summary>
/// <remarks>
/// Only the members a bulk-copy consumer needs (FieldCount, Read, GetName,
/// GetOrdinal, GetValue, GetValues, Close/IsClosed) are implemented; the rest
/// of the IDataReader surface throws NotImplementedException as before.
/// </remarks>
public class ProductRecordDataReader : IDataReader
{
    // Single source of truth for column names in ordinal order. FieldCount,
    // GetName and GetOrdinal all derive from this table, so the three can no
    // longer drift out of sync (the original duplicated the mapping in two
    // hand-written 14-way switches plus a hard-coded count).
    private static readonly string[] FieldNames =
    {
        "ProductSku",
        "UPC",
        "EAN",
        "ISBN",
        "ProductName",
        "ShortDescription",
        "LongDescription",
        "DFFCategoryNumber",
        "DFFManufacturerNumber",
        "ManufacturerPartNumber",
        "ManufacturerModelNumber",
        "ProductImageUrl",
        "LowestPrice",
        "HighestPrice",
    };
    public ProductRecordDataReader(List<ProductRecord> products)
    {
        // Snapshot the list so later mutation by the caller cannot change
        // what gets streamed.
        _products = products.ToList();
    }
    private readonly List<ProductRecord> _products;
    // Index of the row currently exposed by GetValue; -1 until first Read().
    private int _currentRow = -1;
    private bool _isClosed;
    public int FieldCount
    {
        get { return FieldNames.Length; }
    }
    #region IDataReader Members
    public void Close()
    {
        // Nothing to release; just record the state for IsClosed.
        _isClosed = true;
    }
    /// <summary>
    /// Advances to the next row; returns false once the list is exhausted.
    /// </summary>
    public bool Read()
    {
        if (_currentRow + 1 < _products.Count)
        {
            _currentRow++;
            return true;
        }
        return false;
    }
    public int RecordsAffected
    {
        get { throw new NotImplementedException(); }
    }
    public string GetName(int i)
    {
        // Out-of-range ordinals return null, matching the original's
        // switch default.
        if (i < 0 || i >= FieldNames.Length)
        {
            return null;
        }
        return FieldNames[i];
    }
    public int GetOrdinal(string name)
    {
        // Array.IndexOf returns -1 for unknown names, matching the
        // original's switch default.
        return Array.IndexOf(FieldNames, name);
    }
    public object GetValue(int i)
    {
        ProductRecord p = _products[_currentRow];
        switch (i)
        {
            case 0:
                return p.ProductSku;
            case 1:
                return p.UPC;
            case 2:
                return p.EAN;
            case 3:
                return p.ISBN;
            case 4:
                return p.ProductName;
            case 5:
                return p.ShortDescription;
            case 6:
                return p.LongDescription;
            case 7:
                return p.DFFCategoryNumber;
            case 8:
                return p.DFFManufacturerNumber;
            case 9:
                return p.ManufacturerPartNumber;
            case 10:
                return p.ManufacturerModelNumber;
            case 11:
                return p.ProductImageUrl;
            case 12:
                return p.LowestPrice;
            case 13:
                return p.HighestPrice;
            default:
                return null;
        }
    }
    #endregion
    #region IDisposable Members
    public void Dispose()
    {
        Close();
    }
    #endregion
    #region IDataRecord Members
    public bool NextResult()
    {
        throw new NotImplementedException();
    }
    public int Depth
    {
        get { throw new NotImplementedException(); }
    }
    public DataTable GetSchemaTable()
    {
        throw new NotImplementedException();
    }
    public bool IsClosed
    {
        // Previously threw; some consumers check this before reading.
        get { return _isClosed; }
    }
    public bool GetBoolean(int i)
    {
        throw new NotImplementedException();
    }
    public byte GetByte(int i)
    {
        throw new NotImplementedException();
    }
    public long GetBytes(int i, long fieldOffset, byte[] buffer, int bufferoffset, int length)
    {
        throw new NotImplementedException();
    }
    public char GetChar(int i)
    {
        throw new NotImplementedException();
    }
    public long GetChars(int i, long fieldoffset, char[] buffer, int bufferoffset, int length)
    {
        throw new NotImplementedException();
    }
    public IDataReader GetData(int i)
    {
        throw new NotImplementedException();
    }
    public string GetDataTypeName(int i)
    {
        throw new NotImplementedException();
    }
    public DateTime GetDateTime(int i)
    {
        throw new NotImplementedException();
    }
    public decimal GetDecimal(int i)
    {
        throw new NotImplementedException();
    }
    public double GetDouble(int i)
    {
        throw new NotImplementedException();
    }
    public Type GetFieldType(int i)
    {
        throw new NotImplementedException();
    }
    public float GetFloat(int i)
    {
        throw new NotImplementedException();
    }
    public Guid GetGuid(int i)
    {
        throw new NotImplementedException();
    }
    public short GetInt16(int i)
    {
        throw new NotImplementedException();
    }
    public int GetInt32(int i)
    {
        throw new NotImplementedException();
    }
    public long GetInt64(int i)
    {
        throw new NotImplementedException();
    }
    public string GetString(int i)
    {
        throw new NotImplementedException();
    }
    public int GetValues(object[] values)
    {
        // Previously threw; fill as many slots as the caller provided,
        // up to FieldCount, per the IDataRecord contract.
        int count = Math.Min(values.Length, FieldNames.Length);
        for (int i = 0; i < count; i++)
        {
            values[i] = GetValue(i);
        }
        return count;
    }
    public bool IsDBNull(int i)
    {
        throw new NotImplementedException();
    }
    public object this[string name]
    {
        get { throw new NotImplementedException(); }
    }
    public object this[int i]
    {
        get { throw new NotImplementedException(); }
    }
    #endregion
}
最后,批量加载数据方法如下所示:
/// <summary>
/// Streams every row from the supplied reader into the given destination
/// table via SqlBulkCopy.
/// </summary>
/// <param name="reader">Forward-only source of rows; read until exhausted.</param>
/// <param name="tableName">Destination table name.</param>
public void BulkLoadData(IDataReader reader, string tableName)
{
    using (SqlConnection cnn = new SqlConnection(cnnString))
    // SqlBulkCopy implements IDisposable and holds server-side resources;
    // the original never disposed it.
    using (SqlBulkCopy copy = new SqlBulkCopy(cnn))
    {
        copy.DestinationTableName = tableName;
        copy.BatchSize = 10000;
        cnn.Open();
        copy.WriteToServer(reader);
    }
}
然而,尽管如此,我还是建议您不要在 ASP.NET 中直接使用此代码,原因正如另一个答案中有人指出的(特别是 IIS 会回收工作进程)。我建议您先用一个非常轻量级的队列把请求数据发送到另一个不会被重启的服务(我们使用 ZeroMQ 从我正在编写的 ASP.NET 应用程序中传输请求和日志数据……非常高效)。
麦克
答案 3 :(得分:0)
你还可以做的另一件事是先把数据写入磁盘上的本地数据库(例如 SQLite),以避免应用程序池回收导致数据丢失,然后再把它转发到你的 SQL Server 数据库。
我使用反应扩展来创建插入队列并以良好的速度工作。