我正在使用 AJAX POST 把 Handsontable 中的列标题和数据发送回 ashx 处理程序。
// Serialize the grid's column headers and cell data, then POST them
// as JSON to the server-side save handler.
var payload = JSON.stringify({
    "columns": hot.getColHeader(),
    "rows": hot.getData()
});
$.ajax({
    url: "Scripts/SaveExcelData.ashx",
    type: 'POST',
    contentType: "application/json; charset=utf-8",
    data: payload,
    success: function (data) {}
});
目前我使用下面的代码反序列化请求，但一直想不出如何把这些行转换成 DataBaseRow 类对象的数组。
// Read the raw JSON body of the POST; rewind first in case the stream
// has already been read by the pipeline.
var jsonString = String.Empty;
context.Request.InputStream.Position = 0;
using (var inputStream = new StreamReader(context.Request.InputStream))
{
jsonString = inputStream.ReadToEnd();
// Deserialize into a dynamic object. NOTE(review): assumes the payload
// shape is {"columns":[...], "rows":[[...],...]} as posted by the client.
var results = JsonConvert.DeserializeObject<dynamic>(jsonString);
var columns = results.columns;
var rows = results.rows;
//use columns here to convert rows into DataBaseRow class
}
列如下:{["Col1","Col2","Col3"]}
行看起来像:{[["Val1","Val2","Val3"],["Val1","Val2","Val3"],["Val1","Val2","Val3"]]}
我该怎么做?
更新
我不再尝试把 dynamic 对象强制转换为 DataBaseRow 类，而是发现其实只需手动遍历数组值，并把它们逐个写入 DataBaseRow 类的新实例即可。
// Persist every posted row through Entity Framework: one DataBaseRow per
// grid row, mapped column-by-column via the switch, flushed by a single
// SaveChanges() at the end.
// NOTE(review): EF change tracking makes this slow for large uploads —
// the accepted answer below replaces it with SqlBulkCopy.
using (DBEntities edmx = new DBEntities())
{
foreach (var row in rows)
{
DataBaseRow dbr = new DataBaseRow();
edmx.DataaseRow.Add(dbr);
// Stamp the row with the load time. NOTE(review): DateTime.Now is server
// local time — confirm whether consumers expect UTC.
dbr.LoadedTime = DateTime.Now;
// Map each cell to the entity property whose header name matches.
// Cells whose header is not listed in the switch are silently dropped.
for (int i = 0; i < row.Count; i++)
{
string colval = row[i].ToString();
string colname = columns[i].ToString();
switch (colname)
{
case "Col1":
dbr.DBCol1 = colval;
break;
case "Col2":
dbr.DBCol2 = colval;
break;
case "Col3":
dbr.DBCol3 = colval;
break;
}
}
}
edmx.SaveChanges();
}
这可以工作，但速度很慢（参见代码中的计时注释）。有没有更快/更好的方式来处理这些数据？（如果有影响的话——我实际上有 14 列需要在 switch 中映射。）
答案 1（得分：1）：
于是，我在上面补充的更新里找到了这个问题的技术答案：直接把 dynamic 对象当作数组来引用即可，不要尝试强制转换它们。
然而，Entity Framework 在保存大型数据集时性能似乎很差。可以通过把保存操作分块、并为每个块重新创建上下文来提速。 https://stackoverflow.com/a/5942176/266592
我最后重写了这段代码：先把值插入 DataTable，然后使用 SqlBulkCopy 把记录保存到数据库。
// Deserialize the posted JSON, project it into a DataTable, and bulk-insert
// it with SqlBulkCopy inside a transaction (much faster than per-row EF).
var jsonString = String.Empty;
context.Request.InputStream.Position = 0;
using (var inputStream = new StreamReader(context.Request.InputStream))
{
    jsonString = inputStream.ReadToEnd();
    // NOTE(review): assumes the payload shape is {"columns":[...],"rows":[[...],...]}.
    var results = JsonConvert.DeserializeObject<dynamic>(jsonString);
    var columns = results.columns;
    var rows = results.rows;

    // Build a DataTable whose columns mirror the posted header names.
    var dt = new DataTable();
    for (int i = 0; i < columns.Count; i++)
    {
        dt.Columns.Add(columns[i].ToString());
    }
    foreach (var row in rows)
    {
        var datarow = dt.NewRow();
        for (int i = 0; i < row.Count; i++)
        {
            datarow[i] = row[i];
        }
        dt.Rows.Add(datarow);
    }

    using (var connection = new SqlConnection(ConnectionString))
    {
        connection.Open();
        // Scoping the transaction in a 'using' guarantees disposal (which
        // rolls back if neither Commit nor Rollback ran) and removes the
        // original risk of a null 'transaction' inside the catch block.
        using (var transaction = connection.BeginTransaction())
        {
            try
            {
                using (var sqlBulkCopy = new SqlBulkCopy(connection, SqlBulkCopyOptions.TableLock, transaction))
                {
                    sqlBulkCopy.DestinationTableName = "TABLENAME";
                    sqlBulkCopy.BatchSize = 100000;
                    sqlBulkCopy.BulkCopyTimeout = 0; // no timeout for large uploads
                    // Map by name so grid column order need not match the table.
                    foreach (DataColumn col in dt.Columns)
                    {
                        sqlBulkCopy.ColumnMappings.Add(col.ColumnName, col.ColumnName);
                    }
                    sqlBulkCopy.WriteToServer(dt);
                }
                transaction.Commit();
            }
            catch (Exception)
            {
                // BUG FIX: the original swallowed the exception after rolling
                // back, silently reporting success to the caller. Roll back
                // explicitly and rethrow so the failure is observable.
                transaction.Rollback();
                throw;
            }
        }
    }
}