I'm trying to transfer a file through an IHttpHandler, and the code is fairly simple. However, a single transfer uses about 20% CPU; if I scale that up to 20 simultaneous transfers, CPU usage gets very high. Is there a better way to do this that keeps CPU usage lower? The client code sends the file in 64 KB chunks, one at a time.
public void ProcessRequest(HttpContext context)
{
    if (context.Request.Params["secretKey"] == null)
    {
    }
    else
    {
        accessCode = context.Request.Params["secretKey"].ToString();
    }

    if (accessCode == "test")
    {
        string fileName = context.Request.Params["fileName"].ToString();
        byte[] buffer = Convert.FromBase64String(context.Request.Form["data"]);
        string fileGuid = context.Request.Params["smGuid"].ToString();
        string user = context.Request.Params["user"].ToString();
        SaveFile(fileName, buffer, user);
    }
}

public void SaveFile(string fileName, byte[] buffer, string user)
{
    string DirPath = @"E:\Filestorage\" + user + @"\";
    if (!Directory.Exists(DirPath))
    {
        Directory.CreateDirectory(DirPath);
    }

    string FilePath = @"E:\Filestorage\" + user + @"\" + fileName;
    FileStream writer = new FileStream(FilePath, File.Exists(FilePath) ? FileMode.Append : FileMode.Create, FileAccess.Write, FileShare.ReadWrite);
    writer.Write(buffer, 0, buffer.Length);
    writer.Close();
}
Here is my client code:
// Set the file name from the object
string FileName;
FileName = System.IO.Path.GetFileName(pubAttFullPath.ToString());

// Open the file
string file = System.IO.Path.GetFileName(pubAttFullPath.ToString());
FileStream fileStream = new FileStream(file, FileMode.Open, FileAccess.Read);

// Chunk size that will be sent to the server
int chunkSize = 65536;

// Unique file name
string fileName = smGuid.ToString() + "_" + FileName;

int totalChunks = (int)Math.Ceiling((double)fileStream.Length / chunkSize);

// Loop through the whole stream and send it chunk by chunk
for (int i = 0; i < totalChunks; i++)
{
    bool doReceive = true;
    int cpt = 0;
    do
    {
        int startIndex = i * chunkSize;
        int endIndex = (int)(startIndex + chunkSize > fileStream.Length ? fileStream.Length : startIndex + chunkSize);
        int length = endIndex - startIndex;

        byte[] bytes = new byte[length];
        // Seek back to the start of the chunk so a retry re-reads the same bytes
        fileStream.Seek(startIndex, SeekOrigin.Begin);
        fileStream.Read(bytes, 0, bytes.Length);

        // Request URL, Method=POST, length and data
        string requestURL = "http://localhost:16935/Transfer.doit";
        HttpWebRequest request = (HttpWebRequest)WebRequest.Create(requestURL);

        // Wait 5 minutes for an answer before closing the connection
        request.Timeout = 300000;
        request.Method = "POST";
        request.ContentType = "application/x-www-form-urlencoded";

        // The chunk (buffer) is converted to a Base64 string that will be converted back to bytes in the handler
        string requestParameters = @"fileName=" + fileName + @"&secretKey=test" + @"&currentChunk=" + i + @"&totalChunks=" + totalChunks + @"&smGuid=" + smGuid
            + "&user=" + userSID.ToString() +
            "&data=" + HttpUtility.UrlEncode(Convert.ToBase64String(bytes));

        // Finally the whole request is converted to bytes that will be transferred to the HttpHandler
        byte[] byteData = Encoding.UTF8.GetBytes(requestParameters);
        request.ContentLength = byteData.Length;

        try
        {
            Stream writer = request.GetRequestStream();
            writer.Write(byteData, 0, byteData.Length);
            writer.Close();

            // Here we receive the response from the HttpHandler
            StreamReader stIn = new StreamReader(request.GetResponse().GetResponseStream());
            string strResponse = stIn.ReadToEnd();
            stIn.Close();
            doReceive = true;
        }
        catch (WebException webException)
        {
            if (webException.Status == WebExceptionStatus.ConnectFailure ||
                webException.Status == WebExceptionStatus.ConnectionClosed ||
                webException.Status == WebExceptionStatus.ReceiveFailure ||
                webException.Status == WebExceptionStatus.SendFailure ||
                webException.Status == WebExceptionStatus.Timeout)
            {
                Thread.Sleep(5000);
                doReceive = false;
                cpt++;
            }
            else
            {
                // If the exception is not one of those, get out
                doReceive = true;
            }
        }
        catch (Exception)
        {
            doReceive = true;
        }
    }
    // Will try to send the current chunk 3 times before giving up
    // I couldn't test this part, so try it and give me feedback
    while (doReceive == false && cpt < 3);
}
Answer 0 (score: 1)
I haven't tested this theory, but working with FromBase64String could be the cause. I found this case where someone ran out of memory using that approach.
You could try FromBase64Transform instead, which is designed to process data as a stream; a rough sketch of that idea follows.
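For example, here is a minimal sketch of a SaveFile-style helper that pushes the base64 text through FromBase64Transform and appends the decoded bytes directly to the file, rather than materializing the whole chunk with Convert.FromBase64String. The method name AppendBase64Chunk and the 4 KB block size are my own choices, not from your code, and I haven't profiled this:

using System;
using System.IO;
using System.Security.Cryptography;
using System.Text;

public static void AppendBase64Chunk(string filePath, string base64Data)
{
    // Decode the base64 text in small blocks through FromBase64Transform and
    // write the decoded bytes straight to the file, avoiding one large
    // intermediate byte[] per request.
    using (FileStream output = new FileStream(filePath,
        File.Exists(filePath) ? FileMode.Append : FileMode.Create,
        FileAccess.Write, FileShare.Read))
    using (FromBase64Transform transform = new FromBase64Transform())
    using (CryptoStream decoder = new CryptoStream(output, transform, CryptoStreamMode.Write))
    {
        byte[] base64Bytes = Encoding.ASCII.GetBytes(base64Data);
        const int blockSize = 4096;
        for (int offset = 0; offset < base64Bytes.Length; offset += blockSize)
        {
            int count = Math.Min(blockSize, base64Bytes.Length - offset);
            decoder.Write(base64Bytes, offset, count);
        }
        decoder.FlushFinalBlock();
    }
}

Whether this actually lowers CPU is something you'd have to measure; it mainly avoids allocating and filling a large decoded array for every chunk.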
Or, if you don't actually need base64 at all, take a look at this solution from Scott Hanselman.
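I can't reproduce that article here, but the general idea of dropping base64 would be to POST each chunk as raw bytes (for example with ContentType "application/octet-stream") and have the handler copy Request.InputStream straight to disk. A rough, untested sketch, with my own query-string parameter names and no validation of fileName or user (and the secretKey check omitted for brevity):

public void ProcessRequest(HttpContext context)
{
    // Metadata travels on the query string; the request body is the raw chunk
    // bytes, so there is no base64 decode and no url-decoding of a large form field.
    string fileName = context.Request.QueryString["fileName"];
    string user = context.Request.QueryString["user"];

    string dirPath = Path.Combine(@"E:\Filestorage", user);
    string filePath = Path.Combine(dirPath, fileName);
    Directory.CreateDirectory(dirPath);

    using (FileStream output = new FileStream(filePath,
        File.Exists(filePath) ? FileMode.Append : FileMode.Create,
        FileAccess.Write, FileShare.Read))
    {
        byte[] buffer = new byte[8192];
        int read;
        while ((read = context.Request.InputStream.Read(buffer, 0, buffer.Length)) > 0)
        {
            output.Write(buffer, 0, read);
        }
    }
}

On the client you would then write the chunk bytes directly to GetRequestStream() instead of building the url-encoded base64 string. That removes the base64 encode/decode and the UrlEncode pass on every chunk, which is where I'd expect much of the CPU to be going.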