Code:
static void MultipleFilesToSingleFile(string dirPath, string filePattern, string destFile)
{
    string[] fileAry = Directory.GetFiles(dirPath, filePattern);
    Console.WriteLine("Total File Count : " + fileAry.Length);
    using (TextWriter tw = new StreamWriter(destFile, true))
    {
        foreach (string filePath in fileAry)
        {
            using (TextReader tr = new StreamReader(filePath))
            {
                tw.WriteLine(tr.ReadToEnd());
                tr.Close();
                tr.Dispose();
            }
            Console.WriteLine("File Processed : " + filePath);
        }
        tw.Close();
        tw.Dispose();
    }
}
I need to optimize this because it is very slow: 45 XML files with an average size of 40-50 MB take 3 minutes.
Please note: 45 files averaging 45 MB is just one example; it could be n files of size m, where n is in the thousands and m averages 128 KB. In short, the numbers can vary.
Can you offer any ideas on how to optimize this?
Answer 0 (Score: 29)
Why not use the Stream.CopyTo() method?
private static void CombineMultipleFilesIntoSingleFile(string inputDirectoryPath, string inputFileNamePattern, string outputFilePath)
{
    string[] inputFilePaths = Directory.GetFiles(inputDirectoryPath, inputFileNamePattern);
    Console.WriteLine("Number of files: {0}.", inputFilePaths.Length);
    using (var outputStream = File.Create(outputFilePath))
    {
        foreach (var inputFilePath in inputFilePaths)
        {
            using (var inputStream = File.OpenRead(inputFilePath))
            {
                // Buffer size can be passed as the second argument.
                inputStream.CopyTo(outputStream);
            }
            Console.WriteLine("The file {0} has been processed.", inputFilePath);
        }
    }
}
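As the comment in the loop notes, CopyTo has an overload that takes an explicit buffer size. A minimal sketch of the same loop body with a larger buffer (the 1 MB figure is only an illustrative starting point, not a measured optimum):

using (var inputStream = File.OpenRead(inputFilePath))
{
    // 1 MB buffer instead of the default; tune this for your workload.
    inputStream.CopyTo(outputStream, 1024 * 1024);
}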
Answer 1 (Score: 2)
There are a few things you can do:
In my experience, default buffer sizes can be increased, with noticeable benefit, up to about 120K. I suspect setting a large buffer on all streams would be the easiest and most noticeable performance booster (a combined sketch appears after this list):
new System.IO.FileStream("File.txt", System.IO.FileMode.Open, System.IO.FileAccess.Read, System.IO.FileShare.Read, 150000);
Use the Stream class, not the StreamReader class.
The explicit Close()/Dispose() calls in the question's code are redundant; the using statement already takes care of them.
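Putting these suggestions together, here is a minimal sketch of the merge loop with a large buffer on both streams (the 150000 figure is taken from the snippet above and is a starting point to measure against, not a proven optimum):

static void MergeWithLargeBuffers(string dirPath, string filePattern, string destFile)
{
    // Large explicit buffers on both the input and output streams.
    using (var output = new FileStream(destFile, FileMode.Append, FileAccess.Write, FileShare.None, 150000))
    {
        foreach (string filePath in Directory.GetFiles(dirPath, filePattern))
        {
            using (var input = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read, 150000))
            {
                input.CopyTo(output);
            }
        }
    }
}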
Answer 2 (Score: 2)
One option is to shell out to the copy command and let it do what it is good at.
Something like:
static void MultipleFilesToSingleFile(string dirPath, string filePattern, string destFile)
{
    var cmd = new ProcessStartInfo("cmd.exe",
        String.Format("/c copy {0} {1}", filePattern, destFile));
    cmd.WorkingDirectory = dirPath;
    cmd.UseShellExecute = false;
    Process.Start(cmd);
}
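One caveat: Process.Start returns as soon as cmd.exe is launched, so the method can return before the merged file is complete. If the caller needs the result right away, wait for the process to exit:

using (var process = Process.Start(cmd))
{
    process.WaitForExit();
}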
Answer 3 (Score: 2)
I would use a BlockingCollection to do the reading, so that you can read and write concurrently.
Clearly you should write to a separate physical disk to avoid hardware contention.
This code preserves the order of the files.
Reading is faster than writing, so there is no need for parallel reads.
Again, because reading is faster, bound the size of the collection so that reading cannot get too far ahead of writing.
The problem with the naive approach of reading the single next file in parallel while writing the current one is that the file sizes differ: writing a small file is faster than reading a big one.
I use this pattern to read and parse text on T1 and insert it into SQL on T2.
public void WriteFiles()
{
    using (BlockingCollection<string> bc = new BlockingCollection<string>(10))
    {
        // play with 10 if you have several small files then a big file
        // write can get ahead of read if not enough are queued
        TextWriter tw = new StreamWriter(@"c:\temp\alltext.text", true);
        // clearly you want to write to a different physical disk
        // ideally write to solid state even if you move the files to regular disk when done

        // Spin up a Task to populate the BlockingCollection
        using (Task t1 = Task.Factory.StartNew(() =>
        {
            string dir = @"c:\temp\";
            string fileText;
            int minSize = 100000; // play with this
            StringBuilder sb = new StringBuilder(minSize);
            string[] fileAry = Directory.GetFiles(dir, @"*.txt");
            foreach (string fi in fileAry)
            {
                Debug.WriteLine("Add " + fi);
                fileText = File.ReadAllText(fi);
                //bc.Add(fi); for testing just add filepath
                if (fileText.Length > minSize)
                {
                    if (sb.Length > 0)
                    {
                        bc.Add(sb.ToString());
                        sb.Clear();
                    }
                    bc.Add(fileText); // could be really big so don't hit sb
                }
                else
                {
                    sb.Append(fileText);
                    if (sb.Length > minSize)
                    {
                        bc.Add(sb.ToString());
                        sb.Clear();
                    }
                }
            }
            if (sb.Length > 0)
            {
                bc.Add(sb.ToString());
                sb.Clear();
            }
            bc.CompleteAdding();
        }))
        {
            // Spin up a Task to consume the BlockingCollection
            using (Task t2 = Task.Factory.StartNew(() =>
            {
                string text;
                try
                {
                    while (true)
                    {
                        text = bc.Take();
                        Debug.WriteLine("Take " + text);
                        tw.WriteLine(text);
                    }
                }
                catch (InvalidOperationException)
                {
                    // An InvalidOperationException means that Take() was called on a completed collection
                    Debug.WriteLine("That's All!");
                    tw.Close();
                    tw.Dispose();
                }
            }))
            {
                Task.WaitAll(t1, t2);
            }
        }
    }
}
Answer 4 (Score: 1)
The solution sergey-brunov posted for merging 2 GB files needs about 2 GB of RAM to do the job. I have made some changes to optimize it further, and it now needs 350 MB of RAM to merge 2 GB of files.
private static void CombineMultipleFilesIntoSingleFile(string inputDirectoryPath, string inputFileNamePattern, string outputFilePath)
{
    string[] inputFilePaths = Directory.GetFiles(inputDirectoryPath, inputFileNamePattern);
    Console.WriteLine("Number of files: {0}.", inputFilePaths.Length);
    foreach (var inputFilePath in inputFilePaths)
    {
        using (var outputStream = File.AppendText(outputFilePath))
        {
            outputStream.WriteLine(File.ReadAllText(inputFilePath));
            Console.WriteLine("The file {0} has been processed.", inputFilePath);
        }
    }
}
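Note that File.ReadAllText still materializes each input file as one string, so peak memory tracks the largest single file. For comparison, a sketch of the same append-per-file structure done as a raw byte copy, which keeps memory flat regardless of file size (unlike WriteLine above, it does not insert a newline between files):

private static void AppendFilesStreaming(string inputDirectoryPath, string inputFileNamePattern, string outputFilePath)
{
    foreach (var inputFilePath in Directory.GetFiles(inputDirectoryPath, inputFileNamePattern))
    {
        using (var outputStream = new FileStream(outputFilePath, FileMode.Append))
        using (var inputStream = File.OpenRead(inputFilePath))
        {
            // Stream bytes directly instead of loading the whole file into a string.
            inputStream.CopyTo(outputStream);
        }
    }
}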
Answer 5 (Score: 0)
// Binary file copy
public static void mergeFiles(string strFileIn1, string strFileIn2, string strFileOut, out string strError)
{
    strError = String.Empty;
    try
    {
        using (FileStream streamIn1 = File.OpenRead(strFileIn1))
        using (FileStream streamIn2 = File.OpenRead(strFileIn2))
        using (FileStream writeStream = File.OpenWrite(strFileOut))
        {
            // create a buffer to hold the bytes; it could be made bigger
            byte[] buffer = new Byte[1024];
            int bytesRead;
            // while the Read method returns bytes, keep writing them to the output stream
            while ((bytesRead = streamIn1.Read(buffer, 0, 1024)) > 0)
            {
                writeStream.Write(buffer, 0, bytesRead);
            }
            while ((bytesRead = streamIn2.Read(buffer, 0, 1024)) > 0)
            {
                writeStream.Write(buffer, 0, bytesRead);
            }
        }
    }
    catch (Exception ex)
    {
        strError = ex.Message;
    }
}
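A hypothetical call site (the file paths are placeholders):

string strError;
mergeFiles(@"c:\temp\part1.bin", @"c:\temp\part2.bin", @"c:\temp\merged.bin", out strError);
if (strError != String.Empty)
{
    Console.WriteLine("Merge failed: " + strError);
}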