Merge N log files while preserving chronological order

Date: 2017-04-05 21:42:35

Tags: c++ algorithm external-sorting

I have N log files produced by N different services running on our device. I want to merge the N files into a single file while keeping the entries in chronological order. File sizes range from a few KB to GBs.

All N log files share the same format, for example:

**********  LOGGING SESSION STARTED ************
* Hmsoa Version: 2.4.0.12
* Exe Path: c:\program files (x86)\silicon biosystems\deparray300a_driver\deparray300a_driver.exe
* Exe Version: 1.6.0.154
************************************************


TIME = 2017/02/01 11:12:12,180 ; THID = 4924; CAT = ; LVL = 1000; LOG = API 'Connect'->Enter;
TIME = 2017/02/01 11:12:12,196 ; THID = 4924; CAT = ; LVL = 1000; LOG = API 'Connect'->Exit=0;
TIME = 2017/02/01 11:12:12,196 ; THID = 4924; CAT = ; LVL = 1000; LOG = API 'CCisProxyLocal CONNECT - ok'->Enter;
TIME = 2017/02/01 11:12:12,196 ; THID = 4924; CAT = ; LVL = 1000; LOG = API 'CRecoveryAxesProxyLocal CONNECT - ok'->Enter;
TIME = 2017/02/01 11:12:12,196 ; THID = 4924; CAT = ; LVL = 1000; LOG = API 'CAmplifierProxyLocalV3 CONNECT - ok'->Enter;
TIME = 2017/02/01 11:12:12,196 ; THID = 4924; CAT = ; LVL = 1000; LOG = API 'SYSTEM_DIAGNOSIS_GET'->Enter;
TIME = 2017/02/01 11:12:12,211 ; THID = 4924; CAT = ; LVL = 1000; LOG = API 'SYSTEM_DIAGNOSIS_GET'->Exit=0;
TIME = 2017/02/01 11:12:12,211 ; THID = 4924; CAT = ; LVL = 1000; LOG = API 'LBL_SQUARE_SET'->Enter;
TIME = 2017/02/01 11:12:12,219 ; THID = 4924; CAT = ; LVL = 1000; LOG = API 'LBL_SQUARE_SET'->Exit=0;

Since I already have N separate files, what I have done so far is apply an external-sorting-style merge that reads one line from each file:

#include "stdafx.h"
#include "boost/regex.hpp"
#include "boost/lexical_cast.hpp"
#include "boost\filesystem.hpp"
#include <string>
#include <fstream>
#include <iostream>
#include <algorithm>
#include <sstream>
#include <climits>
#include <ctime>
namespace fs = boost::filesystem;

static const boost::regex expression(R"(^(?:(?:TIME\s=\s\d{4}\/\d{2}\/\d{2}\s)|(?:@))([0-9:.,]+))");
static const boost::regex nameFileEx(R"(^[\d\-\_]+(\w+\s?\w+|\w+))");
static const std::string path("E:\\2017-02-01"); 
//static const std::string path("E:\\TestLog");

unsigned long time2Milleseconds(const std::string & time)
{
    int a, b, c, d = 0; // d stays 0 when the optional milliseconds field is missing
    if (sscanf_s(time.c_str(), "%d:%d:%d,%d", &a, &b, &c, &d) >= 3)
        return a * 3600000 + b * 60000 + c * 1000 + d;
    return ULONG_MAX; // callers only pass regex-matched timestamps, so this is just a safety net
}

// Discard the first 7 lines of every file (the session banner and the blank lines that follow it)
void readAllFilesUntilLine7(std::vector<std::pair<std::ifstream, std::string>> & vifs)
{
    std::string line;
    for (int i = 0; i < vifs.size(); ++i)
    {
        int lineNumber = 0;
        while (lineNumber != 7 && std::getline(vifs[i].first, line))
        { 
            ++lineNumber;
        }
    }
}

// Pull the next line from file `index`: record its timestamp (in ms) and text, or on EOF
// mark the stream exhausted (ULONG_MAX) and decrement the live-file counter
void checkRegex(std::vector<std::pair<std::ifstream, std::string>> & vifs, std::vector<unsigned long> & logTime, std::vector<std::string> & lines, int index, int & counter)
{
    std::string line;
    boost::smatch what;
    if (std::getline(vifs[index].first, line))
    {
        if (boost::regex_search(line, what, expression))
        {
            logTime[index] = time2Milleseconds(what[1]);
        }
        lines[index] = line;
    }
    else
    {
        --counter;
        logTime[index] = ULONG_MAX;
    }
}

// Repeatedly write out the pending line with the smallest timestamp and refill from the file it came from
void mergeFiles(std::vector<std::pair<std::ifstream, std::string>> & vifs, std::vector<unsigned long> & logTime, std::vector<std::string> & lines, std::ofstream & file, int & counter)
{
    std::string line;
    boost::smatch what;
    int index = 0;
    for (int i = 0; i < vifs.size(); ++i)
    {
        checkRegex(vifs, logTime, lines, i, counter);
    }
    index = min_element(logTime.begin(), logTime.end()) - logTime.begin();
    file << lines[index] << " --> " << vifs[index].second << "\n";
    while (true)
    {
        checkRegex(vifs, logTime, lines, index, counter);
        index = min_element(logTime.begin(), logTime.end()) - logTime.begin();
        if (0 == counter)
            break;
        file << lines[index] << " --> " << vifs[index].second << "\n";
    }
}

int main()
{
    clock_t begin = clock();
    int cnt = std::count_if(fs::directory_iterator(path),fs::directory_iterator(),static_cast<bool(*)(const fs::path&)>(fs::is_regular_file));
    std::vector<std::pair<std::ifstream, std::string>> vifs(cnt);
    int index = 0;
    boost::smatch what;
    std::string file;
    for (fs::directory_iterator d(path); d != fs::directory_iterator(); ++d)
    {
        if (fs::is_regular_file(d->path()))
        {
            file = d->path().filename().string();
            if (boost::regex_search(file, what, nameFileEx))
            {
                vifs[index++] = std::make_pair(std::ifstream(d->path().string()), what[1]);
            }
        }
    }
    std::vector<unsigned long> logTime(cnt, ULONG_MAX);
    std::vector<std::string> lines(cnt);
    std::ofstream filename(path + "\\TestLog.txt");
    readAllFilesUntilLine7(vifs);
    mergeFiles(vifs, logTime, lines, filename, cnt);
    filename.close();
    clock_t end = clock();
    double elapsed_secs = double(end - begin) / CLOCKS_PER_SEC;
    std::cout << "Elapsed time = " << elapsed_secs << "\n";
    return 0;
}

It does exactly what it should, but it is slow. Merging 82 files ranging from 1 KB to 250 MB and producing a final file with more than 6,000,000 lines takes 70 minutes.

How can I speed up the algorithm? Any help is greatly appreciated!

UPDATE

I have also implemented a version using a heap:

Data.h:

#pragma once

#include <string>

class Data
{
public:
    Data(DWORD index,
         const std::string & line,
         ULONG time);
    ~Data();
    inline const ULONG getTime() const  {return time; }
    inline const DWORD getIndex() const { return index; }
    inline const std::string getLine() const { return line; }
private:
    DWORD index;
    std::string line;
    ULONG time;
};

class Compare
{
public:
    bool operator()(const Data & lhs, const Data & rhs) { return lhs.getTime() > rhs.getTime(); };
};

Data.cpp:

#include "stdafx.h"
#include "Data.h"


Data::Data(DWORD i_index,
           const std::string & i_line,
           ULONG i_time)
    : index(i_index)
    , line(i_line)
    , time(i_time)
{
}


Data::~Data()
{
}

Main.cpp:

#include "stdafx.h"
#include "boost/regex.hpp"
#include "boost/lexical_cast.hpp"
#include "boost\filesystem.hpp"
#include <string>
#include <fstream>
#include <iostream>
#include <algorithm>
#include <sstream>
#include <climits>
#include <ctime>
#include <queue>
#include "Data.h"
namespace fs = boost::filesystem;

static const boost::regex expression(R"(^(?:(?:TIME\s=\s\d{4}\/\d{2}\/\d{2}\s)|(?:@))([0-9:.,]+))");
static const boost::regex nameFileEx(R"(^[\d\-\_]+(\w+\s?\w+|\w+))");
static const std::string path("E:\\2017-02-01");
//static const std::string path("E:\\TestLog");

unsigned long time2Milleseconds(const std::string & time)
{
    int a, b, c, d = 0; // d stays 0 when the optional milliseconds field is missing
    if (sscanf_s(time.c_str(), "%d:%d:%d,%d", &a, &b, &c, &d) >= 3)
        return a * 3600000 + b * 60000 + c * 1000 + d;
    return ULONG_MAX; // callers only pass regex-matched timestamps, so this is just a safety net
}

// Skip a file's header lines until the first timestamped line, and seed the heap with it
void initializeHeap(std::ifstream & ifs, std::priority_queue<Data, std::vector<Data>, Compare> & myHeap, const int index)
{
    ULONG time;
    std::string line;
    boost::smatch what;
    bool match = false;
    while (!match && std::getline(ifs, line))
    {
        if (boost::regex_search(line, what, expression))
        {
            time = time2Milleseconds(what[1]);
            myHeap.push(Data(index, line, time));
            match = true;
        }
    }
}

// Read the next line from file `index` and push it onto the heap; lines without their own
// timestamp inherit the previous one passed in
void checkRegex(std::vector<std::pair<std::ifstream, std::string>> & vifs, std::priority_queue<Data, std::vector<Data>, Compare> & myHeap, ULONG time, const int index)
{
    std::string line;
    boost::smatch what;
    if (std::getline(vifs[index].first, line))
    {
        if (boost::regex_search(line, what, expression))
        {
            time = time2Milleseconds(what[1]);
        }
        myHeap.push(Data(index, line, time));
    }
}

// Pop the globally smallest line, write it out, then read one replacement line from the same file
void mergeFiles(std::vector<std::pair<std::ifstream, std::string>> & vifs, std::priority_queue<Data, std::vector<Data>, Compare> & myHeap, std::ofstream & file)
{
    int index = 0;
    ULONG time = 0;
    while (!myHeap.empty())
    {
        index = myHeap.top().getIndex();
        time = myHeap.top().getTime();
        file << myHeap.top().getLine() << " --> " << vifs[index].second << "\n";
        myHeap.pop();
        checkRegex(vifs, myHeap, time, index);
    }
}

int main()
{
    clock_t begin = clock();
    int cnt = std::count_if(fs::directory_iterator(path), fs::directory_iterator(), static_cast<bool(*)(const fs::path&)>(fs::is_regular_file));
    std::priority_queue<Data, std::vector<Data>, Compare> myHeap;
    std::vector<std::pair<std::ifstream, std::string>> vifs(cnt);
    int index = 0;
    boost::smatch what;
    std::string file;
    for (fs::directory_iterator d(path); d != fs::directory_iterator(); ++d)
    {
        if (fs::is_regular_file(d->path()))
        {
            file = d->path().filename().string();
            if (boost::regex_search(file, what, nameFileEx))
            {
                vifs[index] = std::make_pair(std::ifstream(d->path().string()), what[1]);
                initializeHeap(vifs[index].first, myHeap, index);
                ++index;
            }
        }
    }
    std::ofstream filename(path + "\\TestLog.txt");
    mergeFiles(vifs, myHeap, filename);
    filename.close();
    clock_t end = clock();
    double elapsed_secs = double(end - begin) / CLOCKS_PER_SEC;
    std::cout << "Elapsed time = " << elapsed_secs << "\n";
    return 0;
}

After all this work, I realized that yesterday I had been running the program in Debug. Running both implementations in Release, I got the following results:

  • Vector implementation: about 25 seconds
  • Heap implementation: about 27 seconds

So either my implementation of the heap structure is not well optimized, or the two implementations end up with the same running time.

Is there anything else I can do to speed up execution?

1 Answer:

Answer 0 (score: 2)

This can be done faster and with much less memory. Consider this first:

  • Read one line from each file (so only N lines are in memory at any time).
  • Find the smallest of the N lines and output it.
  • In memory, replace the line you just output with the next line from the file it came from (handling the EOF case).

If M is the length of the output file (i.e. the total length of all the logs), a naive implementation of this is O(N * M).
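
For illustration, here is a minimal sketch of that naive approach (not the poster's exact code). extractTime is a hypothetical helper standing in for the regex/sscanf timestamp parsing from the question, and each output line costs a linear scan over the N pending lines:

#include <cstddef>
#include <fstream>
#include <limits>
#include <string>
#include <vector>

// Hypothetical helper: parse a line's "HH:MM:SS,mmm" timestamp into milliseconds.
unsigned long extractTime(const std::string & line);

void naiveMerge(std::vector<std::ifstream> & inputs, std::ofstream & out)
{
    if (inputs.empty())
        return;
    const unsigned long kDone = std::numeric_limits<unsigned long>::max();
    std::vector<std::string> current(inputs.size());
    std::vector<unsigned long> times(inputs.size(), kDone);

    // Prime one pending line per file.
    for (std::size_t i = 0; i < inputs.size(); ++i)
        if (std::getline(inputs[i], current[i]))
            times[i] = extractTime(current[i]);

    for (;;)
    {
        // Linear scan for the smallest timestamp: O(N) per output line.
        std::size_t best = 0;
        for (std::size_t i = 1; i < times.size(); ++i)
            if (times[i] < times[best])
                best = i;
        if (times[best] == kDone)
            break;                                       // every file is exhausted

        out << current[best] << "\n";
        if (std::getline(inputs[best], current[best]))   // refill from the same file
            times[best] = extractTime(current[best]);
        else
            times[best] = kDone;                         // mark this file as finished
    }
}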

However, the above can be improved by using a heap, which brings the time down to O(M log N). That is, keep the N in-memory elements in a heap. Pop the top to output the smallest element. Then, when you read a new line, just push it back onto the heap.
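
A sketch of that heap variant under the same assumptions (extractTime is again hypothetical), using std::priority_queue as the min-heap so selecting the next line costs O(log N) instead of O(N):

#include <cstddef>
#include <fstream>
#include <queue>
#include <string>
#include <vector>

// Hypothetical helper, same as in the previous sketch.
unsigned long extractTime(const std::string & line);

struct Entry
{
    unsigned long time;   // timestamp in milliseconds, used as the sort key
    std::size_t   file;   // which input stream the line came from
    std::string   line;
};

struct ByTime
{
    bool operator()(const Entry & a, const Entry & b) const { return a.time > b.time; }
};

void heapMerge(std::vector<std::ifstream> & inputs, std::ofstream & out)
{
    std::priority_queue<Entry, std::vector<Entry>, ByTime> heap;

    // Seed the heap with the first line of every file.
    std::string line;
    for (std::size_t i = 0; i < inputs.size(); ++i)
        if (std::getline(inputs[i], line))
            heap.push(Entry{extractTime(line), i, line});

    while (!heap.empty())
    {
        Entry top = heap.top();                          // smallest timestamp across all files
        heap.pop();
        out << top.line << "\n";

        // Refill from the file that produced the line just written: O(log N).
        if (std::getline(inputs[top.file], line))
            heap.push(Entry{extractTime(line), top.file, line});
    }
}

Note that with only N = 82 files the asymptotic gain is modest (roughly 6 comparisons per line via the heap versus up to 82 with the linear scan), which is consistent with the two Release timings above being nearly equal; at that point the regex matching and file I/O, rather than the selection step, likely dominate the running time.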