您好我是stackoverflow的新手,所以请原谅我所犯的任何新手错误。我有一个程序,我试图用c ++构建,我遇到了一些问题。该程序应该让用户输入文件名然后读取文件并计算文本文件中的单词数量并打印最后十个和前十个。一旦我在文本文件上查找带有大量单词的间距或文本文件的任何返回,我的程序似乎不起作用。任何人都可以帮助指导我朝着正确的方向前进。
谢谢!
#include <iostream>
#include <cmath>
#include <cstdlib>
#include <fstream>
#include <string>
#include <iomanip>
#include <stdio.h>
#include <sstream>
#include <algorithm>
using namespace std;
string *tokenizedWords = new string[5000000]; //array to hold tokenized words
string *tokenizedReversed = new string[5000000]; //array to hold tokenized words reverse
int tokenize(string linesFromFile, string tokenizedWords[]);
int main()
{
string linesFromFile; //holds the line read from a file
int wordCount = 0; //int to keep wordcount in the while loop
int firstLast = 10; //the amount for first ten and last ten words to print
string theNameofTheFile; //to hold the filename
cout << "Please enter a filename: " << endl; //asks the user
cin >> theNameofTheFile;
ifstream inputStream(theNameofTheFile.c_str());
if(inputStream.is_open()) //check if the file opened correctly
{
while(std::getline(inputStream, linesFromFile)) //reads all of the lines
{
wordCount++;
if(linesFromFile.length() > 0) //checks if the line is empty
{
wordCount = tokenize(linesFromFile,tokenizedWords);
if(wordCount < firstLast) //if text file has less than ten words
{
cout << "This text file is smaller than 10 words so I can not print first and last 10 words." << endl;
return 0;
}
if(wordCount > firstLast) //if textfile has more than ten words
{
cout << endl;
cout << "The first ten words of the document are:" << endl;
cout << endl;
for(int j = 0; j < firstLast; j++)
{
cout << tokenizedWords[j] << endl;
}
cout << endl;
cout << "The last ten words of the document are:" << endl;
cout << endl;
std::reverse_copy(tokenizedWords, tokenizedWords+wordCount, tokenizedReversed);
for(int i = 0; i < firstLast; i++)
{
cout << tokenizedReversed[i] << endl;
}
}
}
inputStream.close(); //close the file
cout<< "Total amount of words is: " << wordCount << endl;
}
}
else
{
//if wrong file is inputed
cout << "Sorry the file " << theNameofTheFile << " does not exists " << endl;
}
return 0;
}
int tokenize(string linesFromFile, string tokenizedWords[])
{
int totalWords (0);
istringstream toTokenize(linesFromFile);
while (toTokenize.good())
{
toTokenize >> tokenizedWords[totalWords++];
}
return (totalWords);
}