#include <fstream>
#include <iostream>
#include <list>
#include <string>
#include <vector>

using namespace std;

// Forward declaration: Tokenize is defined below but called first.
void Tokenize(const string& str, vector<string>& tokens);

// The original code is a function body that receives strFileName from its
// enclosing function; the wrapper name and signature here are assumptions.
unsigned long ProcessFile(const string& strFileName)
{
    list<string> text;                        // non-empty lines of the file
    list<string>::iterator c1_Iter;
    unsigned long ret_code = 0;               // 0 = success, non-zero = failure
    const int READ_BUFFER_SIZE = 1024;
    char readBuffer[READ_BUFFER_SIZE] = {0};
    string line;
    fstream rcFile(strFileName.c_str(), ios::in);
    if (!rcFile) {
        ret_code = 1;                         // non-zero return code signals failure
        goto EXIT_FUNCTION;
    }
    // Read the file line by line, skipping empty lines.
    while (rcFile.getline(readBuffer, READ_BUFFER_SIZE, '\n')) {
        line = readBuffer;
        if (line.empty()) continue;
        text.push_back(line);
    }
    rcFile.close();
    // Sort the lines lexicographically.
    text.sort();
    // Print the first three fields of each line; a "NULL" field prints as a blank.
    for (c1_Iter = text.begin(); c1_Iter != text.end(); ++c1_Iter) {
        string line = *c1_Iter;
        vector<string> tokens;
        Tokenize(line, tokens);
        cout << (tokens.at(0) == "NULL" ? " " : tokens.at(0)) << ","
             << (tokens.at(1) == "NULL" ? " " : tokens.at(1)) << ","
             << (tokens.at(2) == "NULL" ? " " : tokens.at(2)) << endl;
    }

EXIT_FUNCTION:
    return ret_code;
}
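// Example of the input this routine expects (hypothetical data, inferred from
// the three tokens.at() accesses above): each line holds three comma-separated
// fields, with the literal string "NULL" standing in for an empty field, e.g.
//
//   Alice,NULL,engineer
//   Bob,London,NULL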
// Parse the fields in a file line, splitting on commas.
void Tokenize(const string& str, vector<string>& tokens)
{
    const string delimiters = ",";

    // Skip delimiters at the beginning.
    string::size_type lastPos = str.find_first_not_of(delimiters, 0);
    // Find the first "non-delimiter".
    string::size_type pos = str.find_first_of(delimiters, lastPos);

    while (string::npos != pos || string::npos != lastPos)
    {
        // _stricmp (case-insensitive compare) is MSVC-specific; POSIX
        // systems provide strcasecmp in <strings.h> instead.
        if (_stricmp(str.substr(lastPos, pos - lastPos).c_str(), "NULL") == 0) {
            // Normalize any case variant of "null" to the literal "NULL".
            tokens.push_back("NULL");
        } else {
            // Found a token; add it to the vector.
            tokens.push_back(str.substr(lastPos, pos - lastPos));
        }
        // Skip delimiters. Note the "not_of".
        lastPos = str.find_first_not_of(delimiters, pos);
        // Find the next "non-delimiter".
        pos = str.find_first_of(delimiters, lastPos);
    }
}
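// A minimal usage sketch; main() below is not part of the original code and
// the sample data is made up. It writes a small file, then runs ProcessFile
// (the assumed wrapper name from above) on it.
int main()
{
    ofstream sample("sample.txt");
    sample << "banana,NULL,3\n";
    sample << "apple,red,5\n";
    sample.close();

    // Expected output (lines sorted, NULL printed as a blank):
    //   apple,red,5
    //   banana, ,3
    return static_cast<int>(ProcessFile("sample.txt"));
}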