terrible bug in PenaltyDAG and Penalty2DAG.
[vspell.git] tests/tokenize-test.cpp
#include "tokenize.h"
#include <algorithm>
#include <iostream>
#include <iterator>
#include <spell.h>
#include <dictionary.h>

using namespace std;

int main()
{
	string str;

	dic_init();

	while (getline(cin, str)) {
		// First pass: tokenize the raw line and print each real token.
		Tokens output;
		tokenize(str, output);
		//copy(output.begin(),output.end(),ostream_iterator<string>(cout,"\n"));
		for (size_t i = 0; i < output.size(); i++)
			if (output[i].is_token)
				cout << output[i].value << endl;

		// Second pass: run the same line through the Sentence pipeline.
		Sentence st(str);
		st.standardize();
		st.tokenize();
		cout << st << endl;
	}
	return 0;
}
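To drive the test, pipe text on stdin: each input line is tokenized twice, once directly via tokenize() and once through the Sentence standardize/tokenize pipeline, so the two outputs can be compared. The only interface the test relies on is that tokenize() fills an indexable container whose elements carry an is_token flag and a value string. The real declarations live in tokenize.h; the sketch below is a hypothetical reconstruction inferred solely from the calls above, not the library's actual definitions.

	// Hypothetical sketch of the interface this test exercises,
	// inferred from usage only; see tokenize.h for the real types.
	#include <string>
	#include <vector>

	struct Token {
		bool is_token;       // true for a word token, false for separators
		std::string value;   // the token text
	};

	typedef std::vector<Token> Tokens;   // indexable, provides size()

	// Assumed signature: splits one input line into tokens appended to out.
	void tokenize(const std::string &line, Tokens &out);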