// Implementation of Dynamic Computational Network learning class
// Copyright © 2009 The University of Chicago
#include "DCNlearning.h"

#include <cstdlib>
#include <ctime>

#include <QFile>
#include <QString>
#include <Q3TextStream>
#include "DCNgrammar.h"
#include "DCNnetwork.h"
#include "DCNsnetwork.h"
#include "ui/Status.h"

using linguistica::ui::status_user_agent;
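
// runHelper() performs one learning trial: it cycles through the corpus,
// perturbing the grammar parameters (alpha, beta, I, F) whenever the network
// stresses a word incorrectly, with the size of each perturbation controlled
// by the temperature T.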
void learning::runHelper(Q3TextStream& logstream, status_user_agent& status)
{
    srand(static_cast<unsigned>(time(0)));

    // Careful: I'm specifying values here.
    int cutoff = cutoffFromUser;
    int maxCorpusIndex = numberOfWords;

    theGrammar.setValues(startingAlpha, startingBeta, startingI, startingF, 0, 0);

    status.progress.clear();
    status.progress.set_denominator(cutoffFromUser);
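
    // Anneal: keep adjusting the grammar until the temperature T cools below
    // 0.01 or the iteration budget (cutoff) runs out.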
    while (T > 0.01 && cutoff >= 0) {
        status.progress = updateNumber;
70 << "it: " << cutoffFromUser
- cutoff
<< "\t"
71 << "alpha: " << theGrammar
.getAlpha() << "\t"
72 << "beta: " << theGrammar
.getBeta() << "\t"
73 << "initial: " << theGrammar
.getI() << "\t"
74 << "final: " << theGrammar
.getF() << "\n";
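
        // Pull the next word from the corpus, run the dynamic network to
        // equilibrium, and read off the stress pattern it produces.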
        QString string = corpus[corpusIndex];

        syl = string.length();
        network theNetwork(syl);
        theNetwork.setGrammar(&theGrammar);
        theNetwork.equilibrium();

        QString word = theNetwork.getStress();
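
        // If the network's stress pattern disagrees with the corpus form,
        // perturb each grammar parameter by a random amount scaled by the
        // temperature T.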
        if (word != corpus[corpusIndex]) {
            using linguistica::random_small_float;

            // random float between -1 and 1
            float deltaAlpha = T * random_small_float();
            alpha = theGrammar.getAlpha() + deltaAlpha;

            float deltaBeta = T * random_small_float();
            beta = theGrammar.getBeta() + deltaBeta;

            float deltaI = T * random_small_float();
            I = theGrammar.getI() + deltaI;

            float deltaF = T * random_small_float();
            F = theGrammar.getF() + deltaF;
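
            // Build a candidate grammar from the perturbed values; adopt it
            // only if a network using it still converges.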
            //theGrammar.setValues(alpha, beta, I, F, 0, 0);

            possibleGrammar.setValues(alpha, beta, I, F, 0, 0);
            snet.setGrammar(&possibleGrammar);

            if (snet.isTotallyConverged())
                // alpha * beta > .3 may be another thing to put in here
                theGrammar.setValues(alpha, beta, I, F, 0, 0);

            T = T + increaseWhenWrong;
            //T = T + sqrt( pow(deltaAlpha, 2.0) + pow(deltaBeta, 2.0)
            //    + pow(deltaI, 2.0) + pow(deltaF, 2.0) );
        }
        else if (word == corpus[corpusIndex])
            T = T * decreaseWhenRight;  // right answer: cool the temperature

        cutoff--; updateNumber++;
        corpusIndex = (corpusIndex + 1) % maxCorpusIndex;
    }

    status.progress.clear();
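
    // After the loop, check whether the final parameter values still give a
    // convergent network and whether the iteration budget was not exhausted.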
    possibleGrammar.setValues(alpha, beta, I, F, 0, 0);
    snet.setGrammar(&possibleGrammar);

    if (cutoff > 2 && snet.isTotallyConverged())
        successful = true;  // assumption: the success flag read back by isSuccessful()
}
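
// run() opens DCNlog.txt for appending and repeats runHelper() until a trial
// succeeds or numberOfTries is exhausted.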
void learning::run(status_user_agent& status_display)
{
    time_t rawtime;
    time(&rawtime);  // current time, for the log header

    QFile logfile( "DCNlog.txt" );  // added : -cs-
    if( logfile.open( QIODevice::WriteOnly | QIODevice::Append ) )  // added : -cs-
    {
        Q3TextStream logstream( &logfile );  // added : -cs-

        logstream << "Learning Algorithm run at " << ctime(&rawtime);

        for (int i = 0; i < numberOfTries; i++) {
            logstream << "\n\tTRIAL NUMBER " << i+1 << "\n";
            runHelper(logstream, status_display);
            if (this->isSuccessful()) break;
        }
    }
}

void learning::setCorpus(corpusType corpus, int numberOfWords)
{
    this->corpus = corpus;
    this->numberOfWords = numberOfWords;
}
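
// returnGrammar() hands back a heap-allocated copy of the learned grammar;
// the caller is responsible for deleting it.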
grammar* learning::returnGrammar()
{
    grammar* returnGrammar = new grammar(theGrammar);
    return returnGrammar;
}