1 // Implementation of network methods
2 // Copyright © 2009 The University of Chicago
#include "DCNnetwork.h"

#include <vector>

#include "DCNgrammar.h"
8 //////////////////////////////////////////////////////////////////////
9 // Construction/Destruction
10 //////////////////////////////////////////////////////////////////////
12 network::network(int syl
)
15 node
= new float[syl
];
16 for (int i
= 0; i
< syl
; i
++)
27 void network::setGrammar(grammar
* theGrammar
)
29 this->theGrammar
= theGrammar
;
32 void network::equilibrium()
34 //definitions of network values
35 float alpha
= theGrammar
->getAlpha();
36 float beta
= theGrammar
->getBeta();
37 float I
= theGrammar
->getI();
38 float F
= theGrammar
->getF();
39 float P
= theGrammar
->getP();
40 float bias
= theGrammar
->getBias();
44 // the following is the meat of DCNs
46 // oldIteration lets us know when we've converged
47 float* oldIteration
= new float[syl
];
48 for (int i
= 0; i
< syl
; i
++)
49 oldIteration
[i
] = -2.0;
51 // the DCN goes for 255 cylces before giving up
52 for (int cycle
= 0; cycle
< 256; cycle
++)
55 for (int s
= 0; s
< syl
; s
++)
58 node
[s
] = I
+ alpha
* node
[s
+1] + bias
;
60 node
[s
] = F
+ beta
* node
[s
-1] + bias
;
61 else if ((s
== syl
-2) && (syl
!= 2))
62 node
[s
] = P
+ beta
* node
[s
-1] + alpha
* node
[s
+1] + bias
;
64 node
[s
] = beta
* node
[s
-1] + alpha
* node
[s
+1] + bias
;
67 //calculate the distance
69 for (int i
= 0; i
< syl
; i
++)
70 distance
+= ((node
[i
] - oldIteration
[i
]) * (node
[i
] - oldIteration
[i
]));
72 //has the network converged?
73 float DELTA
= 0.0001f
;
80 //store this iteration as old iteration
81 for (int j
= 0; j
< syl
; j
++)
82 oldIteration
[j
] = node
[j
];
84 //last cycle, the network has failed to converge
89 delete[] oldIteration
;
92 void network::print(QLabel
* label
)
94 //definitions of network values
95 float alpha
= theGrammar
->getAlpha();
96 float beta
= theGrammar
->getBeta();
97 float I
= theGrammar
->getI();
98 float F
= theGrammar
->getF();
99 float P
= theGrammar
->getP();
100 float bias
= theGrammar
->getBias();
102 // partially ganked from grammar::print(label)
104 QString partialString
;
106 totalString
+= "The values of the grammar:\n";
107 partialString
.setNum(alpha
);
108 totalString
+= "\talpha:\t\t" + partialString
+ '\n';
109 partialString
.setNum(beta
);
110 totalString
+= "\tbeta:\t\t" + partialString
+ '\n';
111 partialString
.setNum(I
);
112 totalString
+= "\tinitial:\t\t" + partialString
+ '\n';
113 partialString
.setNum(F
);
114 totalString
+= "\tfinal:\t\t" + partialString
+ '\n';
115 partialString
.setNum(P
);
116 totalString
+= "\tpenult:\t\t" + partialString
+ '\n';
117 partialString
.setNum(bias
);
118 totalString
+= "\tbias:\t\t" + partialString
+ '\n';
119 totalString
+= "\n\n";
121 totalString
+= "The value of the nodes:\n";
122 for (int i
= 0; i
< syl
; i
++)
124 partialString
.setNum(i
);
125 totalString
+= "\tnode # " + partialString
+ ":\t";
126 partialString
.setNum(node
[i
]);
127 totalString
+= partialString
+ "\n";
129 totalString
+= "\n\n";
131 totalString
+= "The stress of the word is:\n\t";
133 totalString
+= getStress();
135 totalString
+= "not converged!";
137 label
->setText(totalString
);
// getStress(): presumably walks the nodes and builds a string of '1'/'0'
// characters, one per syllable, marking a syllable stressed when its
// activation beats its neighbour(s).
// NOTE(review): the extraction that produced this file dropped original
// lines 143, 145-147, 149-152, 154-157 and everything past 158 -- the
// declaration of `stress`, the conditions selecting each branch, the "0"
// branches, and the return statement are all missing here.  Recover the
// full source before editing this routine.
141 // 1 means a stress, 0 means no stress
142 QString
network::getStress() {
// Scan every node/syllable once.
144 for (int s
= 0; s
< syl
; s
++)
// First node: compared only against its right neighbour -- TODO confirm the
// missing guard was (s == 0).
148 if (node
[s
] > node
[s
+1]) stress
+= "1";
// Last node: compared only against its left neighbour -- TODO confirm the
// missing guard was (s == syl-1).
153 if (node
[s
] > node
[s
-1]) stress
+= "1";
// Interior node: stressed only when it beats both neighbours.  The body of
// this if (and any else) lies beyond the visible chunk.
158 if ((node
[s
] > node
[s
-1]) && (node
[s
] > node
[s
+1]))