5 * Created by Alyssa Milburn on Wed Apr 11 2007.
6 * Copyright (c) 2007 Alyssa Milburn. All rights reserved.
8 * This library is free software; you can redistribute it and/or
9 * modify it under the terms of the GNU Lesser General Public
10 * License as published by the Free Software Foundation; either
11 * version 2 of the License, or (at your option) any later version.
13 * This library is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 * Lesser General Public License for more details.
23 #include <boost/format.hpp>
// Shared scratch bank of eight zeroed floats, passed to c2eSVRule::runRule
// for whichever neuron/dendrite variable banks a caller doesn't have.
// NOTE(review): rules can write through it (see runRule's operandpointer),
// so its contents are not guaranteed to remain zero after use.
float dummyValues[8] = { 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f };
28 * c2ebraincomponentorder::operator()
30 * A functor to sort brain components by their update time.
33 bool c2ebraincomponentorder::operator()(const class c2eBrainComponent
*b1
, const class c2eBrainComponent
*b2
) const {
34 return b1
->updatetime
< b2
->updatetime
;
40 * Constructor for a c2eTract. Pass it the relevant gene.
43 c2eTract::c2eTract(c2eBrain
*b
, c2eBrainTractGene
*g
) : c2eBrainComponent(b
) {
46 updatetime
= g
->updatetime
;
48 initrule
.init(g
->initialiserule
);
49 updaterule
.init(g
->updaterule
);
53 * c2eTract::setupTract
55 * Internal function which sets up the details of a tract (which needs to wait until
56 * after the lobes are constructed).
59 void c2eTract::setupTract() {
60 c2eBrainTractGene
*g
= ourGene
;
63 std::string srclobename
= std::string((char *)g
->srclobe
, 4);
64 std::string destlobename
= std::string((char *)g
->destlobe
, 4);
66 if (b
->lobes
.find(srclobename
) == b
->lobes
.end() || b
->lobes
.find(destlobename
) == b
->lobes
.end()) {
67 std::cout
<< "brain debug: failed to create dendrites for " << dump() << " (missing lobe)" << std::endl
;
70 c2eLobe
*srclobe
= b
->lobes
[srclobename
];
71 c2eLobe
*destlobe
= b
->lobes
[destlobename
];
73 for (unsigned int i
= g
->srclobe_lowerbound
; i
<= g
->srclobe_upperbound
; i
++) {
74 if (i
>= srclobe
->getNoNeurons()) break;
75 src_neurons
.push_back(srclobe
->getNeuron(i
));
78 for (unsigned int i
= g
->destlobe_lowerbound
; i
<= g
->destlobe_upperbound
; i
++) {
79 if (i
>= destlobe
->getNoNeurons()) break;
80 dest_neurons
.push_back(destlobe
->getNeuron(i
));
83 if (src_neurons
.size() == 0 || dest_neurons
.size() == 0) {
84 std::cout
<< "brain debug: failed to create dendrites for " << dump() << " (no neurons)" << std::endl
;
88 // create/distribute dendrites as needed
90 // You can't have *both* sides of the tract unconstrained, we'd have no idea how many dendrites to make!
91 if (g
->src_noconnections
== 0 && g
->dest_noconnections
== 0) {
92 std::cout
<< "brain debug: failed to create dendrites for " << dump() << " (both connections unconstrained)" << std::endl
;
94 } else if (g
->src_noconnections
!= 0 && g
->dest_noconnections
!= 0) {
95 // TODO: correct behaviour? seems to be, given CL's brain-in-a-vat behaviour
96 std::cout
<< "brain debug: failed to create dendrites for " << dump() << " (no unconstrained connections)" << std::endl
;
100 // assume we're doing src->dest
101 unsigned int neuronsize
= src_neurons
.size();
102 unsigned int noconnections
= g
->src_noconnections
;
103 // change things if we're doing dest->src :)
104 if (g
->src_noconnections
== 0) {
105 neuronsize
= dest_neurons
.size();
106 noconnections
= g
->dest_noconnections
;
109 // distribute neurons
110 // TODO: work out if this algorithm works vaguely correctly
111 // TODO: low-order bit badness in the randomness?
112 for (unsigned int i
= 0; i
< neuronsize
; i
++) {
113 unsigned int noconns
= noconnections
;
114 if (g
->norandomconnections
)
115 noconns
= 1 + (rand() % noconnections
);
117 for (unsigned int j
= 0; j
< noconns
; j
++) {
119 if (g
->src_noconnections
== 0) {
120 d
.source
= src_neurons
[rand() % src_neurons
.size()];
121 d
.dest
= dest_neurons
[i
];
123 d
.source
= src_neurons
[i
];
124 d
.dest
= dest_neurons
[rand() % dest_neurons
.size()];
126 dendrites
.push_back(d
);
130 // if the genome tells us to make no connections, give up
131 if (g
->src_noconnections
== 0 || g
->dest_noconnections
== 0) {
132 std::cout
<< "brain debug: failed to create dendrites for " << dump() << " (no connections)" << std::endl
;
136 // distribute neurons
137 // this seems identical to CL's brain-in-a-vat for the default brain and for some test cases fuzzie made up
138 // TODO: test the algorithm a bit more
139 // TODO: take notice of norandomconnections? (doesn't look like it)
140 unsigned int srcneuron
= 0, srcconns
= 0;
141 unsigned int destneuron
= 0, destconns
= 0;
143 c2eNeuron
*src
= src_neurons
[srcneuron
];
144 c2eNeuron
*dest
= dest_neurons
[destneuron
];
146 // if there's already a dendrite like the one we're about to create, we're done
147 if (getDendriteFromTo(src
, dest
)) return;
152 dendrites
.push_back(d
);
155 if (srcconns
>= g
->src_noconnections
) {
158 if (destneuron
>= dest_neurons
.size())
162 if (destconns
>= g
->dest_noconnections
) {
165 if (srcneuron
>= src_neurons
.size())
175 * Returns a textual string describing the tract for use in debug messages.
178 std::string
c2eTract::dump() {
179 c2eBrainTractGene
*g
= ourGene
;
181 std::string srclobename
= std::string((char *)g
->srclobe
, 4);
182 std::string destlobename
= std::string((char *)g
->destlobe
, 4);
184 std::string data
= boost::str(boost::format("tract %s->%s, src neurons %d-%d #cons %d, dest neurons %d-%d #cons %d") % srclobename
% destlobename
185 % (int)g
->srclobe_lowerbound
% (int)g
->srclobe_upperbound
% (int)g
->src_noconnections
186 % (int)g
->destlobe_lowerbound
% (int)g
->destlobe_upperbound
% (int)g
->dest_noconnections
189 if (g
->migrates
) data
+= ", migratory";
195 * c2eTract::getDendriteFromTo
197 * Returns the dendrite from this tract between the two neurons, or null if there isn't one.
200 c2eDendrite
*c2eTract::getDendriteFromTo(c2eNeuron
*from
, c2eNeuron
*to
) {
201 for (std::vector
<c2eDendrite
>::iterator i
= dendrites
.begin(); i
!= dendrites
.end(); i
++) {
202 if (i
->source
== from
&& i
->dest
== to
) return &(*i
);
211 * Do a single update of the tract.
214 void c2eTract::tick() {
215 // attempt to migrate dendrites, if enabled
216 if (ourGene
->migrates
)
219 // run the svrule(s) against every neuron
220 for (std::vector
<c2eDendrite
>::iterator i
= dendrites
.begin(); i
!= dendrites
.end(); i
++) {
221 if (ourGene
->initrulealways
) initrule
.runRule(i
->source
->variables
[0], i
->source
->variables
, i
->dest
->variables
, dummyValues
, i
->variables
, parent
->getParent());
222 updaterule
.runRule(i
->source
->variables
[0], i
->source
->variables
, i
->dest
->variables
, dummyValues
, i
->variables
, parent
->getParent());
225 // TODO: reward/punishment? anything else? scary brains!
228 void c2eTract::wipe() {
229 for (std::vector
<c2eDendrite
>::iterator i
= dendrites
.begin(); i
!= dendrites
.end(); i
++) {
230 for (unsigned int j
= 0; j
< 8; j
++)
231 i
->variables
[j
] = 0.0f
;
235 void c2eTract::init() {
241 for (std::vector
<c2eDendrite
>::iterator i
= dendrites
.begin(); i
!= dendrites
.end(); i
++) {
242 // TODO: good way to run rule?
243 if (!ourGene
->initrulealways
)
244 initrule
.runRule(0.0f
, dummyValues
, dummyValues
, dummyValues
, i
->variables
, parent
->getParent());
248 void c2eTract::doMigration() {
250 * TODO: this is utter guesswork(tm)
252 for (std::vector
<c2eDendrite
>::iterator i
= dendrites
.begin(); i
!= dendrites
.end(); i
++) {
256 // TODO: prbly "Migration Parameters" catalogue tag thing
257 if (d
.variables
[7] == 0.0f
) {
258 // this one is loose!
260 // if we migrate to make limited connections to the *src*
261 if (ourGene
->src_noconnections
!= 0) {
264 // search for the highest NGF in d->source
265 c2eNeuron
*highestsrc
= 0;
266 for (std::vector
<c2eNeuron
*>::iterator i
= src_neurons
.begin(); i
!= src_neurons
.end(); i
++) {
268 if ((highestsrc
&& n
->variables
[ourGene
->srcvar
] > highestsrc
->variables
[ourGene
->srcvar
]) || n
->variables
[ourGene
->srcvar
] > 0.0f
) {
272 if (!highestsrc
) continue;
274 // search for the highest NGF in d->dest which isn't already linked
275 c2eNeuron
*highestdest
= 0;
276 for (std::vector
<c2eNeuron
*>::iterator i
= dest_neurons
.begin(); i
!= dest_neurons
.end(); i
++) {
278 if ((highestdest
&& n
->variables
[ourGene
->destvar
] > highestdest
->variables
[ourGene
->destvar
]) || n
->variables
[ourGene
->destvar
] > 0.0f
) {
279 if (!getDendriteFromTo(highestsrc
, n
))
283 if (!highestdest
) continue;
286 d
.source
= highestsrc
;
287 d
.dest
= highestdest
;
289 if (ourGene
->initrulealways
) {
291 for (unsigned int j
= 0; j
< 8; j
++)
292 d
.variables
[j
] = 0.0f
;
295 initrule
.runRule(0.0f
, dummyValues
, dummyValues
, dummyValues
, d
.variables
, parent
->getParent());
297 // else if we migrate to make limited connections to the *dest*
299 std::cout
<< "wah, you used something which isn't in the standard brain model, meanie" << std::endl
; // TODO
308 * Constructor for a c2eLobe. Pass it the relevant gene.
311 c2eLobe::c2eLobe(c2eBrain
*b
, c2eBrainLobeGene
*g
) : c2eBrainComponent(b
) {
314 updatetime
= g
->updatetime
;
318 unsigned int width
= g
->width
, height
= g
->height
;
319 if (width
< 1) width
= 1;
320 if (height
< 1) height
= 1;
322 neurons
.reserve(width
* height
);
325 for (unsigned int i
= 0; i
< width
* height
; i
++) {
326 neurons
.push_back(n
);
329 initrule
.init(g
->initialiserule
);
330 updaterule
.init(g
->updaterule
);
336 * Causes a c2eLobe to wipe its variables.
339 void c2eLobe::wipe() {
340 for (std::vector
<c2eNeuron
>::iterator i
= neurons
.begin(); i
!= neurons
.end(); i
++) {
341 for (unsigned int j
= 0; j
< 8; j
++)
342 i
->variables
[j
] = 0.0f
;
349 * Do a single update of the lobe.
352 void c2eLobe::tick() {
353 // run the svrule(s) against every neuron
354 for (unsigned int i
= 0; i
< neurons
.size(); i
++) {
355 if (ourGene
->initrulealways
&& initrule
.runRule(neurons
[i
].input
, dummyValues
, neurons
[i
].variables
, neurons
[spare
].variables
, dummyValues
, parent
->getParent()))
357 if (updaterule
.runRule(neurons
[i
].input
, dummyValues
, neurons
[i
].variables
, neurons
[spare
].variables
, dummyValues
, parent
->getParent()))
359 neurons
[i
].input
= 0.0f
;
366 * Initialise the lobe, resetting variables and running the initialisation rule.
369 void c2eLobe::init() {
374 for (std::vector
<c2eNeuron
>::iterator i
= neurons
.begin(); i
!= neurons
.end(); i
++) {
375 // TODO: good way to run rule?
376 if (!ourGene
->initrulealways
)
377 initrule
.runRule(0.0f
, dummyValues
, i
->variables
, dummyValues
, dummyValues
, parent
->getParent());
378 i
->input
= 0.0f
; // TODO: good to do that here?
383 * c2eLobe::setNeuronInput
385 * Set the input value for the specified neuron.
387 void c2eLobe::setNeuronInput(unsigned int i
, float input
) {
388 assert(i
< neurons
.size());
389 neurons
[i
].input
= input
; // TODO: always stomp over any existing input?
395 * Convenience function to return the Lobe ID of this lobe.
398 std::string
c2eLobe::getId() {
399 return std::string((char *)ourGene
->id
, 4);
405 * Initialises an SVRule from the provided data, precalculating any constant data.
408 void c2eSVRule::init(uint8 ruledata
[48]) {
411 for (unsigned int i
= 0; i
< 16; i
++) {
414 rule
.opcode
= ruledata
[i
* 3];
415 rule
.operandtype
= ruledata
[(i
* 3) + 1];
416 rule
.operanddata
= ruledata
[(i
* 3) + 2];
418 switch (rule
.operandtype
) {
419 // for neuron/dendrite values, sanitise value (there are only 8 options)
421 case 1: // input neuron
424 case 4: // spare neuron
425 // TODO: what should we do here?
426 if (rule
.operanddata
> 7) {
427 std::cout
<< "brain debug: had a too-high variable number" << std::endl
;
428 rule
.operanddata
= 7;
432 // for constant values, precalculate data
435 rule
.operandvalue
= 0.0f
;
439 rule
.operandvalue
= 1.0f
;
443 rule
.operandvalue
= (float)rule
.operanddata
* (1.0f
/ 248);
446 case 12: // negative value
447 rule
.operandvalue
= (float)rule
.operanddata
* (-1.0f
/ 248);
450 case 13: // value * 10
451 rule
.operandvalue
= (float)rule
.operanddata
* (10.0f
/ 248);
454 case 14: // value / 10
455 rule
.operandvalue
= (float)rule
.operanddata
* (0.1f
/ 248);
458 case 15: // value integer
459 rule
.operandvalue
= (float)rule
.operanddata
;
463 rules
.push_back(rule
);
// convenience function for c2eSVRule::runRule: clamp val into [min, max]
// (defaults to the SVRule value range of [-1, 1])
inline float bindFloatValue(float val, float min = -1.0f, float max = 1.0f) {
	if (val > max) return max;
	else if (val < min) return min;
	else return val;
}
// warn-once function for unimplemented svrule opcodes/operand types in c2eSVRule::runRule
inline void warnUnimplementedSVRule(unsigned char data, bool opcode = true) {
	// only ever emit one warning per process, whatever triggered it
	static bool warnedalready = false;
	if (warnedalready) return;
	warnedalready = true;

	std::cout << "brain debug: something tried using unimplemented " << (opcode ? "opcode" : "operand type" ) <<
		(unsigned int)data << ", will not warn about unimplemented svrule bits again." << std::endl;
}
// goto locations are one-based
// we must never jump backwards, only forwards
#define HANDLE_GOTO if ((unsigned int)operandvalue - 2 > i) i = (unsigned int)operandvalue - 2;
491 * Executes the SVRule using the provided variables.
493 * Returns whether the 'register as spare' opcode was executed or not.
496 bool c2eSVRule::runRule(float acc
, float srcneuron
[8], float neuron
[8], float spareneuron
[8], float dendrite
[8], c2eCreature
*creature
) {
497 float accumulator
= acc
;
498 float operandvalue
= 0.0f
; // valid rules should never use this
499 float tendrate
= 0.0f
;
500 float *operandpointer
;
502 static float stw
= 0.0f
; // TODO: good default?
503 bool is_spare
= false;
504 bool skip_next
= false;
506 for (unsigned int i
= 0; i
< rules
.size(); i
++) {
507 c2erule
&rule
= rules
[i
];
509 if (skip_next
) { // if the last if opcode was *false*..
510 // .. then don't execute the next line
515 // We don't always *need* the operand and/or pointer, but for now we'll always calculate it anyway.
516 operandpointer
= &dummy
; // point into nowhere, by default.. TODO: good choice?
517 switch (rule
.operandtype
) {
518 case 0: // accumulator
519 operandvalue
= accumulator
;
520 // accumulator does *not* set operandpointer
521 // (eg, 'blank accumulator' and 'add to and store in accumulator' do not change it)
524 case 1: // input neuron
525 operandpointer
= &srcneuron
[rule
.operanddata
];
526 operandvalue
= *operandpointer
;
530 operandpointer
= &dendrite
[rule
.operanddata
];
531 operandvalue
= *operandpointer
;
535 operandpointer
= &neuron
[rule
.operanddata
];
536 operandvalue
= *operandpointer
;
539 case 4: // spare neuron
540 operandpointer
= &spareneuron
[rule
.operanddata
];
541 operandvalue
= *operandpointer
;
545 // TODO: find a quicker RNG?
547 operandvalue
= rand() / (float)RAND_MAX
;
550 case 6: // source chemical
552 warnUnimplementedSVRule(rule
.operandtype
, false);
556 // Ratboy sez: "chemicals appear to be read-only; cannot write data to them"
557 operandvalue
= creature
->getChemical(rule
.operanddata
);
560 case 8: // destination chemical
562 warnUnimplementedSVRule(rule
.operandtype
, false);
568 case 12: // negative value
569 case 13: // value * 10
570 case 14: // value / 10
571 case 15: // value integer
572 // precalculated constants
573 operandvalue
= rule
.operandvalue
;
577 warnUnimplementedSVRule(rule
.operandtype
, false);
581 switch (rule
.opcode
) {
586 *operandpointer
= 0.0f
;
590 *operandpointer
= bindFloatValue(accumulator
);
594 accumulator
= operandvalue
;
598 if (!(accumulator
== operandvalue
))
603 if (!(accumulator
!= operandvalue
))
608 if (!(accumulator
> operandvalue
))
613 if (!(accumulator
< operandvalue
))
618 if (!(accumulator
>= operandvalue
))
623 if (!(accumulator
<= operandvalue
))
628 if (!(operandvalue
== 0.0f
))
632 case 11: // if non-zero
633 if (!(operandvalue
!= 0.0f
))
637 case 12: // if positive
638 if (!(operandvalue
> 0.0f
)) // TODO: correct?
642 case 13: // if negative
643 if (!(operandvalue
< 0.0f
)) // TODO: correct?
647 case 14: // if non-positive // TODO: should be non-negative?
648 if (!(operandvalue
<= 0.0f
)) // TODO: correct?
652 case 15: // if non-negative // TODO: should be non-positive?
653 if (!(operandvalue
>= 0.0f
)) // TODO: correct?
658 accumulator
+= operandvalue
;
662 accumulator
-= operandvalue
;
665 case 18: // subtract from
666 accumulator
= operandvalue
- accumulator
;
669 case 19: // multiply by
670 accumulator
*= operandvalue
;
673 case 20: // divide by
674 // TODO: make sure this is correct
675 if (operandvalue
!= 0.0f
)
676 accumulator
/= operandvalue
;
679 case 21: // divide into
680 // TODO: make sure this is correct
681 if (accumulator
!= 0.0f
)
682 accumulator
= operandvalue
/ accumulator
;
685 case 22: // minimum with
686 accumulator
= std::min(accumulator
, operandvalue
);
689 case 23: // maximum with
690 accumulator
= std::max(accumulator
, operandvalue
);
693 case 24: // set tend rate
694 tendrate
= operandvalue
;
698 // TODO: make sure this is correct
699 accumulator
+= tendrate
* (operandvalue
- accumulator
);
702 case 26: // load negation of
703 accumulator
= -operandvalue
;
706 case 27: // load abs of
707 accumulator
= fabsf(operandvalue
);
710 case 28: // distance to
711 // TODO: make sure this is correct
712 accumulator
= fabsf(accumulator
- operandvalue
);
715 case 29: // flip around
716 // TODO: make sure this is correct
717 accumulator
= operandvalue
- accumulator
;
720 case 30: // no operation
723 case 31: // register as spare
727 case 32: // bound in range [0, 1]
728 // TODO: make sure this is correct
729 accumulator
= bindFloatValue(operandvalue
, 0.0f
);
732 case 33: // bound in range [-1, 1]
733 // TODO: make sure this is correct
734 accumulator
= bindFloatValue(operandvalue
);
737 case 34: // add and store in
738 *operandpointer
= bindFloatValue(accumulator
+ operandvalue
);
741 case 35: // tend to and store in
742 // TODO: make sure this is correct
743 *operandpointer
= bindFloatValue(accumulator
+ tendrate
* (operandvalue
- accumulator
));
746 case 36: // nominal threshold
747 // TODO: make sure this is correct
748 if (accumulator
< operandvalue
)
752 case 37: // leakage rate
754 warnUnimplementedSVRule(rule
.opcode
);
757 case 38: // rest state
759 warnUnimplementedSVRule(rule
.opcode
);
762 case 39: // input gain hi-lo
764 warnUnimplementedSVRule(rule
.opcode
);
767 case 40: // persistence
769 warnUnimplementedSVRule(rule
.opcode
);
772 case 41: // signal noise
774 warnUnimplementedSVRule(rule
.opcode
);
777 case 42: // winner takes all
779 warnUnimplementedSVRule(rule
.opcode
);
782 case 43: // short-term relax rate
783 // TODO: should this be stored in the parent object, maybe?
784 // TODO: make sure this is correct
788 case 44: // long-term relax rate
789 // TODO: make sure this is correct
790 // TODO: is this possible for neurons? (prbly not)
792 float weight
= dendrite
[0];
793 // push weight downwards towards steady state (short-term learning)
794 dendrite
[0] = weight
+ (dendrite
[1] - weight
) * stw
;
795 // pull steady state upwards towards weight (long-term learning)
796 dendrite
[1] = dendrite
[1] + (weight
- dendrite
[1]) * operandvalue
;
800 case 45: // store abs in
801 // TODO: make sure this is correct
802 *operandpointer
= fabsf(accumulator
);
805 case 46: // stop if zero
806 if (operandvalue
== 0.0f
) goto done
;
809 case 47: // stop if non-zero
810 if (operandvalue
!= 0.0f
) goto done
;
813 case 48: // if zero goto
814 if (accumulator
== 0.0f
) HANDLE_GOTO
817 case 49: // if non-zero goto
818 if (accumulator
!= 0.0f
) HANDLE_GOTO
821 case 50: // divide by, add to neuron input
822 if (operandvalue
!= 0.0f
)
823 neuron
[1] += bindFloatValue(accumulator
/ operandvalue
);
826 case 51: // multiply by, add to neuron input
827 neuron
[1] += bindFloatValue(accumulator
* operandvalue
);
830 case 52: // goto line
834 case 53: // stop if <
835 if (accumulator
< operandvalue
) goto done
;
838 case 54: // stop if >
839 if (accumulator
> operandvalue
) goto done
;
842 case 55: // stop if <=
843 if (accumulator
<= operandvalue
) goto done
;
846 case 56: // stop if >=
847 if (accumulator
>= operandvalue
) goto done
;
850 case 57: // reward threshold
852 warnUnimplementedSVRule(rule
.opcode
);
855 case 58: // reward rate
857 warnUnimplementedSVRule(rule
.opcode
);
860 case 59: // use reward with
862 warnUnimplementedSVRule(rule
.opcode
);
865 case 60: // punish threshold
867 warnUnimplementedSVRule(rule
.opcode
);
870 case 61: // punish rate
872 warnUnimplementedSVRule(rule
.opcode
);
875 case 62: // use punish with
877 warnUnimplementedSVRule(rule
.opcode
);
880 case 63: // preserve neuron SV
881 // TODO: this seems too crazy to be true :)
882 { unsigned int index
= (unsigned int)operandvalue
;
883 if (index
> 7) index
= 7; // TODO: binding okay?
884 neuron
[4] = neuron
[index
]; }
887 case 64: // restore neuron SV
888 // TODO: this seems too crazy to be true :)
889 { unsigned int index
= (unsigned int)operandvalue
;
890 if (index
> 7) index
= 7; // TODO: binding okay?
891 neuron
[index
] = neuron
[4]; }
894 case 65: // preserve spare neuron
895 // TODO: this seems too crazy to be true :)
896 { unsigned int index
= (unsigned int)operandvalue
;
897 if (index
> 7) index
= 7; // TODO: binding okay?
898 spareneuron
[4] = spareneuron
[index
]; }
901 case 66: // restore spare neuron
902 // TODO: this seems too crazy to be true :)
903 { unsigned int index
= (unsigned int)operandvalue
;
904 if (index
> 7) index
= 7; // TODO: binding okay?
905 spareneuron
[index
] = spareneuron
[4]; }
908 case 67: // if negative goto
909 // TODO: make sure this is correct
910 if (accumulator
< 0.0f
) HANDLE_GOTO
913 case 68: // if positive goto
914 // TODO: make sure this is correct
915 if (accumulator
> 0.0f
) HANDLE_GOTO
919 // unknown opcode, so do nothing!
920 warnUnimplementedSVRule(rule
.opcode
);
932 * Constructor for a c2eBrain. Pass it the creature it belongs to, and it will construct itself.
935 c2eBrain::c2eBrain(c2eCreature
*p
) {
941 * c2eBrain::processGenes
943 * Called by the parent creature when new genes should be loaded (eg, during creation or lifestage change).
946 void c2eBrain::processGenes() {
947 for (vector
<gene
*>::iterator i
= parent
->getGenome()->genes
.begin(); i
!= parent
->getGenome()->genes
.end(); i
++) {
950 if (!parent
->shouldProcessGene(g
)) continue;
952 if (typeid(*g
) == typeid(c2eBrainLobeGene
)) {
953 c2eLobe
*l
= new c2eLobe(this, (c2eBrainLobeGene
*)g
);
954 components
.insert(l
);
955 lobes
[l
->getId()] = l
;
956 } else if (typeid(*g
) == typeid(c2eBrainTractGene
)) {
957 c2eTract
*t
= new c2eTract(this, (c2eBrainTractGene
*)g
);
958 components
.insert(t
);
967 * Initialises new brain components in order.
970 void c2eBrain::init() {
971 for (std::multiset
<c2eBrainComponent
*, c2ebraincomponentorder
>::iterator i
= components
.begin(); i
!= components
.end(); i
++) {
972 if (!(*i
)->wasInited())
980 * Updates brain components as required, in order.
983 void c2eBrain::tick() {
984 for (std::multiset
<c2eBrainComponent
*, c2ebraincomponentorder
>::iterator i
= components
.begin(); i
!= components
.end(); i
++) {
985 // TODO: good check for this?
986 if ((*i
)->getUpdateTime() != 0)
992 * c2eBrain::getLobeByTissue
994 * Given a tissue ID as used internally in the genome, return the relevant c2eLobe object, if any, or null otherwise.
997 c2eLobe
*c2eBrain::getLobeByTissue(unsigned int id
) {
998 for (std::map
<std::string
, c2eLobe
*>::iterator i
= lobes
.begin(); i
!= lobes
.end(); i
++) {
999 if (i
->second
->getGene()->tissue
== id
)
1007 * c2eBrain::getLobeById
1009 * Given a lobe ID, return the relevant c2eLobe object, if any, or null otherwise.
1012 c2eLobe
*c2eBrain::getLobeById(std::string id
) {
1013 std::map
<std::string
, c2eLobe
*>::iterator i
= lobes
.find(id
);
1015 if (i
!= lobes
.end())
1021 /* vim: set noet: */