/*
 *  c2eBrain.cpp
 *  openc2e
 *
 *  Created by Alyssa Milburn on Wed Apr 11 2007.
 *  Copyright (c) 2007 Alyssa Milburn. All rights reserved.
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 *  Lesser General Public License for more details.
 *
 */

#include "c2eBrain.h"
#include "Creature.h"

#include <math.h>
#include <stdlib.h> // rand()
#include <assert.h>

#include <algorithm> // std::min, std::max
#include <iostream>
#include <typeinfo>

#include <boost/format.hpp>

float dummyValues[8] = { 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f };
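// (these dummy values get passed to c2eSVRule::runRule wherever a particular
// variable set doesn't apply - lobes have no source-neuron or dendrite variables,
// and tracts have no spare neuron - so rules which touch those slots just
// read/write this scratch space instead)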

/*
 * c2ebraincomponentorder::operator()
 *
 * A functor to sort brain components by their update time.
 *
 */
bool c2ebraincomponentorder::operator()(const class c2eBrainComponent *b1, const class c2eBrainComponent *b2) const {
	return b1->updatetime < b2->updatetime;
}

/*
 * c2eTract::c2eTract
 *
 * Constructor for a c2eTract. Pass it the relevant gene.
 *
 */
c2eTract::c2eTract(c2eBrain *b, c2eBrainTractGene *g) : c2eBrainComponent(b) {
	assert(g);
	ourGene = g;
	updatetime = g->updatetime;

	initrule.init(g->initialiserule);
	updaterule.init(g->updaterule);
}

/*
 * c2eTract::setupTract
 *
 * Internal function which sets up the details of a tract (which needs to wait until
 * after the lobes are constructed).
 *
 */
void c2eTract::setupTract() {
	c2eBrainTractGene *g = ourGene;
	c2eBrain *b = parent;

	std::string srclobename = std::string((char *)g->srclobe, 4);
	std::string destlobename = std::string((char *)g->destlobe, 4);

	if (b->lobes.find(srclobename) == b->lobes.end() || b->lobes.find(destlobename) == b->lobes.end()) {
		std::cout << "brain debug: failed to create dendrites for " << dump() << " (missing lobe)" << std::endl;
		return;
	}

	c2eLobe *srclobe = b->lobes[srclobename];
	c2eLobe *destlobe = b->lobes[destlobename];

	for (unsigned int i = g->srclobe_lowerbound; i <= g->srclobe_upperbound; i++) {
		if (i >= srclobe->getNoNeurons()) break;
		src_neurons.push_back(srclobe->getNeuron(i));
	}

	for (unsigned int i = g->destlobe_lowerbound; i <= g->destlobe_upperbound; i++) {
		if (i >= destlobe->getNoNeurons()) break;
		dest_neurons.push_back(destlobe->getNeuron(i));
	}

	if (src_neurons.size() == 0 || dest_neurons.size() == 0) {
		std::cout << "brain debug: failed to create dendrites for " << dump() << " (no neurons)" << std::endl;
		return;
	}

	// create/distribute dendrites as needed
	if (g->migrates) {
		// You can't have *both* sides of the tract unconstrained, we'd have no idea how many dendrites to make!
		if (g->src_noconnections == 0 && g->dest_noconnections == 0) {
			std::cout << "brain debug: failed to create dendrites for " << dump() << " (both connections unconstrained)" << std::endl;
			return;
		} else if (g->src_noconnections != 0 && g->dest_noconnections != 0) {
			// TODO: correct behaviour? seems to be, given CL's brain-in-a-vat behaviour
			std::cout << "brain debug: failed to create dendrites for " << dump() << " (no unconstrained connections)" << std::endl;
			return;
		}

		// assume we're doing src->dest
		unsigned int neuronsize = src_neurons.size();
		unsigned int noconnections = g->src_noconnections;
		// change things if we're doing dest->src :)
		if (g->src_noconnections == 0) {
			neuronsize = dest_neurons.size();
			noconnections = g->dest_noconnections;
		}

		// distribute neurons
		// TODO: work out if this algorithm works vaguely correctly
		// TODO: low-order bit badness in the randomness?
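		// (so: every neuron on the side with a specified connection count gets
		// noconnections dendrites - or a random 1..noconnections of them if
		// norandomconnections is set - each attached to a randomly chosen neuron
		// on the unconstrained side)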
		for (unsigned int i = 0; i < neuronsize; i++) {
			unsigned int noconns = noconnections;
			if (g->norandomconnections)
				noconns = 1 + (rand() % noconnections);

			for (unsigned int j = 0; j < noconns; j++) {
				c2eDendrite d;
				if (g->src_noconnections == 0) {
					d.source = src_neurons[rand() % src_neurons.size()];
					d.dest = dest_neurons[i];
				} else {
					d.source = src_neurons[i];
					d.dest = dest_neurons[rand() % dest_neurons.size()];
				}
				dendrites.push_back(d);
			}
		}
	} else {
		// if the genome tells us to make no connections, give up
		if (g->src_noconnections == 0 || g->dest_noconnections == 0) {
			std::cout << "brain debug: failed to create dendrites for " << dump() << " (no connections)" << std::endl;
			return;
		}

		// distribute neurons
		// this seems identical to CL's brain-in-a-vat for the default brain and for some test cases fuzzie made up
		// TODO: test the algorithm a bit more
		// TODO: take notice of norandomconnections? (doesn't look like it)
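		// (each pass of the loop below adds one dendrite between the current
		// source/dest pair, stepping to the next dest neuron every src_noconnections
		// dendrites and to the next src neuron every dest_noconnections dendrites,
		// wrapping around as needed; it stops as soon as it reaches a pair which is
		// already connected)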
		unsigned int srcneuron = 0, srcconns = 0;
		unsigned int destneuron = 0, destconns = 0;
		while (true) {
			c2eNeuron *src = src_neurons[srcneuron];
			c2eNeuron *dest = dest_neurons[destneuron];

			// if there's already a dendrite like the one we're about to create, we're done
			if (getDendriteFromTo(src, dest)) return;

			c2eDendrite d;
			d.source = src;
			d.dest = dest;
			dendrites.push_back(d);

			srcconns++;
			if (srcconns >= g->src_noconnections) {
				srcconns = 0;
				destneuron++;
				if (destneuron >= dest_neurons.size())
					destneuron = 0;
			}
			destconns++;
			if (destconns >= g->dest_noconnections) {
				destconns = 0;
				srcneuron++;
				if (srcneuron >= src_neurons.size())
					srcneuron = 0;
			}
		}
	}
}

/*
 * c2eTract::dump
 *
 * Returns a textual string describing the tract for use in debug messages.
 *
 */
std::string c2eTract::dump() {
	c2eBrainTractGene *g = ourGene;

	std::string srclobename = std::string((char *)g->srclobe, 4);
	std::string destlobename = std::string((char *)g->destlobe, 4);

	std::string data = boost::str(boost::format("tract %s->%s, src neurons %d-%d #cons %d, dest neurons %d-%d #cons %d") % srclobename % destlobename
		% (int)g->srclobe_lowerbound % (int)g->srclobe_upperbound % (int)g->src_noconnections
		% (int)g->destlobe_lowerbound % (int)g->destlobe_upperbound % (int)g->dest_noconnections
		);

	if (g->migrates) data += ", migratory";

	return data;
}

/*
 * c2eTract::getDendriteFromTo
 *
 * Returns the dendrite from this tract between the two neurons, or null if there isn't one.
 *
 */
c2eDendrite *c2eTract::getDendriteFromTo(c2eNeuron *from, c2eNeuron *to) {
	for (std::vector<c2eDendrite>::iterator i = dendrites.begin(); i != dendrites.end(); i++) {
		if (i->source == from && i->dest == to) return &(*i);
	}

	return 0;
}

/*
 * c2eTract::tick
 *
 * Do a single update of the tract.
 *
 */
void c2eTract::tick() {
	// attempt to migrate dendrites, if enabled
	if (ourGene->migrates)
		doMigration();

	// run the svrule(s) against every dendrite
	for (std::vector<c2eDendrite>::iterator i = dendrites.begin(); i != dendrites.end(); i++) {
		if (ourGene->initrulealways) initrule.runRule(i->source->variables[0], i->source->variables, i->dest->variables, dummyValues, i->variables, parent->getParent());
		updaterule.runRule(i->source->variables[0], i->source->variables, i->dest->variables, dummyValues, i->variables, parent->getParent());
	}

	// TODO: reward/punishment? anything else? scary brains!
}

void c2eTract::wipe() {
	for (std::vector<c2eDendrite>::iterator i = dendrites.begin(); i != dendrites.end(); i++) {
		for (unsigned int j = 0; j < 8; j++)
			i->variables[j] = 0.0f;
	}
}

void c2eTract::init() {
	inited = true;

	setupTract();
	wipe();

	for (std::vector<c2eDendrite>::iterator i = dendrites.begin(); i != dendrites.end(); i++) {
		// TODO: good way to run rule?
		if (!ourGene->initrulealways)
			initrule.runRule(0.0f, dummyValues, dummyValues, dummyValues, i->variables, parent->getParent());
	}
}

void c2eTract::doMigration() {
	/*
	 * TODO: this is utter guesswork(tm)
	 */
	for (std::vector<c2eDendrite>::iterator i = dendrites.begin(); i != dendrites.end(); i++) {
		c2eDendrite &d = *i;

		// 7 = strength
		// TODO: prbly "Migration Parameters" catalogue tag thing
		if (d.variables[7] == 0.0f) {
			// this one is loose!

			// if we migrate to make limited connections to the *src*
			if (ourGene->src_noconnections != 0) {
				// srcvar, destvar

				// search for the highest NGF in d->source
				c2eNeuron *highestsrc = 0;
				for (std::vector<c2eNeuron *>::iterator i = src_neurons.begin(); i != src_neurons.end(); i++) {
					c2eNeuron *n = *i;
					if ((highestsrc && n->variables[ourGene->srcvar] > highestsrc->variables[ourGene->srcvar]) || n->variables[ourGene->srcvar] > 0.0f) {
						highestsrc = n;
					}
				}
				if (!highestsrc) continue;

				// search for the highest NGF in d->dest which isn't already linked
				c2eNeuron *highestdest = 0;
				for (std::vector<c2eNeuron *>::iterator i = dest_neurons.begin(); i != dest_neurons.end(); i++) {
					c2eNeuron *n = *i;
					if ((highestdest && n->variables[ourGene->destvar] > highestdest->variables[ourGene->destvar]) || n->variables[ourGene->destvar] > 0.0f) {
						if (!getDendriteFromTo(highestsrc, n))
							highestdest = n;
					}
				}
				if (!highestdest) continue;

				// connect them!
				d.source = highestsrc;
				d.dest = highestdest;

				if (ourGene->initrulealways) {
					// wipe
					for (unsigned int j = 0; j < 8; j++)
						d.variables[j] = 0.0f;
				} else {
					// re-run init rule
					initrule.runRule(0.0f, dummyValues, dummyValues, dummyValues, d.variables, parent->getParent());
				}
			// else if we migrate to make limited connections to the *dest*
			} else {
				std::cout << "wah, you used something which isn't in the standard brain model, meanie" << std::endl; // TODO
			}
		}
	}
}

/*
 * c2eLobe::c2eLobe
 *
 * Constructor for a c2eLobe. Pass it the relevant gene.
 *
 */
c2eLobe::c2eLobe(c2eBrain *b, c2eBrainLobeGene *g) : c2eBrainComponent(b) {
	assert(g);
	ourGene = g;
	updatetime = g->updatetime;

	spare = 0;
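	// ('spare' holds the index of the last neuron whose SVRule hit the
	// "register as spare" opcode; tick() passes that neuron's variables back
	// into later rule runs as the spare-neuron set)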

	unsigned int width = g->width, height = g->height;
	if (width < 1) width = 1;
	if (height < 1) height = 1;

	neurons.reserve(width * height);

	c2eNeuron n;
	for (unsigned int i = 0; i < width * height; i++) {
		neurons.push_back(n);
	}

	initrule.init(g->initialiserule);
	updaterule.init(g->updaterule);
}

/*
 * c2eLobe::wipe
 *
 * Causes a c2eLobe to wipe its variables.
 *
 */
void c2eLobe::wipe() {
	for (std::vector<c2eNeuron>::iterator i = neurons.begin(); i != neurons.end(); i++) {
		for (unsigned int j = 0; j < 8; j++)
			i->variables[j] = 0.0f;
	}
}

/*
 * c2eLobe::tick
 *
 * Do a single update of the lobe.
 *
 */
void c2eLobe::tick() {
	// run the svrule(s) against every neuron
	for (unsigned int i = 0; i < neurons.size(); i++) {
		if (ourGene->initrulealways && initrule.runRule(neurons[i].input, dummyValues, neurons[i].variables, neurons[spare].variables, dummyValues, parent->getParent()))
			spare = i;
		if (updaterule.runRule(neurons[i].input, dummyValues, neurons[i].variables, neurons[spare].variables, dummyValues, parent->getParent()))
			spare = i;
		neurons[i].input = 0.0f;
	}
}

/*
 * c2eLobe::init
 *
 * Initialise the lobe, resetting variables and running the initialisation rule.
 *
 */
void c2eLobe::init() {
	inited = true;

	wipe();

	for (std::vector<c2eNeuron>::iterator i = neurons.begin(); i != neurons.end(); i++) {
		// TODO: good way to run rule?
		if (!ourGene->initrulealways)
			initrule.runRule(0.0f, dummyValues, i->variables, dummyValues, dummyValues, parent->getParent());
		i->input = 0.0f; // TODO: good to do that here?
	}
}

/*
 * c2eLobe::setNeuronInput
 *
 * Set the input value for the specified neuron.
 */
void c2eLobe::setNeuronInput(unsigned int i, float input) {
	assert(i < neurons.size());
	neurons[i].input = input; // TODO: always stomp over any existing input?
}

/*
 * c2eLobe::getId
 *
 * Convenience function to return the Lobe ID of this lobe.
 *
 */
std::string c2eLobe::getId() {
	return std::string((char *)ourGene->id, 4);
}

/*
 * c2eSVRule::init
 *
 * Initialises an SVRule from the provided data, precalculating any constant data.
 *
 */
void c2eSVRule::init(uint8 ruledata[48]) {
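	// the 48 bytes of rule data are 16 rules of 3 bytes each: opcode, operand type, operand data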
	rules.reserve(16);

	for (unsigned int i = 0; i < 16; i++) {
		c2erule rule;

		rule.opcode = ruledata[i * 3];
		rule.operandtype = ruledata[(i * 3) + 1];
		rule.operanddata = ruledata[(i * 3) + 2];

		switch (rule.operandtype) {
			// for neuron/dendrite values, sanitise value (there are only 8 options)

			case 1: // input neuron
			case 2: // dendrite
			case 3: // neuron
			case 4: // spare neuron
				// TODO: what should we do here?
				if (rule.operanddata > 7) {
					std::cout << "brain debug: had a too-high variable number" << std::endl;
					rule.operanddata = 7;
				}
				break;

			// for constant values, precalculate data
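			// (operand data is a single genome byte; the 1/248 scaling below presumably
			// maps it onto roughly 0.0..1.0, with 248 rather than 255 as full scale)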

			case 9: // zero
				rule.operandvalue = 0.0f;
				break;

			case 10: // one
				rule.operandvalue = 1.0f;
				break;

			case 11: // value
				rule.operandvalue = (float)rule.operanddata * (1.0f / 248);
				break;

			case 12: // negative value
				rule.operandvalue = (float)rule.operanddata * (-1.0f / 248);
				break;

			case 13: // value * 10
				rule.operandvalue = (float)rule.operanddata * (10.0f / 248);
				break;

			case 14: // value / 10
				rule.operandvalue = (float)rule.operanddata * (0.1f / 248);
				break;

			case 15: // value integer
				rule.operandvalue = (float)rule.operanddata;
				break;
		}

		rules.push_back(rule);
	}
}

// convenience function for c2eSVRule::runRule
inline float bindFloatValue(float val, float min = -1.0f, float max = 1.0f) {
	if (val > max) return max;
	else if (val < min) return min;
	else return val;
}

// warn-once function for unimplemented svrule opcodes/operand types in c2eSVRule::runRule
inline void warnUnimplementedSVRule(unsigned char data, bool opcode = true) {
	static bool warnedalready = false;
	if (warnedalready) return;
	warnedalready = true;

	std::cout << "brain debug: something tried using unimplemented " << (opcode ? "opcode" : "operand type") << " " <<
		(unsigned int)data << ", will not warn about unimplemented svrule bits again." << std::endl;
}

// goto locations are one-based
// we must never jump backwards, only forwards
#define HANDLE_GOTO if ((unsigned int)operandvalue - 2 > i) i = (unsigned int)operandvalue - 2;
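// (goto targets count SVRule lines from 1; subtracting 2 gives the zero-based index
// of the line *before* the target, so the loop's i++ then lands on the target itself.
// a target at or before the current line is ignored, and a target of 0 or 1 underflows
// the unsigned subtraction and so effectively ends the rule)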

/*
 * c2eSVRule::runRule
 *
 * Executes the SVRule using the provided variables.
 *
 * Returns whether the 'register as spare' opcode was executed or not.
 *
 */
bool c2eSVRule::runRule(float acc, float srcneuron[8], float neuron[8], float spareneuron[8], float dendrite[8], c2eCreature *creature) {
	float accumulator = acc;
	float operandvalue = 0.0f; // valid rules should never use this
	float tendrate = 0.0f;
	float *operandpointer;
	float dummy;
	static float stw = 0.0f; // TODO: good default?
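	// (note this is static: the short-term relax rate set by opcode 43 persists
	// across *every* SVRule execution in the process, not just this rule)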
	bool is_spare = false;
	bool skip_next = false;

	for (unsigned int i = 0; i < rules.size(); i++) {
		c2erule &rule = rules[i];

		if (skip_next) { // if the last if opcode was *false*..
			// .. then don't execute the next line
			skip_next = false;
			continue;
		}

		// We don't always *need* the operand and/or pointer, but for now we'll always calculate it anyway.
		operandpointer = &dummy; // point into nowhere, by default.. TODO: good choice?
		switch (rule.operandtype) {
			case 0: // accumulator
				operandvalue = accumulator;
				// accumulator does *not* set operandpointer
				// (eg, 'blank accumulator' and 'add to and store in accumulator' do not change it)
				break;

			case 1: // input neuron
				operandpointer = &srcneuron[rule.operanddata];
				operandvalue = *operandpointer;
				break;

			case 2: // dendrite
				operandpointer = &dendrite[rule.operanddata];
				operandvalue = *operandpointer;
				break;

			case 3: // neuron
				operandpointer = &neuron[rule.operanddata];
				operandvalue = *operandpointer;
				break;

			case 4: // spare neuron
				operandpointer = &spareneuron[rule.operanddata];
				operandvalue = *operandpointer;
				break;

			case 5: // random
				// TODO: find a quicker RNG?
				// TODO: untested
				operandvalue = rand() / (float)RAND_MAX;
				break;

			case 6: // source chemical
				// TODO: unused?
				warnUnimplementedSVRule(rule.operandtype, false);
				break;

			case 7: // chemical
				// Ratboy sez: "chemicals appear to be read-only; cannot write data to them"
				operandvalue = creature->getChemical(rule.operanddata);
				break;

			case 8: // destination chemical
				// TODO: unused?
				warnUnimplementedSVRule(rule.operandtype, false);
				break;

			case 9: // zero
			case 10: // one
			case 11: // value
			case 12: // negative value
			case 13: // value * 10
			case 14: // value / 10
			case 15: // value integer
				// precalculated constants
				operandvalue = rule.operandvalue;
				break;

			default:
				warnUnimplementedSVRule(rule.operandtype, false);
				break;
		}

		switch (rule.opcode) {
			case 0: // stop
				goto done;

			case 1: // blank
				*operandpointer = 0.0f;
				break;

			case 2: // store in
				*operandpointer = bindFloatValue(accumulator);
				break;

			case 3: // load from
				accumulator = operandvalue;
				break;

			case 4: // if =
				if (!(accumulator == operandvalue))
					skip_next = true;
				break;

			case 5: // if <>
				if (!(accumulator != operandvalue))
					skip_next = true;
				break;

			case 6: // if >
				if (!(accumulator > operandvalue))
					skip_next = true;
				break;

			case 7: // if <
				if (!(accumulator < operandvalue))
					skip_next = true;
				break;

			case 8: // if >=
				if (!(accumulator >= operandvalue))
					skip_next = true;
				break;

			case 9: // if <=
				if (!(accumulator <= operandvalue))
					skip_next = true;
				break;

			case 10: // if zero
				if (!(operandvalue == 0.0f))
					skip_next = true;
				break;

			case 11: // if non-zero
				if (!(operandvalue != 0.0f))
					skip_next = true;
				break;

			case 12: // if positive
				if (!(operandvalue > 0.0f)) // TODO: correct?
					skip_next = true;
				break;

			case 13: // if negative
				if (!(operandvalue < 0.0f)) // TODO: correct?
					skip_next = true;
				break;

			case 14: // if non-positive // TODO: should be non-negative?
				if (!(operandvalue <= 0.0f)) // TODO: correct?
					skip_next = true;
				break;

			case 15: // if non-negative // TODO: should be non-positive?
				if (!(operandvalue >= 0.0f)) // TODO: correct?
					skip_next = true;
				break;

			case 16: // add
				accumulator += operandvalue;
				break;

			case 17: // subtract
				accumulator -= operandvalue;
				break;

			case 18: // subtract from
				accumulator = operandvalue - accumulator;
				break;

			case 19: // multiply by
				accumulator *= operandvalue;
				break;

			case 20: // divide by
				// TODO: make sure this is correct
				if (operandvalue != 0.0f)
					accumulator /= operandvalue;
				break;

			case 21: // divide into
				// TODO: make sure this is correct
				if (accumulator != 0.0f)
					accumulator = operandvalue / accumulator;
				break;

			case 22: // minimum with
				accumulator = std::min(accumulator, operandvalue);
				break;

			case 23: // maximum with
				accumulator = std::max(accumulator, operandvalue);
				break;

			case 24: // set tend rate
				tendrate = operandvalue;
				break;

			case 25: // tend to
				// TODO: make sure this is correct
				accumulator += tendrate * (operandvalue - accumulator);
				break;

			case 26: // load negation of
				accumulator = -operandvalue;
				break;

			case 27: // load abs of
				accumulator = fabsf(operandvalue);
				break;

			case 28: // distance to
				// TODO: make sure this is correct
				accumulator = fabsf(accumulator - operandvalue);
				break;

			case 29: // flip around
				// TODO: make sure this is correct
				accumulator = operandvalue - accumulator;
				break;

			case 30: // no operation
				break;

			case 31: // register as spare
				is_spare = true;
				break;

			case 32: // bound in range [0, 1]
				// TODO: make sure this is correct
				accumulator = bindFloatValue(operandvalue, 0.0f);
				break;

			case 33: // bound in range [-1, 1]
				// TODO: make sure this is correct
				accumulator = bindFloatValue(operandvalue);
				break;

			case 34: // add and store in
				*operandpointer = bindFloatValue(accumulator + operandvalue);
				break;

			case 35: // tend to and store in
				// TODO: make sure this is correct
				*operandpointer = bindFloatValue(accumulator + tendrate * (operandvalue - accumulator));
				break;

			case 36: // nominal threshold
				// TODO: make sure this is correct
				if (accumulator < operandvalue)
					accumulator = 0.0f;
				break;

			case 37: // leakage rate
				// TODO
				warnUnimplementedSVRule(rule.opcode);
				break;

			case 38: // rest state
				// TODO
				warnUnimplementedSVRule(rule.opcode);
				break;

			case 39: // input gain hi-lo
				// TODO
				warnUnimplementedSVRule(rule.opcode);
				break;

			case 40: // persistence
				// TODO
				warnUnimplementedSVRule(rule.opcode);
				break;

			case 41: // signal noise
				// TODO
				warnUnimplementedSVRule(rule.opcode);
				break;

			case 42: // winner takes all
				// TODO
				warnUnimplementedSVRule(rule.opcode);
				break;

			case 43: // short-term relax rate
				// TODO: should this be stored in the parent object, maybe?
				// TODO: make sure this is correct
				stw = operandvalue;
				break;

			case 44: // long-term relax rate
				// TODO: make sure this is correct
				// TODO: is this possible for neurons? (prbly not)
				{
				float weight = dendrite[0];
				// push weight downwards towards steady state (short-term learning)
				dendrite[0] = weight + (dendrite[1] - weight) * stw;
				// pull steady state upwards towards weight (long-term learning)
				dendrite[1] = dendrite[1] + (weight - dendrite[1]) * operandvalue;
				}
				break;

			case 45: // store abs in
				// TODO: make sure this is correct
				*operandpointer = fabsf(accumulator);
				break;

			case 46: // stop if zero
				if (operandvalue == 0.0f) goto done;
				break;

			case 47: // stop if non-zero
				if (operandvalue != 0.0f) goto done;
				break;

			case 48: // if zero goto
				if (accumulator == 0.0f) HANDLE_GOTO
				break;

			case 49: // if non-zero goto
				if (accumulator != 0.0f) HANDLE_GOTO
				break;

			case 50: // divide by, add to neuron input
				if (operandvalue != 0.0f)
					neuron[1] += bindFloatValue(accumulator / operandvalue);
				break;

			case 51: // multiply by, add to neuron input
				neuron[1] += bindFloatValue(accumulator * operandvalue);
				break;

			case 52: // goto line
				HANDLE_GOTO
				break;

			case 53: // stop if <
				if (accumulator < operandvalue) goto done;
				break;

			case 54: // stop if >
				if (accumulator > operandvalue) goto done;
				break;

			case 55: // stop if <=
				if (accumulator <= operandvalue) goto done;
				break;

			case 56: // stop if >=
				if (accumulator >= operandvalue) goto done;
				break;

			case 57: // reward threshold
				// TODO
				warnUnimplementedSVRule(rule.opcode);
				break;

			case 58: // reward rate
				// TODO
				warnUnimplementedSVRule(rule.opcode);
				break;

			case 59: // use reward with
				// TODO
				warnUnimplementedSVRule(rule.opcode);
				break;

			case 60: // punish threshold
				// TODO
				warnUnimplementedSVRule(rule.opcode);
				break;

			case 61: // punish rate
				// TODO
				warnUnimplementedSVRule(rule.opcode);
				break;

			case 62: // use punish with
				// TODO
				warnUnimplementedSVRule(rule.opcode);
				break;

			case 63: // preserve neuron SV
				// TODO: this seems too crazy to be true :)
				{ unsigned int index = (unsigned int)operandvalue;
				if (index > 7) index = 7; // TODO: binding okay?
				neuron[4] = neuron[index]; }
				break;

			case 64: // restore neuron SV
				// TODO: this seems too crazy to be true :)
				{ unsigned int index = (unsigned int)operandvalue;
				if (index > 7) index = 7; // TODO: binding okay?
				neuron[index] = neuron[4]; }
				break;

			case 65: // preserve spare neuron
				// TODO: this seems too crazy to be true :)
				{ unsigned int index = (unsigned int)operandvalue;
				if (index > 7) index = 7; // TODO: binding okay?
				spareneuron[4] = spareneuron[index]; }
				break;

			case 66: // restore spare neuron
				// TODO: this seems too crazy to be true :)
				{ unsigned int index = (unsigned int)operandvalue;
				if (index > 7) index = 7; // TODO: binding okay?
				spareneuron[index] = spareneuron[4]; }
				break;

			case 67: // if negative goto
				// TODO: make sure this is correct
				if (accumulator < 0.0f) HANDLE_GOTO
				break;

			case 68: // if positive goto
				// TODO: make sure this is correct
				if (accumulator > 0.0f) HANDLE_GOTO
				break;

			default:
				// unknown opcode, so do nothing!
				warnUnimplementedSVRule(rule.opcode);
				break;
		}
	}

done:
	return is_spare;
}

/*
 * c2eBrain::c2eBrain
 *
 * Constructor for a c2eBrain. Pass it the creature it belongs to, and it will construct itself.
 *
 */
c2eBrain::c2eBrain(c2eCreature *p) {
	assert(p);
	parent = p;
}

/*
 * c2eBrain::processGenes
 *
 * Called by the parent creature when new genes should be loaded (eg, during creation or lifestage change).
 *
 */
void c2eBrain::processGenes() {
	for (std::vector<gene *>::iterator i = parent->getGenome()->genes.begin(); i != parent->getGenome()->genes.end(); i++) {
		gene *g = *i;

		if (!parent->shouldProcessGene(g)) continue;

		if (typeid(*g) == typeid(c2eBrainLobeGene)) {
			c2eLobe *l = new c2eLobe(this, (c2eBrainLobeGene *)g);
			components.insert(l);
			lobes[l->getId()] = l;
		} else if (typeid(*g) == typeid(c2eBrainTractGene)) {
			c2eTract *t = new c2eTract(this, (c2eBrainTractGene *)g);
			components.insert(t);
			tracts.push_back(t);
		}
	}
}

/*
 * c2eBrain::init
 *
 * Initialises new brain components in order.
 *
 */
void c2eBrain::init() {
	for (std::multiset<c2eBrainComponent *, c2ebraincomponentorder>::iterator i = components.begin(); i != components.end(); i++) {
		if (!(*i)->wasInited())
			(*i)->init();
	}
}

/*
 * c2eBrain::tick
 *
 * Updates brain components as required, in order.
 *
 */
void c2eBrain::tick() {
	for (std::multiset<c2eBrainComponent *, c2ebraincomponentorder>::iterator i = components.begin(); i != components.end(); i++) {
		// TODO: good check for this?
		if ((*i)->getUpdateTime() != 0)
			(*i)->tick();
	}
}

/*
 * c2eBrain::getLobeByTissue
 *
 * Given a tissue ID as used internally in the genome, return the relevant c2eLobe object, if any, or null otherwise.
 *
 */
c2eLobe *c2eBrain::getLobeByTissue(unsigned int id) {
	for (std::map<std::string, c2eLobe *>::iterator i = lobes.begin(); i != lobes.end(); i++) {
		if (i->second->getGene()->tissue == id)
			return i->second;
	}

	return 0;
}

/*
 * c2eBrain::getLobeById
 *
 * Given a lobe ID, return the relevant c2eLobe object, if any, or null otherwise.
 *
 */
c2eLobe *c2eBrain::getLobeById(std::string id) {
	std::map<std::string, c2eLobe *>::iterator i = lobes.find(id);

	if (i != lobes.end())
		return i->second;
	else
		return 0;
}

/* vim: set noet: */