# Add: reactionner dead test in end_to_end test
# [shinken.git] / test / shinken_test.py
# blob f2433e687e078e87348c0385d84dc776e6354eac
1 #!/usr/bin/env python2.6
4 # This file is used to test host- and service-downtimes.
7 import sys
8 import time
9 import datetime
10 import os
11 import string
12 import re
13 import random
14 import unittest
15 sys.path.append("..")
16 sys.path.append("../shinken")
17 from shinken.config import Config
18 from shinken.dispatcher import Dispatcher
19 from shinken.log import Log
20 from shinken.scheduler import Scheduler
21 from shinken.macroresolver import MacroResolver
22 from shinken.external_command import ExternalCommandManager, ExternalCommand
23 from shinken.check import Check
24 from shinken.module import Module
25 from shinken.schedulerlink import SchedulerLink
26 from shinken.pollerlink import PollerLink
27 from shinken.reactionnerlink import ReactionnerLink
28 from shinken.brokerlink import BrokerLink
29 from shinken.notification import Notification
30 from shinken.command import Command
32 class ShinkenTest(unittest.TestCase):
33 def setUp(self):
34 self.setup_with_file('etc/nagios_1r_1h_1s.cfg')
36 def setup_with_file(self, path):
37 # i am arbiter-like
38 Config.fill_usern_macros()
39 self.broks = {}
40 self.me = None
41 self.log = Log()
42 self.log.load_obj(self)
43 self.config_files = [path]
44 self.conf = Config()
45 self.conf.read_config(self.config_files)
46 buf = self.conf.read_config(self.config_files)
47 raw_objects = self.conf.read_config_buf(buf)
48 self.conf.create_objects_for_type(raw_objects, 'arbiter')
49 self.conf.create_objects_for_type(raw_objects, 'module')
50 self.conf.early_arbiter_linking()
51 self.conf.create_objects(raw_objects)
52 self.conf.instance_id = 0
53 self.conf.instance_name = 'test'
54 self.conf.linkify_templates()
55 self.conf.apply_inheritance()
56 self.conf.explode()
57 self.conf.create_reversed_list()
58 self.conf.remove_twins()
59 self.conf.apply_implicit_inheritance()
60 self.conf.fill_default()
61 self.conf.clean_useless()
62 self.conf.pythonize()
63 self.conf.linkify()
64 self.conf.apply_dependancies()
65 self.conf.explode_global_conf()
66 self.conf.is_correct()
67 self.confs = self.conf.cut_into_parts()
68 self.dispatcher = Dispatcher(self.conf, self.me)
69 self.sched = Scheduler(None)
70 m = MacroResolver()
71 m.init(self.conf)
72 self.sched.load_conf(self.conf)
73 e = ExternalCommandManager(self.conf, 'applyer')
74 self.sched.external_command = e
75 e.load_scheduler(self.sched)
76 self.sched.schedule()
79 def add(self, b):
80 self.broks[b.id] = b
83 def fake_check(self, ref, exit_status, output="OK"):
84 #print "fake", ref
85 now = time.time()
86 ref.schedule(force=True)
87 #now checks are schedule and we get them in
88 #the action queue
89 check = ref.actions.pop()
90 self.sched.add(check) # check is now in sched.checks[]
91 # fake execution
92 check.check_time = now
94 elts_line1 = output.split('|')
95 #First line before | is output
96 check.output = elts_line1[0]
97 #After | is perfdata
98 if len(elts_line1) > 1:
99 check.perf_data = elts_line1[1]
100 else:
101 check.perf_data = ''
102 check.exit_status = exit_status
103 check.execution_time = 0.001
104 check.status = 'waitconsume'
105 self.sched.waiting_results.append(check)
108 def scheduler_loop(self, count, reflist, do_sleep=False, sleep_time=61):
109 for ref in reflist:
110 (obj, exit_status, output) = ref
111 obj.checks_in_progress = []
112 for loop in range(1, count + 1):
113 print "processing check", loop
114 for ref in reflist:
115 (obj, exit_status, output) = ref
116 obj.update_in_checking()
117 self.fake_check(obj, exit_status, output)
118 self.sched.consume_results()
119 self.sched.get_new_actions()
120 self.sched.get_new_broks()
121 self.worker_loop()
122 for ref in reflist:
123 (obj, exit_status, output) = ref
124 obj.checks_in_progress = []
125 self.sched.update_downtimes_and_comments()
126 #time.sleep(ref.retry_interval * 60 + 1)
127 if do_sleep:
128 time.sleep(sleep_time)
131 def worker_loop(self):
132 self.sched.delete_zombie_checks()
133 self.sched.delete_zombie_actions()
134 checks = self.sched.get_to_run_checks(True, False)
135 actions = self.sched.get_to_run_checks(False, True)
136 #print "------------ worker loop checks ----------------"
137 #print checks
138 #print "------------ worker loop actions ----------------"
139 self.show_actions()
140 #print "------------ worker loop new ----------------"
141 for a in actions:
142 a.status = 'inpoller'
143 a.check_time = time.time()
144 a.exit_status = 0
145 self.sched.put_results(a)
146 self.show_actions()
147 #print "------------ worker loop end ----------------"
150 def show_logs(self):
151 print "--- logs <<<----------------------------------"
152 for brok in sorted(self.sched.broks.values(), lambda x, y: x.id - y.id):
153 if brok.type == 'log':
154 print "LOG:", brok.data['log']
155 print "--- logs >>>----------------------------------"
158 def show_actions(self):
159 print "--- actions <<<----------------------------------"
160 for a in sorted(self.sched.actions.values(), lambda x, y: x.id - y.id):
161 if a.is_a == 'notification':
162 if a.ref.my_type == "host":
163 ref = "host: %s" % a.ref.get_name()
164 else:
165 ref = "host: %s svc: %s" % (a.ref.host.get_name(), a.ref.get_name())
166 print "NOTIFICATION %d %s %s %s %s" % (a.id, ref, a.type, time.asctime(time.localtime(a.t_to_go)), a.status)
167 elif a.is_a == 'eventhandler':
168 print "EVENTHANDLER:", a
169 print "--- actions >>>----------------------------------"
172 def show_and_clear_logs(self):
173 self.show_logs()
174 self.clear_logs()
177 def show_and_clear_actions(self):
178 self.show_actions()
179 self.clear_actions()
182 def count_logs(self):
183 return len([b for b in self.sched.broks.values() if b.type == 'log'])
186 def count_actions(self):
187 return len(self.sched.actions.values())
190 def clear_logs(self):
191 id_to_del = []
192 for b in self.sched.broks.values():
193 if b.type == 'log':
194 id_to_del.append(b.id)
195 for id in id_to_del:
196 del self.sched.broks[id]
199 def clear_actions(self):
200 self.sched.actions = {}
203 def log_match(self, index, pattern):
204 # log messages are counted 1...n, so index=1 for the first message
205 if index > self.count_logs():
206 return False
207 else:
208 regex = re.compile(pattern)
209 lognum = 1
210 for brok in sorted(self.sched.broks.values(), lambda x, y: x.id - y.id):
211 if brok.type == 'log':
212 if index == lognum:
213 if re.search(regex, brok.data['log']):
214 return True
215 lognum += 1
216 return False
219 def any_log_match(self, pattern):
220 regex = re.compile(pattern)
221 for brok in sorted(self.sched.broks.values(), lambda x, y: x.id - y.id):
222 if brok.type == 'log':
223 if re.search(regex, brok.data['log']):
224 return True
225 return False
228 def get_log_match(self, pattern):
229 regex = re.compile(pattern)
230 res = []
231 for brok in sorted(self.sched.broks.values(), lambda x, y: x.id - y.id):
232 if brok.type == 'log':
233 if re.search(regex, brok.data['log']):
234 res.append(brok.data['log'])
235 return res
239 def print_header(self):
240 print "#" * 80 + "\n" + "#" + " " * 78 + "#"
241 print "#" + string.center(self.id(), 78) + "#"
242 print "#" + " " * 78 + "#\n" + "#" * 80 + "\n"
247 def xtest_conf_is_correct(self):
248 self.print_header()
249 self.assert_(self.conf.conf_is_correct)
if __name__ == '__main__':
    # Run every test_* method of this module when executed as a script.
    unittest.main()