*Fix test_livestatus. The class-wide Comment-id had to be reset before each testcase
#!/usr/bin/env python2.6

# This file provides the common base test class (ShinkenTest) used by the
# Shinken test suite, e.g. for host- and service-downtime tests.

import sys
import time
import datetime
import os
import string
import re
import random
import unittest

sys.path.append("..")
sys.path.append("../shinken")
#sys.path.append("../bin")
#sys.path.append(os.path.abspath("bin"))

import shinken
from shinken.objects.config import Config
from shinken.objects.command import Command
from shinken.objects.module import Module
from shinken.dispatcher import Dispatcher
from shinken.log import logger
from shinken.scheduler import Scheduler
from shinken.macroresolver import MacroResolver
from shinken.external_command import ExternalCommandManager, ExternalCommand
from shinken.check import Check
from shinken.message import Message
from shinken.arbiterlink import ArbiterLink
from shinken.schedulerlink import SchedulerLink
from shinken.pollerlink import PollerLink
from shinken.reactionnerlink import ReactionnerLink
from shinken.brokerlink import BrokerLink
from shinken.satellitelink import SatelliteLink
from shinken.notification import Notification
from shinken.brok import Brok


class ShinkenTest(unittest.TestCase):
    def setUp(self):
        self.setup_with_file('etc/nagios_1r_1h_1s.cfg')

    def setup_with_file(self, path):
        # i am arbiter-like
        Config.fill_usern_macros()
        self.broks = {}
        self.me = None
        self.log = logger
        self.log.load_obj(self)
        self.config_files = [path]
        self.conf = Config()
        buf = self.conf.read_config(self.config_files)
        raw_objects = self.conf.read_config_buf(buf)
        self.conf.create_objects_for_type(raw_objects, 'arbiter')
        self.conf.create_objects_for_type(raw_objects, 'module')
        self.conf.early_arbiter_linking()
        self.conf.create_objects(raw_objects)
        self.conf.instance_id = 0
        self.conf.instance_name = 'test'
        self.conf.linkify_templates()
        self.conf.apply_inheritance()
        self.conf.explode()
        self.conf.create_reversed_list()
        self.conf.remove_twins()
        self.conf.apply_implicit_inheritance()
        self.conf.fill_default()
        self.conf.clean_useless()
        self.conf.pythonize()
        self.conf.linkify()
        self.conf.apply_dependancies()
        self.conf.explode_global_conf()
        self.conf.propagate_timezone_option()
        self.conf.create_business_rules()
        self.conf.create_business_rules_dependencies()
        self.conf.is_correct()
        self.confs = self.conf.cut_into_parts()
        self.dispatcher = Dispatcher(self.conf, self.me)
        self.sched = Scheduler(None, None)
        m = MacroResolver()
        m.init(self.conf)
        self.sched.load_conf(self.conf)
        e = ExternalCommandManager(self.conf, 'applyer')
        self.sched.external_command = e
        e.load_scheduler(self.sched)
        e2 = ExternalCommandManager(self.conf, 'dispatcher')
        e2.load_arbiter(self)
        self.external_command_dispatcher = e2
        self.sched.schedule()

    def add(self, b):
        if isinstance(b, Brok):
            self.broks[b.id] = b
            return
        if isinstance(b, ExternalCommand):
            self.sched.run_external_command(b.cmd_line)

    def fake_check(self, ref, exit_status, output="OK"):
        #print "fake", ref
        now = time.time()
        ref.schedule(force=True)
        # now the checks are scheduled and we get them in the action queue
        check = ref.actions.pop()
        self.sched.add(check)  # check is now in sched.checks[]
        # fake execution
        check.check_time = now
        elts_line1 = output.split('|')
        # first line before | is output
        check.output = elts_line1[0]
        # after | is perfdata
        if len(elts_line1) > 1:
            check.perf_data = elts_line1[1]
        else:
            check.perf_data = ''
        check.exit_status = exit_status
        check.execution_time = 0.001
        check.status = 'waitconsume'
        self.sched.waiting_results.append(check)

    def scheduler_loop(self, count, reflist, do_sleep=False, sleep_time=61):
        for ref in reflist:
            (obj, exit_status, output) = ref
            obj.checks_in_progress = []
        for loop in range(1, count + 1):
            print "processing check", loop
            for ref in reflist:
                (obj, exit_status, output) = ref
                obj.update_in_checking()
                self.fake_check(obj, exit_status, output)
            self.sched.manage_internal_checks()
            self.sched.consume_results()
            self.sched.get_new_actions()
            self.sched.get_new_broks()
            self.worker_loop()
            for ref in reflist:
                (obj, exit_status, output) = ref
                obj.checks_in_progress = []
            self.sched.update_downtimes_and_comments()
            #time.sleep(ref.retry_interval * 60 + 1)
            if do_sleep:
                time.sleep(sleep_time)
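
    # Typical scheduler_loop call from a concrete test case (illustrative
    # sketch only; the "test_host_0" / "test_ok_0" names are assumptions about
    # the loaded test configuration, not something this file guarantees):
    #   host = self.sched.hosts.find_by_name("test_host_0")
    #   svc = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "test_ok_0")
    #   self.scheduler_loop(3, [[host, 2, 'DOWN'], [svc, 2, 'CRITICAL']])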

    def worker_loop(self):
        self.sched.delete_zombie_checks()
        self.sched.delete_zombie_actions()
        checks = self.sched.get_to_run_checks(True, False)
        actions = self.sched.get_to_run_checks(False, True)
        #print "------------ worker loop checks ----------------"
        #print checks
        #print "------------ worker loop actions ----------------"
        self.show_actions()
        #print "------------ worker loop new ----------------"
        for a in actions:
            a.status = 'inpoller'
            a.check_time = time.time()
            a.exit_status = 0
            self.sched.put_results(a)
        self.show_actions()
        #print "------------ worker loop end ----------------"

    def show_logs(self):
        print "--- logs <<<----------------------------------"
        for brok in sorted(self.sched.broks.values(), lambda x, y: x.id - y.id):
            if brok.type == 'log':
                print "LOG:", brok.data['log']
        print "--- logs >>>----------------------------------"

    def show_actions(self):
        print "--- actions <<<----------------------------------"
        for a in sorted(self.sched.actions.values(), lambda x, y: x.id - y.id):
            if a.is_a == 'notification':
                if a.ref.my_type == "host":
                    ref = "host: %s" % a.ref.get_name()
                else:
                    ref = "host: %s svc: %s" % (a.ref.host.get_name(), a.ref.get_name())
                print "NOTIFICATION %d %s %s %s %s" % (a.id, ref, a.type, time.asctime(time.localtime(a.t_to_go)), a.status)
            elif a.is_a == 'eventhandler':
                print "EVENTHANDLER:", a
        print "--- actions >>>----------------------------------"

    def show_and_clear_logs(self):
        self.show_logs()
        self.clear_logs()

    def show_and_clear_actions(self):
        self.show_actions()
        self.clear_actions()

    def count_logs(self):
        return len([b for b in self.sched.broks.values() if b.type == 'log'])

    def count_actions(self):
        return len(self.sched.actions.values())

    def clear_logs(self):
        id_to_del = []
        for b in self.sched.broks.values():
            if b.type == 'log':
                id_to_del.append(b.id)
        for id in id_to_del:
            del self.sched.broks[id]

    def clear_actions(self):
        self.sched.actions = {}

    def log_match(self, index, pattern):
        # log messages are counted 1...n, so index=1 for the first message
        if index > self.count_logs():
            return False
        else:
            regex = re.compile(pattern)
            lognum = 1
            for brok in sorted(self.sched.broks.values(), lambda x, y: x.id - y.id):
                if brok.type == 'log':
                    if index == lognum:
                        if re.search(regex, brok.data['log']):
                            return True
                    lognum += 1
            return False

    def any_log_match(self, pattern):
        regex = re.compile(pattern)
        for brok in sorted(self.sched.broks.values(), lambda x, y: x.id - y.id):
            if brok.type == 'log':
                if re.search(regex, brok.data['log']):
                    return True
        return False
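
    # Example assertions built on the log helpers above (illustrative sketch;
    # the exact log wording depends on the monitoring core's alert format):
    #   self.assert_(self.any_log_match('SERVICE ALERT.*CRITICAL'))
    #   self.assert_(self.log_match(1, 'HOST ALERT.*DOWN'))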

    def get_log_match(self, pattern):
        regex = re.compile(pattern)
        res = []
        for brok in sorted(self.sched.broks.values(), lambda x, y: x.id - y.id):
            if brok.type == 'log':
                if re.search(regex, brok.data['log']):
                    res.append(brok.data['log'])
        return res

    def print_header(self):
        print "#" * 80 + "\n" + "#" + " " * 78 + "#"
        print "#" + string.center(self.id(), 78) + "#"
        print "#" + " " * 78 + "#\n" + "#" * 80 + "\n"

    def xtest_conf_is_correct(self):
        self.print_header()
        self.assert_(self.conf.conf_is_correct)
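

# A minimal sketch of how a concrete test module builds on ShinkenTest
# (illustrative only; the class and host names below are assumptions,
# not part of this file):
#
# class TestHostGoesDown(ShinkenTest):
#     def test_host_down(self):
#         self.print_header()
#         host = self.sched.hosts.find_by_name("test_host_0")
#         self.scheduler_loop(3, [[host, 2, 'DOWN']])
#         self.assert_(host.state == 'DOWN')
#         self.show_and_clear_logs()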


if __name__ == '__main__':
    unittest.main()