1 #!/usr/bin/env python2.6
4 # This file is used to test host- and service-downtimes.
17 sys
.path
.append("../shinken")
18 #sys.path.append("../bin")
19 #sys.path.append(os.path.abspath("bin"))
23 from shinken
.objects
.config
import Config
24 from shinken
.objects
.command
import Command
25 from shinken
.objects
.module
import Module
27 from shinken
.dispatcher
import Dispatcher
28 from shinken
.log
import logger
29 from shinken
.scheduler
import Scheduler
30 from shinken
.macroresolver
import MacroResolver
31 from shinken
.external_command
import ExternalCommandManager
, ExternalCommand
32 from shinken
.check
import Check
33 from shinken
.message
import Message
34 from shinken
.arbiterlink
import ArbiterLink
35 from shinken
.schedulerlink
import SchedulerLink
36 from shinken
.pollerlink
import PollerLink
37 from shinken
.reactionnerlink
import ReactionnerLink
38 from shinken
.brokerlink
import BrokerLink
39 from shinken
.satellitelink
import SatelliteLink
40 from shinken
.notification
import Notification
42 from shinken
.brok
import Brok
44 from shinken
.daemons
.schedulerdaemon
import Shinken
class ShinkenTest(unittest.TestCase):
    """Common base class for Shinken scheduler tests.

    Loads a small reference configuration before every test so subclasses
    can drive a live Scheduler instance.
    """

    def setUp(self):
        # NOTE(review): the 'def setUp(self):' header was lost in extraction
        # and is reconstructed here -- a bare self.* call cannot sit at class
        # level, and this is the standard unittest fixture hook. Confirm
        # against the original file.
        self.setup_with_file('etc/nagios_1r_1h_1s.cfg')
    def setup_with_file(self, path):
        """Load the configuration file *path* and build a ready-to-run
        scheduler (``self.sched``) plus its external-command plumbing.

        NOTE(review): several statements of this method were lost in
        extraction (e.g. the creation of ``self.conf``, ``self.log`` and
        ``self.me`` before they are used below) -- confirm against the
        original file.

        :param path: path to the .cfg configuration file to load
        """
        self.log.load_obj(self)
        self.config_files = [path]
        # NOTE(review): read_config is called twice and only the second
        # call's buffer is kept -- the first call looks redundant; verify.
        self.conf.read_config(self.config_files)
        buf = self.conf.read_config(self.config_files)
        raw_objects = self.conf.read_config_buf(buf)
        # Arbiter and module objects must exist before early arbiter linking.
        self.conf.create_objects_for_type(raw_objects, 'arbiter')
        self.conf.create_objects_for_type(raw_objects, 'module')
        self.conf.early_arbiter_linking()
        self.conf.create_objects(raw_objects)
        self.conf.instance_id = 0
        self.conf.instance_name = 'test'
        # Standard Shinken config compilation pipeline; the call order is
        # significant (templates -> inheritance -> defaults -> validation).
        self.conf.linkify_templates()
        self.conf.apply_inheritance()
        self.conf.create_reversed_list()
        self.conf.remove_twins()
        self.conf.apply_implicit_inheritance()
        self.conf.fill_default()
        self.conf.clean_useless()
        self.conf.apply_dependancies()
        self.conf.explode_global_conf()
        self.conf.propagate_timezone_option()
        self.conf.create_business_rules()
        self.conf.create_business_rules_dependencies()
        self.conf.is_correct()
        self.confs = self.conf.cut_into_parts()
        # NOTE(review): self.me is presumably set by elided code above
        # (the arbiter daemon link) -- confirm.
        self.dispatcher = Dispatcher(self.conf, self.me)
        scheddaemon = Shinken(None, False, False, False, None)
        self.sched = Scheduler(scheddaemon)
        # Daemon and scheduler hold references to each other.
        scheddaemon.sched = self.sched
        self.sched.load_conf(self.conf)
        # The 'applyer' manager actually executes external commands in this
        # scheduler...
        e = ExternalCommandManager(self.conf, 'applyer')
        self.sched.external_command = e
        e.load_scheduler(self.sched)
        # ...while the 'dispatcher' one only routes them.
        e2 = ExternalCommandManager(self.conf, 'dispatcher')
        self.external_command_dispatcher = e2
        self.sched.schedule()
        # NOTE(review): this fragment belongs to the brok/command intake
        # method (the 'def add(self, b):' header and the Brok-storing body
        # were elided in extraction) -- confirm against the original file.
        if isinstance(b, Brok):
            # (body elided in extraction -- presumably stores b, e.g. in
            # self.broks, before returning)
        if isinstance(b, ExternalCommand):
            # External commands are forwarded straight to the scheduler.
            self.sched.run_external_command(b.cmd_line)
    def fake_check(self, ref, exit_status, output="OK"):
        """Simulate a check result for *ref* (a host or service).

        Forces a check to be scheduled on *ref*, then fills it in as if a
        poller had executed it and hands it back to the scheduler as a
        waiting result.

        :param ref: host or service object to fake a check for
        :param exit_status: plugin return code (0=OK, 1=WARNING, ...)
        :param output: plugin output, optionally with '|perfdata'
        """
        ref.schedule(force=True)
        # The forced check is now queued in ref.actions; take it over.
        check = ref.actions.pop()
        self.sched.add(check)  # check is now in sched.checks[]
        # NOTE(review): 'now = time.time()' was elided in extraction --
        # 'now' is undefined as shown; confirm against the original file.
        check.check_time = now
        elts_line1 = output.split('|')
        # First line before '|' is the plugin output proper...
        check.output = elts_line1[0]
        if len(elts_line1) > 1:
            # ...and anything after '|' is performance data.
            check.perf_data = elts_line1[1]
        check.exit_status = exit_status
        check.execution_time = 0.001
        # 'waitconsume' makes sched.consume_results() pick this check up.
        check.status = 'waitconsume'
        self.sched.waiting_results.append(check)
    def scheduler_loop(self, count, reflist, do_sleep=False, sleep_time=61):
        """Run *count* fake scheduler iterations, feeding each (obj,
        exit_status, output) triple in *reflist* through fake_check().

        NOTE(review): the 'for ref in reflist:' loop headers that enclosed
        the tuple unpackings below were elided in extraction -- the bare
        'ref' references cannot work as shown; confirm against the
        original file.
        """
        (obj, exit_status, output) = ref
        obj.checks_in_progress = []
        for loop in range(1, count + 1):
            print "processing check", loop
            (obj, exit_status, output) = ref
            obj.update_in_checking()
            self.fake_check(obj, exit_status, output)
            # One scheduler turn: internal checks, result consumption,
            # then derivation of new actions and broks.
            self.sched.manage_internal_checks()
            self.sched.consume_results()
            self.sched.get_new_actions()
            self.sched.get_new_broks()
            (obj, exit_status, output) = ref
            obj.checks_in_progress = []
            self.sched.update_downtimes_and_comments()
            # NOTE(review): the sleep was presumably guarded by
            # 'if do_sleep:' (elided in extraction) -- confirm.
            #time.sleep(ref.retry_interval * 60 + 1)
            time.sleep(sleep_time)
    def worker_loop(self):
        """Play the role of a poller/reactionner for one pass: fetch the
        runnable checks and actions and hand results back to the scheduler.
        """
        self.sched.delete_zombie_checks()
        self.sched.delete_zombie_actions()
        checks = self.sched.get_to_run_checks(True, False, worker_name='tester')
        actions = self.sched.get_to_run_checks(False, True, worker_name='tester')
        #print "------------ worker loop checks ----------------"
        #print "------------ worker loop actions ----------------"
        #print "------------ worker loop new ----------------"
        # NOTE(review): the loop header iterating the fetched items
        # (presumably 'for a in checks + actions:') was elided in
        # extraction -- confirm against the original file.
        a.status = 'inpoller'
        a.check_time = time.time()
        self.sched.put_results(a)
        #print "------------ worker loop end ----------------"
        # NOTE(review): the enclosing 'def show_logs(self):' header was
        # elided in extraction; this is its body. It dumps every 'log'
        # brok to stdout, in ascending brok-id order.
        print "--- logs <<<----------------------------------"
        # Python2 cmp-style sort: ascending brok id.
        for brok in sorted(self.sched.broks.values(), lambda x, y: x.id - y.id):
            if brok.type == 'log':
                print "LOG:", brok.data['log']
        print "--- logs >>>----------------------------------"
    def show_actions(self):
        """Dump the scheduler's pending actions (notifications and event
        handlers) to stdout, in ascending action-id order."""
        print "--- actions <<<----------------------------------"
        # Python2 cmp-style sort: ascending action id.
        for a in sorted(self.sched.actions.values(), lambda x, y: x.id - y.id):
            if a.is_a == 'notification':
                if a.ref.my_type == "host":
                    ref = "host: %s" % a.ref.get_name()
                # NOTE(review): the 'else:' introducing the service case
                # was elided in extraction; the next line belongs under it.
                    ref = "host: %s svc: %s" % (a.ref.host.get_name(), a.ref.get_name())
                print "NOTIFICATION %d %s %s %s %s" % (a.id, ref, a.type, time.asctime(time.localtime(a.t_to_go)), a.status)
            elif a.is_a == 'eventhandler':
                print "EVENTHANDLER:", a
        print "--- actions >>>----------------------------------"
201 def show_and_clear_logs(self
):
206 def show_and_clear_actions(self
):
211 def count_logs(self
):
212 return len([b
for b
in self
.sched
.broks
.values() if b
.type == 'log'])
215 def count_actions(self
):
216 return len(self
.sched
.actions
.values())
219 def clear_logs(self
):
221 for b
in self
.sched
.broks
.values():
223 id_to_del
.append(b
.id)
225 del self
.sched
.broks
[id]
228 def clear_actions(self
):
229 self
.sched
.actions
= {}
    def log_match(self, index, pattern):
        """Check whether the *index*-th log message matches *pattern*.

        NOTE(review): several statements were elided in extraction (the
        early-exit body when index is out of range, a message counter, and
        the final return statements) -- confirm against the original file.
        """
        # log messages are counted 1...n, so index=1 for the first message
        if index > self.count_logs():
            # (body elided -- presumably returns False)
            regex = re.compile(pattern)
            # (counter initialisation elided)
            # Python2 cmp-style sort: ascending brok id.
            for brok in sorted(self.sched.broks.values(), lambda x, y: x.id - y.id):
                if brok.type == 'log':
                    # (index/counter comparison elided)
                    if re.search(regex, brok.data['log']):
                        # (match/mismatch returns elided)
248 def any_log_match(self
, pattern
):
249 regex
= re
.compile(pattern
)
250 for brok
in sorted(self
.sched
.broks
.values(), lambda x
, y
: x
.id - y
.id):
251 if brok
.type == 'log':
252 if re
.search(regex
, brok
.data
['log']):
257 def get_log_match(self
, pattern
):
258 regex
= re
.compile(pattern
)
260 for brok
in sorted(self
.sched
.broks
.values(), lambda x
, y
: x
.id - y
.id):
261 if brok
.type == 'log':
262 if re
.search(regex
, brok
.data
['log']):
263 res
.append(brok
.data
['log'])
268 def print_header(self
):
269 print "#" * 80 + "\n" + "#" + " " * 78 + "#"
270 print "#" + string
.center(self
.id(), 78) + "#"
271 print "#" + " " * 78 + "#\n" + "#" * 80 + "\n"
    def xtest_conf_is_correct(self):
        # The 'xtest_' prefix keeps unittest from collecting this as a test;
        # rename to 'test_...' to enable it.
        # NOTE(review): a statement between the header and the assert
        # (likely self.print_header()) was elided in extraction -- confirm
        # against the original file.
        self.assert_(self.conf.conf_is_correct)
282 if __name__
== '__main__':