Clean: (Grégory Starck) clean up some getattr code, bis.
[shinken.git] / shinken / modules / pickle_retention_file_scheduler.py
blob a9c24a3b4e9c9e37a7bd3f6843d0a77c1246b957
#!/usr/bin/python
#Copyright (C) 2009 Gabes Jean, naparuba@gmail.com
#
#This file is part of Shinken.
#
#Shinken is free software: you can redistribute it and/or modify
#it under the terms of the GNU Affero General Public License as published by
#the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#
#Shinken is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU Affero General Public License for more details.
#
#You should have received a copy of the GNU Affero General Public License
#along with Shinken. If not, see <http://www.gnu.org/licenses/>.


#This class is an example of a Scheduler module.
#It is used both for the configuration phase AND the running one.

#This text is printed at import time
print "Detected module : Pickle retention file for Scheduler"

import cPickle
import shutil
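

# Module metadata read by the modules manager: 'type' is the name this module
# is registered under, 'external': False means it runs inside the scheduler
# process itself, and 'phases' restricts it to the retention save/load phase.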
properties = {
    'type' : 'pickle_retention_file',
    'external' : False,
    'phases' : ['retention'],
    }


#Called by the plugin manager to get an instance of this module
def get_instance(plugin):
    print "Get a pickle retention scheduler module for plugin %s" % plugin.get_name()
    path = plugin.path
    instance = Pickle_retention_scheduler(plugin.get_name(), path)
    return instance
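
# A module like this is typically wired in from the scheduler configuration with
# a definition along these lines (illustrative only: the module_name and the
# path it should write to are assumptions):
#
#   define module{
#       module_name     PickleRetention
#       module_type     pickle_retention_file
#       path            /tmp/retention.dat
#   }
#
# The scheduler then lists it in its 'modules' attribute, and get_instance()
# above receives it as 'plugin', with 'path' available as plugin.path.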


#Save and load the scheduler's retention data (host/service running
#properties) as a pickle file
class Pickle_retention_scheduler:
    def __init__(self, name, path):
        self.name = name
        self.path = path

    #Called by the Scheduler to say 'prepare yourself'
    def init(self):
        print "Initialization of the Pickle file retention scheduler module"
        #self.return_queue = self.properties['from_queue']


    def get_name(self):
        return self.name
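

    # Only the running properties whose definitions carry retention=True are
    # saved and restored below: the live state of hosts and services (current
    # status, in-progress notifications, downtimes, comments, ...) that is
    # worth keeping across a scheduler restart.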
    #Ok, main function that is called in the retention creation pass
    def update_retention_objects(self, sched, log_mgr):
        print "[PickleRetention] asking me to update the retention objects"
        #Now the flat file method
        try:
            # Open a file near the path, with a .tmp extension,
            # so in case of a problem we do not lose the old one
            f = open(self.path+'.tmp', 'wb')
            #Just put hosts/services because checks and notifications
            #are already linked into them
            #all_data = {'hosts' : sched.hosts, 'services' : sched.services}

            # We create an all_data dict with the retention-useful
            # data of our hosts and services
            all_data = {'hosts' : {}, 'services' : {}}
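            # Once unpickled, all_data looks roughly like this
            # (host/service names and values are only illustrative):
            #   {'hosts': {'srv-web-1': {'state': 'UP', ...}},
            #    'services': {('srv-web-1', 'HTTP'): {'state': 'OK', ...}}}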
            for h in sched.hosts:
                d = {}
                running_properties = h.__class__.running_properties
                for prop in running_properties:
                    entry = running_properties[prop]
                    if entry.retention:
                        d[prop] = getattr(h, prop)
                        # if prop == 'notifications_in_progress':
                        #     v = getattr(h, prop)
                        #     if v != {}:
                        #         print "DUMP", getattr(h, prop)
                        #         for n in v.values():
                        #             print n.__dict__
                all_data['hosts'][h.host_name] = d

            #Now same for services
            for s in sched.services:
                d = {}
                running_properties = s.__class__.running_properties
                for prop in running_properties:
                    entry = running_properties[prop]
                    if entry.retention:
                        d[prop] = getattr(s, prop)
                all_data['services'][(s.host.host_name, s.service_description)] = d

            #s = cPickle.dumps(all_data)
            #s_compress = zlib.compress(s)
            cPickle.dump(all_data, f)
            #f.write(s_compress)
            f.close()
            # Now move the .tmp file to the real path
            shutil.move(self.path+'.tmp', self.path)
        except IOError, exp:
            log_mgr.log("Error: retention file creation failed, %s" % str(exp))
            return
        log_mgr.log("Updating retention_file %s" % self.path)


    #Should return whether the retention load succeeded or not
    def load_retention_objects(self, sched, log_mgr):
        print "[PickleRetention] asking me to load the retention objects"

        #Now the old flat file way :(
        log_mgr.log("[PickleRetention] Reading from retention_file %s" % self.path)
        try:
            f = open(self.path, 'rb')
            all_data = cPickle.load(f)
            f.close()
        except EOFError, exp:
            print exp
            return False
        except ValueError, exp:
            print exp
            return False
        except IOError, exp:
            print exp
            return False
        except IndexError, exp:
            s = "WARNING: Sorry, the resource file is not compatible"
            log_mgr.log(s)
            return False
        except TypeError, exp:
            s = "WARNING: Sorry, the resource file is not compatible"
            log_mgr.log(s)
            return False
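
        # Any of the errors above means the retention data is missing or
        # unusable; returning False just tells the caller to carry on without
        # restoring anything.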

        #Now load the interesting properties into hosts/services.
        #Properties tagged retention=False are not loaded directly.
        #Items will come back with their status, but not in checking, so
        #a new check will be launched like with a normal beginning (randomly
        #distributed scheduling)

        ret_hosts = all_data['hosts']
        for ret_h_name in ret_hosts:
            #We take the dict of the values to load
            d = all_data['hosts'][ret_h_name]
            h = sched.hosts.find_by_name(ret_h_name)
            if h is not None:
                running_properties = h.__class__.running_properties
                for prop in running_properties:
                    entry = running_properties[prop]
                    if entry.retention:
                        # Maybe the save was not done with this value, so
                        # we just bypass it
                        if prop in d:
                            setattr(h, prop, d[prop])
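
                # Re-attach the restored in-flight notifications to this host
                # and hand them back to the scheduler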
                for a in h.notifications_in_progress.values():
                    # print "AA,", a.__dict__
                    a.ref = h
                    sched.add(a)
                h.update_in_checking()
                #And also add downtimes and comments
                for dt in h.downtimes:
                    dt.ref = h
                    dt.extra_comment.ref = h
                    sched.add(dt)
                for c in h.comments:
                    c.ref = h
                    sched.add(c)
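
        #Now the same for services, keyed by (host_name, service_description)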
        ret_services = all_data['services']
        for (ret_s_h_name, ret_s_desc) in ret_services:
            #We take the dict of the values to load
            d = all_data['services'][(ret_s_h_name, ret_s_desc)]
            s = sched.services.find_srv_by_name_and_hostname(ret_s_h_name, ret_s_desc)
            if s is not None:
                running_properties = s.__class__.running_properties
                for prop in running_properties:
                    entry = running_properties[prop]
                    if entry.retention:
                        # Maybe the save was not done with this value, so
                        # we just bypass it
                        if prop in d:
                            setattr(s, prop, d[prop])
                for a in s.notifications_in_progress.values():
                    a.ref = s
                    sched.add(a)
                s.update_in_checking()
                #And also add downtimes and comments
                for dt in s.downtimes:
                    dt.ref = s
                    dt.extra_comment.ref = s
                    sched.add(dt)
                for c in s.comments:
                    c.ref = s
                    sched.add(c)

        log_mgr.log("[PickleRetention] OK we've loaded data from the retention file")

        return True