10 from datetime import datetime
11 from collections import defaultdict
12 from core.models import *
13 from django.db.models import F, Q
14 #from openstack.manager import OpenStackManager
15 from openstack.driver import OpenStackDriver
16 from util.logger import Logger, logging, logger
17 #from timeout import timeout
18 from planetstack.config import Config
19 from observer.steps import *
20 from syncstep import SyncStep
21 from toposort import toposort
22 from observer.error_mapper import *
# Module-wide logger instance (project-local util.logger wrapper around logging).
26 logger = Logger(level=logging.INFO)
# Raised to signal that a sync step should not run in this pass
# (see check_schedule / check_class_dependency below).
28 class StepNotReady(Exception):
# NOTE(review): the next line appears to belong to a *different* class's
# __init__ (likely a no-op driver) and landed here through line sampling /
# whitespace mangling -- confirm against the upstream file.
34 self.dependency_graph = None
# Main observer: discovers sync steps, orders them by model dependencies,
# and runs them in an event-driven loop.
36 class PlanetStackObserver:
# Historical hard-coded step list, superseded by dynamic discovery in
# load_sync_step_modules(); kept commented out upstream.
37 #sync_steps = [SyncNetworks,SyncNetworkSlivers,SyncSites,SyncSitePrivileges,SyncSlices,SyncSliceMemberships,SyncSlivers,SyncSliverIps,SyncExternalRoutes,SyncUsers,SyncRoles,SyncNodes,SyncImages,GarbageCollector]
# --- __init__ fragment: the `def __init__(self)` line is not visible in this
# chunk; the statements below are its body. ---
45 # The Condition object that gets signalled by Feefie events
47 self.load_sync_step_modules()
48 self.load_sync_steps()
49 self.event_cond = threading.Condition()
# Driver selection: "openstack" is the default kind when the config does not
# set observer_driver.
51 self.driver_kind = getattr(Config(), "observer_driver", "openstack")
52 if self.driver_kind=="openstack":
53 self.driver = OpenStackDriver()
# NOTE(review): the line below is presumably the `else:` branch (the `else:`
# line itself is missing from this chunk) -- confirm.
55 self.driver = NoOpDriver()
def wait_for_event(self, timeout, cond=None):
    """Block on a condition variable for up to `timeout` seconds.

    `cond` defaults to self.event_cond, which wake_up() notifies.

    Bug fix: the original signature was `cond=self.event_cond`, but `self`
    is not in scope when default arguments are evaluated (they are evaluated
    once, at `def` time), so the class body raised NameError.  Use a None
    sentinel and resolve the default at call time instead.
    """
    if cond is None:
        cond = self.event_cond
    cond.acquire()
    try:
        # Returns after `timeout` seconds or as soon as the condition is
        # notified, whichever comes first.
        cond.wait(timeout)
    finally:
        cond.release()
    # NOTE(review): the method body is not visible in this chunk; the
    # acquire/wait/release sequence above is reconstructed -- confirm
    # against the upstream file.
def wake_up(self, cond=None):
    """Notify waiters blocked in wait_for_event().

    Bug fix: the original signature was `cond=self.event_cond`; default
    arguments are evaluated at `def` time, where `self` does not exist,
    so this raised NameError when the class body executed.  Use a None
    sentinel and resolve at call time.
    """
    logger.info('Wake up routine called. Event cond %r'%self.event_cond)
    if cond is None:
        cond = self.event_cond
    cond.acquire()
    # Wake one waiter; the run loop waits on this condition between passes.
    cond.notify()
    cond.release()
    # NOTE(review): only the def line and the log line are visible in this
    # chunk; the acquire/notify/release sequence is reconstructed -- confirm.
def load_sync_step_modules(self, step_dir=None):
    """Discover SyncStep subclasses under `step_dir` and collect them.

    Every importable .py file in the directory (except __init__.py) is
    loaded, and each class that subclasses SyncStep *and* declares a
    `provides` attribute is appended to self.sync_steps.  The `provides`
    requirement filters out the abstract base classes, which don't have one.
    """
    if step_dir is None:
        # NOTE(review): the default/else structure here is reconstructed
        # (the `if step_dir is None:` and `else:` lines are missing from
        # this chunk) -- confirm against upstream.  Hoist the Config()
        # instantiation so it is built once rather than twice.
        config = Config()
        if hasattr(config, "observer_steps_dir"):
            step_dir = config.observer_steps_dir
        else:
            step_dir = "/opt/planetstack/observer/steps"

    for fn in os.listdir(step_dir):
        pathname = os.path.join(step_dir, fn)
        if os.path.isfile(pathname) and fn.endswith(".py") and (fn != "__init__.py"):
            # NOTE: imp.load_source is deprecated in Python 3 (removed in
            # 3.12); migrate to importlib when the codebase moves off Py2.
            module = imp.load_source(fn[:-3], pathname)
            for classname in dir(module):
                c = getattr(module, classname, None)

                # make sure 'c' is a descendent of SyncStep and has a
                # provides field (this eliminates the abstract base classes
                # since they don't have a provides)
                if inspect.isclass(c) and issubclass(c, SyncStep) and hasattr(c, "provides") and (c not in self.sync_steps):
                    self.sync_steps.append(c)
    logger.info('loaded sync steps: %s' % ",".join([x.__name__ for x in self.sync_steps]))
# Build the step-level dependency graph from the model-level dependency
# graph, then topologically sort the steps into self.ordered_steps.
# NOTE(review): several control-flow lines (try/except headers, loop headers,
# else branches) are missing from this chunk; comments below flag the gaps.
91 def load_sync_steps(self):
92 dep_path = Config().observer_dependency_graph
93 logger.info('Loading model dependency graph from %s' % dep_path)
95 # This contains dependencies between records, not sync steps
# File handle is opened without being closed; read-parse in one expression.
96 self.model_dependency_graph = json.loads(open(dep_path).read())
# Presumably inside a try: the except branch appears at original line 106.
101 backend_path = Config().observer_pl_dependency_graph
102 logger.info('Loading backend dependency graph from %s' % backend_path)
103 # This contains dependencies between backend records
104 self.backend_dependency_graph = json.loads(open(backend_path).read())
# Fallback when the backend graph file cannot be loaded.
106 logger.info('Backend dependency graph not loaded')
107 # We can work without a backend graph
108 self.backend_dependency_graph = {}
# Build name -> step-class lookup and a provides_dict mapping each provided
# model name to the steps that provide it.
111 for s in self.sync_steps:
112 self.step_lookup[s.__name__] = s
# The two branches below are the in-dict / not-in-dict cases of populating
# provides_dict (the `if`/`else` lines are missing from this chunk).
115 provides_dict[m.__name__].append(s.__name__)
117 provides_dict[m.__name__]=[s.__name__]
# Translate model-level edges (k depends-on members of v) into step-level
# edges in step_graph.  Python 2 idiom: dict.iteritems().
120 for k,v in self.model_dependency_graph.iteritems():
122 for source in provides_dict[k]:
125 for dest in provides_dict[m]:
# Append dest only once per source (list used as an ordered set).
128 if (dest not in step_graph[source]):
129 step_graph[source].append(dest)
131 step_graph[source]=[dest]
137 # no dependencies, pass
# Optional second pass over backend-record dependencies.
141 if (self.backend_dependency_graph):
143 for s in self.sync_steps:
145 backend_dict[m]=s.__name__
# NOTE(review): `backend_dependency_graph` is referenced without `self.`
# here -- looks like a bug (NameError at runtime); confirm upstream.
147 for k,v in backend_dependency_graph.iteritems():
149 source = backend_dict[k]
152 dest = backend_dict[m]
# NOTE(review): assigns a scalar where the model path above stores lists --
# inconsistent; confirm whether this should be append/list as well.
156 step_graph[source]=dest
160 # no dependencies, pass
162 self.dependency_graph = step_graph
# Topological sort over step names; map() + lambda is the Py2 style here.
164 self.ordered_steps = toposort(self.dependency_graph, map(lambda s:s.__name__,self.sync_steps))
# Python 2 print statement.
165 print "Order of steps=",self.ordered_steps
166 self.load_run_times()
def check_duration(self, step, duration):
    """Log a warning when `step` overran its (optional) per-step deadline.

    Steps without a `deadline` attribute are silently ignored.

    Fix: the original wrapped the comparison in try/except AttributeError
    (the `try:` line was lost in this chunk); that pattern also swallows
    AttributeErrors from `step.name` inside the log call.  Probe only the
    attribute that is genuinely optional.
    """
    deadline = getattr(step, 'deadline', None)
    if deadline is not None and duration > deadline:
        logger.info('Sync step %s missed deadline, took %.2f seconds'%(step.name,duration))
def update_run_time(self, step, deletion):
    """Record 'now' as the last run time of `step`.

    Sync runs and deletion runs are tracked in separate dicts, keyed by
    the step's class name.  (The if/else lines were missing from this
    chunk; the branch split on `deletion` is implied by the two target
    dicts and by check_schedule's mirrored lookup.)
    """
    if not deletion:
        self.last_run_times[step.__name__]=time.time()
    else:
        self.last_deletion_run_times[step.__name__]=time.time()
def check_schedule(self, step, deletion):
    """Raise StepNotReady when `step` ran more recently than its requested_interval.

    Steps without a `requested_interval` attribute are treated as
    misconfigured: a message is logged and StepNotReady is raised so the
    step is skipped this pass (matching the original except-branch).

    Fix: the original used try/except AttributeError around the whole
    comparison (the `try:`/`raise` lines were lost in this chunk); that
    also catches AttributeErrors from unrelated lookups.  Probe only the
    optional attribute.
    """
    last_run_times = self.last_run_times if not deletion else self.last_deletion_run_times
    # Steps never run before default to 0, i.e. "ran at the epoch".
    time_since_last_run = time.time() - last_run_times.get(step.__name__, 0)
    requested_interval = getattr(step, 'requested_interval', None)
    if requested_interval is None:
        logger.info('Step %s does not have requested_interval set'%step.__name__)
        raise StepNotReady
    if time_since_last_run < requested_interval:
        raise StepNotReady
def load_run_times(self):
    """Load per-step last-run timestamps from /tmp, defaulting to 0 on failure.

    Populates self.last_run_times and self.last_deletion_run_times.  If a
    file is missing or unparseable, every step in self.ordered_steps gets
    a last-run time of 0 ("never ran").

    Fixes: close the files deterministically (`with`) and catch only the
    failures this fallback is meant for (I/O errors and bad JSON) instead
    of the original's implied bare except.
    """
    try:
        with open('/tmp/observer_run_times') as f:
            self.last_run_times = json.loads(f.read())
    except (IOError, OSError, ValueError):
        self.last_run_times = dict.fromkeys(self.ordered_steps, 0)

    try:
        with open('/tmp/observer_deletion_run_times') as f:
            self.last_deletion_run_times = json.loads(f.read())
    except (IOError, OSError, ValueError):
        self.last_deletion_run_times = dict.fromkeys(self.ordered_steps, 0)
def save_run_times(self):
    """Persist per-step last-run timestamps to /tmp as JSON.

    Counterpart of load_run_times(); called at the end of each run-loop
    pass.  Fix: the original left both file handles unclosed (relying on
    refcount finalization); use `with` so the data is flushed and the
    descriptors released deterministically.
    """
    with open('/tmp/observer_run_times','w') as f:
        f.write(json.dumps(self.last_run_times))

    with open('/tmp/observer_deletion_run_times','w') as f:
        f.write(json.dumps(self.last_deletion_run_times))
def check_class_dependency(self, step, failed_steps):
    """Raise StepNotReady if any step that `step` depends on has failed.

    Dependencies are derived from the model dependency graph: for each
    model in step.provides, the models it depends on are collected.

    Bug fix: the original assigned to `step.dependenices` (typo) but then
    read `step.dependencies` in the membership test, so the check compared
    against a stale or missing attribute.
    """
    step.dependencies = []
    for obj in step.provides:
        # Models absent from the graph contribute no dependencies.
        step.dependencies.extend(self.model_dependency_graph.get(obj.__name__, []))
    for failed_step in failed_steps:
        if failed_step in step.dependencies:
            # NOTE(review): the raise line is missing from this chunk; the
            # StepNotReady class above and sync()'s "Step not ready" log
            # strongly imply this consequence -- confirm upstream.
            raise StepNotReady
# Run one sync step S (a step-class name), honoring step dependencies,
# per-step scheduling, and class-level failure propagation; records the
# outcome in self.step_status so dependent steps can proceed or abort.
# NOTE(review): many control-flow lines (loops over deps, try/except
# headers, while-wait loops) are missing from this chunk.
228 def sync(self, S, deletion):
229 step = self.step_lookup[S]
230 start_time=time.time()
232 # Wait for step dependencies to be met
233 deps = self.dependency_graph[S]
# Presumably inside `for d in deps:` (loop header missing).
235 cond = self.step_conditions[d]
# NOTE(review): `self.step_status` is a dict; this likely should be
# `self.step_status[d] is STEP_STATUS_WORKING` -- mangling or a bug; confirm.
# STEP_STATUS_* constants are defined elsewhere in the file (not visible here).
237 if (self.step_status is STEP_STATUS_WORKING):
# Instantiate the step class; `error_mapper` is a free variable here --
# presumably self.error_mapper (set in the run loop below); confirm.
241 sync_step = step(driver=self.driver,error_map=error_mapper)
242 sync_step.__name__ = step.__name__
243 sync_step.dependencies = []
245 mlist = sync_step.provides
# Presumably inside `for m in mlist:` (loop header missing).
248 sync_step.dependencies.extend(self.model_dependency_graph[m.__name__])
# `debug_mode` is also a free variable in this chunk -- confirm its origin.
251 sync_step.debug_mode = debug_mode
255 # Various checks that decide whether
256 # this step runs or not
257 self.check_class_dependency(sync_step, self.failed_steps) # dont run Slices if Sites failed
258 self.check_schedule(sync_step, deletion) # dont run sync_network_routes if time since last run < 1 hour
# except StepNotReady branch (the except line itself is missing).
261 logging.info('Step not ready: %s'%sync_step.__name__)
262 self.failed_steps.append(sync_step)
# Generic-exception branch: log and mark the whole step failed.
264 logging.error('%r',e)
265 logger.log_exc("sync step failed: %r. Deletion: %r"%(sync_step,deletion))
266 self.failed_steps.append(sync_step)
# Success path: actually execute the step and time it.
270 duration=time.time() - start_time
272 logger.info('Executing step %s' % sync_step.__name__)
# Steps are callables; failed objects from earlier steps are passed in so
# dependent objects can be skipped.
274 failed_objects = sync_step(failed=list(self.failed_step_objects), deletion=deletion)
276 self.check_duration(sync_step, duration)
# Accumulate per-object failures across steps (a set).
279 self.failed_step_objects.update(failed_objects)
281 my_status = STEP_STATUS_OK
282 self.update_run_time(sync_step,deletion)
# Failure branch for exceptions raised by the step body itself.
284 logging.error('Model step failed. This seems like a misconfiguration or bug: %r. This error will not be relayed to the user!',e)
286 self.failed_steps.append(S)
287 my_status = STEP_STATUS_KO
289 my_status = STEP_STATUS_OK
# Publish this step's status and (presumably) notify waiters on its
# condition variable so dependent steps can proceed.
292 my_cond = self.step_conditions[S]
294 self.step_status[S]=my_status
# --- Run-loop region: the enclosing `def` line(s) are not visible in this
# chunk.  Original line 299 appears to belong to a small helper taking a
# step name S; lines 304 onward are the main run() loop. ---
299 if (self.step_conditions.has_key(S)):
# Early exits: nothing to do when the driver is disabled or (for the
# openstack driver kind) when openstack support is unavailable.
304 if not self.driver.enabled:
307 if (self.driver_kind=="openstack") and (not self.driver.has_openstack):
# Map backend error strings to user-visible messages; path is configurable.
312 error_map_file = getattr(Config(), "error_map_path", "/opt/planetstack/error_map.txt")
313 self.error_mapper = ErrorMapper(error_map_file)
315 # Set of whole steps that failed
316 self.failed_steps = []
318 # Set of individual objects within steps that failed
319 self.failed_step_objects = set()
321 # Set up conditions and step status
322 # This is needed for steps to run in parallel
323 # while obeying dependencies.
# Presumably collecting the set of provider step names from the graph
# (the accumulation lines are missing from this chunk).
326 for v in self.dependency_graph.values():
329 self.step_conditions = {}
330 self.step_status = {}
# One Condition + status slot per provider step; sync() waits on / notifies
# these to serialize dependent steps while independent ones run in parallel.
331 for p in list(providers):
332 self.step_conditions[p] = threading.Condition()
333 self.step_status[p] = STEP_STATUS_IDLE
# Block until an external event wakes us, or a 30-second poll timeout.
336 logger.info('Waiting for event')
337 tBeforeWait = time.time()
338 self.wait_for_event(timeout=30)
339 logger.info('Observer woke up')
341 # Two passes. One for sync, the other for deletion.
342 for deletion in [False,True]:
344 logger.info('Deletion=%r...'%deletion)
345 schedule = self.sync_schedule if not deletion else self.delete_schedule
# NOTE(review): these args do not match sync(self, S, deletion) as defined
# above -- either sync()'s signature changed upstream or this line targets a
# different callable; confirm before relying on it.
347 thread = threading.Thread(target=self.sync, args=(schedule.start_conditions, schedule.ordered_steps,deletion, schedule.signal_sem))
349 logger.info('Deletion=%r...'%deletion)
350 threads.append(thread)
356 # Wait for all threads to finish before continuing with the run loop
# Persist run times after each full pass.
360 self.save_run_times()
# Top-level catch for the run loop: log and keep looping (the except line
# itself is missing from this chunk).
362 logging.error('Core error. This seems like a misconfiguration or bug: %r. This error will not be relayed to the user!',e)
363 logger.log_exc("Exception in observer run loop")
364 traceback.print_exc()