# File: //usr/lib/python2.7/site-packages/lap/check_baculajob_freeze.py
import os
import re
import yaml
import glob
import datetime
import psycopg2
import ConfigParser
def conf(section, pattern):
    """Look up a single value from the Bacula backup configuration.

    section -- INI section name (e.g. 'database')
    pattern -- option key within that section
    Returns the raw string value; raises ConfigParser errors when the
    section or option is missing.
    """
    parser = ConfigParser.RawConfigParser()
    parser.read('/etc/locaweb/backup/bacula.conf')
    return parser.get(section, pattern)
def catalog_connect(query, param):
    """Execute *query* against the Bacula catalog database.

    query -- SQL statement to run
    param -- 'fetchall' to return every row, 'fetchone' for a single row;
             any other value returns None (query is still executed)
    Returns the cursor fetch result, or None.

    FIX: the original leaked the connection and cursor on every call —
    they are now always closed via try/finally.
    """
    dbhost = conf('database', 'db_host')
    dbbase = conf('database', 'db_name')
    dbuser = conf('database', 'db_username')
    dbpass = conf('database', 'db_password')
    conn = psycopg2.connect(host=dbhost,
                            port='5432',
                            database=dbbase,
                            user=dbuser,
                            password=dbpass)
    try:
        cursor = conn.cursor()
        try:
            cursor.execute(query)
            # commit kept for parity with the original behavior, even
            # though the visible callers only issue SELECTs
            conn.commit()
            if param == "fetchall":
                return cursor.fetchall()
            elif param == "fetchone":
                return cursor.fetchone()
        finally:
            cursor.close()
    finally:
        conn.close()
def get_delay_seconds(file_path):
    """Return the age, in whole seconds, of file_path's last modification.

    Returns None when the file does not exist (or cannot be stat'ed).

    BUG FIX: the original returned ``timedelta.seconds``, which is only
    the sub-day remainder — a spool file untouched for 2 days reported
    an age under 86400 s, masking frozen jobs.  ``total_seconds()``
    includes the days component; int() preserves the integer return type.
    """
    try:
        mtime = datetime.datetime.fromtimestamp(os.path.getmtime(file_path))
        return int((datetime.datetime.now() - mtime).total_seconds())
    except OSError:
        return None
def get_jobs_running():
    """Return (job, jobid, name) tuples for every currently-running
    backup job ('R' status, type 'B') in the catalog."""
    return catalog_connect(
        "select job, jobid, name from job where jobstatus='R' and type='B';",
        'fetchall')
def get_last_log_time(jobid):
    """Return the timestamp of the most recent log entry for *jobid*.

    Returns None when the job has no log rows yet.

    FIX: fetchone() yields None for an empty result, and the original's
    unconditional ``time[0]`` then raised TypeError; guard against it.
    The int() cast also keeps the interpolated value injection-safe.
    """
    sql = "select time from log where jobid = %d order by time desc limit 1;" % int(jobid)
    row = catalog_connect(sql, "fetchone")
    return row[0] if row else None
def __run__(params):
    """Nagios-style check: flag running backup jobs that look frozen.

    params -- dict with keys:
        'spooldir': directory containing Bacula spool files
        'delay':    maximum tolerated spool-file idle time, in seconds

    Returns [status, message]:
        [0, 'OK']                 no frozen jobs
        [2, 'CRITICAL: [...]']    list of frozen job names
        [3, 'Unknown - ...']      unexpected error

    FIX: ``except Exception, e`` is Python-2-only syntax; ``as e`` is
    equivalent and valid on Python 2.6+ and 3.  The duplicated 3-hour
    staleness expression is also hoisted into a single variable.
    """
    spooldir = params.get('spooldir')
    delay_param = params.get('delay')
    message = []
    try:
        for job, jobid, name in get_jobs_running():
            last_log_time = get_last_log_time(jobid)
            # A job is stale when its newest catalog log entry is more
            # than 3 hours old.
            stale = last_log_time < datetime.datetime.now() - datetime.timedelta(hours=3)
            spool_file = glob.glob('%s/*.%s.*.spool' % (spooldir, job))
            if spool_file:
                # Spool file present: also require the spool file itself
                # to have been idle longer than the configured delay.
                if get_delay_seconds(spool_file[0]) > delay_param and stale:
                    message.append(name)
            elif stale:
                message.append(name)
        if message:
            return [2, 'CRITICAL: %s' % message]
        return [0, 'OK']
    except Exception as e:
        return [3, 'Unknown - %s' % repr(e)]