#!/usr/bin/env python
#
# Copyright (c) 2009, devdoodles
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
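#
# Munin plugin that reports HTTP(S) response times for a list of URLs
# configured through environment variables (url1_url, url1_name, ...,
# url2_url, ...). Invoked with "config" it prints the graph definition;
# invoked without arguments it fetches each URL and prints one value line
# per URL.
#
# Example plugin configuration (illustrative values; the section name is an
# assumption and must match the name the plugin is installed under),
# typically placed in /etc/munin/plugin-conf.d/:
#
#   [http_response_time]
#   env.request_timeout 10
#   env.url1_url http://www.example.com/
#   env.url1_name example
#   env.url1_label Example homepage
#   env.url1_warning 2
#   env.url1_critical 5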
import httplib
import os
import socket
import sys
import time
import urlparse
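
# Per-request socket timeout in seconds (request_timeout env var, default 10).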
request_timeout = os.getenv('request_timeout') or 10
socket.setdefaulttimeout(float(request_timeout))
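
# Collect URL definitions from numbered environment variables
# (url1_url, url1_name, ...), stopping at the first missing urlN_url.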
def load_urls():
    urls = []
    n = 1
    while 1:
        url = os.getenv("url%s_url" % n)
        if not url: break
        data = { 'url': url }
        for v in ('name', 'label', 'timeout', 'warning', 'critical'):
            data[v] = os.getenv("url%s_%s" % (n, v))
        urls.append(data)
        n += 1
    return urls
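
# Issue a GET request for path against dest, sending host as the Host
# header, and return the numeric HTTP status code.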
def do_request(protocol, dest, host, path):
    if protocol.startswith('https'):
        conn = httplib.HTTPSConnection(dest)
    else:
        conn = httplib.HTTPConnection(dest)
    conn.request('GET', path, headers={ 'Host': host })
    resp = conn.getresponse()
    # Read the whole body so the transfer is included in the caller's timing.
    data = resp.read()
    conn.close()
    return resp.status
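
# Munin invokes the plugin with "config" to obtain the graph definition;
# without arguments it expects one value line per configured URL.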
urls = load_urls()
if len(sys.argv) > 1 and sys.argv[1] == 'config':
    title = os.getenv("graph_title") or "Response time"
    category = os.getenv("graph_category")
    draw = os.getenv("draw")
    if category: print "graph_category %s" % (category,)
    print "graph_title %s" % (title,)
    print "graph_vlabel Time (seconds)"
    print "graph_args --lower-limit 0"
    for url in urls:
        if draw: print "%s.draw %s" % (url['name'], draw)
        for v in ('label', 'warning', 'critical'):
            if url[v]: print "%s.%s %s" % (url['name'], v, url[v])
    sys.exit(0)
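
# Measurement mode: time one GET per URL and report the elapsed seconds.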
for url in urls:
    o = urlparse.urlparse(url['url'])
    try:
        t1 = time.time()
        status = do_request(o[0], o[1], o[1], o[2])
        t2 = time.time()
        if status == 200:
            print "%s.value %.2f" % (url['name'], t2 - t1)
        else:
            # Non-200 responses are reported as unknown, with the status appended.
            print "%s.value U %s" % (url['name'], status)
    except socket.timeout:
        # A timeout is reported as the configured ceiling rather than unknown.
        print "%s.value %s" % (url['name'], request_timeout)
    except:
        # Any other failure (DNS, connection refused, ...) is reported as unknown.
        print "%s.value U" % (url['name'],)