Skip to content

Instantly share code, notes, and snippets.

@adeekshith
Forked from eriwen/gist:187610
Last active September 19, 2024 13:11

Revisions

  1. adeekshith revised this gist Apr 26, 2015. 1 changed file with 2 additions and 0 deletions.
    2 changes: 2 additions & 0 deletions website-monitoring-2
    Original file line number Diff line number Diff line change
    @@ -1,3 +1,5 @@
    #!/usr/bin/env python

    from threading import Thread
    import requests ## pip install requests
    import time
  2. adeekshith revised this gist Apr 26, 2015. 2 changed files with 73 additions and 0 deletions.
    File renamed without changes.
    73 changes: 73 additions & 0 deletions website-monitoring-2
    Original file line number Diff line number Diff line change
    @@ -0,0 +1,73 @@
    from threading import Thread
    import requests ## pip install requests
    import time
    import smtplib

    ## email sending function
    def email_sender(input_message, email_to, client):
    ''' function to send email '''
    to = email_to
    gmail_user = '' ## email of sender account
    gmail_pwd = '' ## password of sender account
    smtpserver = smtplib.SMTP("smtp.gmail.com",587)
    smtpserver.ehlo()
    smtpserver.starttls()
    smtpserver.ehlo
    smtpserver.login(gmail_user, gmail_pwd)
    header = 'To:' + to + '\n' + 'From: ' + gmail_user + '\n' + 'Subject:site down! \n'
    input_message = input_message + client
    msg = header + input_message
    smtpserver.sendmail(gmail_user, to, msg)
    smtpserver.close()

## list of sites to track along with email address to send the alert
## Keys are site URLs passed to requests.get(); values are alert recipients.
## NOTE(review): these placeholder entries all use the same key "client", so the
## literal collapses to a single {"client": "email"} entry -- replace each with a
## distinct real URL -> email pair before running.
clients = {"client":"email",
"client":"email",
"client":"email"}

## temporary dictionary used to do separate monitoring when a site is down
## Sites move clients -> temp_dic when first seen down, and back when they recover.
temp_dic = {}

    ## site 'up' function
    def site_up():
    ''' function to monitor up time '''
    while True:
    for client, email in clients.items():
    try:
    r = requests.get(client)
    if r.status_code == 200:
    print client, 'Site ok'
    time.sleep(60) ## sleep for 1 min
    else:
    print client, 'Site first registered as down - added to the "site down" monitoring'
    temp_dic[client]=email
    del clients[client]
    except requests.ConnectionError:
    print client, 'Site first registered as down - added to the "site down" monitoring'
    temp_dic[client]=email
    del clients[client]

    ## site 'down' function
    def site_down():
    ''' function to monitor site down time '''
    while True:
    time.sleep(900) ## sleeps 15 mins
    for client, email in temp_dic.items():
    try:
    r = requests.get(client)
    if r.status_code == 200:
    print client, 'Site is back up!!'
    email_sender('Site back up!! ', email, client)
    clients[client]=email
    del temp_dic[client]
    else:
    email_sender('Site down!! ', email, client)
    print client, 'Site Currently down - email sent'
    except requests.ConnectionError:
    email_sender('Site down!! ', email, client)
    print client, 'Site Currently down - email sent'

# Launch both monitors concurrently: t1 polls live sites, t2 re-checks down ones.
# Both targets loop forever, and the threads are non-daemon, so the process
# stays alive after the main thread falls off the end (no join() needed).
t1 = Thread(target = site_up)
t2 = Thread(target = site_down)
t1.start()
t2.start()
  3. emwendelin revised this gist Sep 22, 2009. 1 changed file with 20 additions and 14 deletions.
    34 changes: 20 additions & 14 deletions gistfile1.py
    Original file line number Diff line number Diff line change
    @@ -1,14 +1,14 @@
    #!/usr/bin/env python

    # Sample usage: checksites.py eriwen.com nixtutor.com ... etc.
    # sample usage: checksites.py eriwen.com nixtutor.com yoursite.org

    import pickle, os, sys, logging
    from httplib import HTTPConnection, socket
    from smtplib import SMTP

    def email_alert(message, status):
    fromaddr = 'you@gmail.com'
    toaddrs = '5551234567@txt.att.net'
    toaddrs = 'yourphone@txt.att.net'

    server = SMTP('smtp.gmail.com:587')
    server.starttls()
    @@ -17,9 +17,13 @@ def email_alert(message, status):
    server.quit()

    def get_site_status(url):
    if get_response(url).status != 200:
    return 'down'
    return 'up'
    response = get_response(url)
    try:
    if getattr(response, 'status') == 200:
    return 'up'
    except AttributeError:
    pass
    return 'down'

    def get_response(url):
    '''Return response object from URL'''
    @@ -28,35 +32,37 @@ def get_response(url):
    conn.request('HEAD', '/')
    return conn.getresponse()
    except socket.error:
    def headers_unavailable():
    return 'Headers unavailable'
    # Server is up but connection refused
    return ['status': 403, 'getheaders': headers_unavailable]
    return None
    except:
    logging.error('Bad URL:', url)
    exit(1)

    def get_headers(url):
    '''Gets all headers from URL request and returns'''
    return get_response(url).getheaders()
    response = get_response(url)
    try:
    return getattr(response, 'getheaders')()
    except AttributeError:
    return 'Headers unavailable'

    def compare_site_status(prev_results):
    '''Report changed status based on previous results'''

    def is_status_changed(url):
    status = get_site_status(url)
    print '%s is %s' % (url, status)
    friendly_status = '%s is %s' % (url, status)
    print friendly_status
    if url in prev_results and prev_results[url] != status:
    logging.warning(status)
    # Email status messages
    email_alert(str(get_headers(url)), status)
    email_alert(str(get_headers(url)), friendly_status)
    prev_results[url] = status

    return compare_site_status
    return is_status_changed

    def is_internet_reachable():
    '''Checks Google then Yahoo just in case one is down'''
    if not is_url_reachable('www.google.com') and not is_url_reachable('www.yahoo.com'):
    if get_site_status('www.google.com') == 'down' and get_site_status('www.yahoo.com') == 'down':
    return False
    return True

  4. emwendelin revised this gist Sep 22, 2009. 1 changed file with 38 additions and 39 deletions.
    77 changes: 38 additions & 39 deletions gistfile1.py
    Original file line number Diff line number Diff line change
    @@ -6,44 +6,53 @@
    from httplib import HTTPConnection, socket
    from smtplib import SMTP

    def email_alert(message, subject='You have an alert'):
    fromaddr = 'user@gmail.com'
    def email_alert(message, status):
    fromaddr = 'you@gmail.com'
    toaddrs = '5551234567@txt.att.net'

    server = SMTP('smtp.gmail.com:587')
    server.starttls()
    # I encourage you to get this from somewhere more secure
    server.login('gmailuser', 'password')
    server.sendmail(fromaddr, toaddrs, message)
    server.login('you', 'password')
    server.sendmail(fromaddr, toaddrs, 'Subject: %s\r\n%s' % (status, message))
    server.quit()

    def is_url_reachable(url):
    '''Make HEAD request to url'''
    def get_site_status(url):
    if get_response(url).status != 200:
    return 'down'
    return 'up'

    def get_response(url):
    '''Return response object from URL'''
    try:
    conn = HTTPConnection(url)
    conn.request("HEAD", "/")
    if conn.getresponse().status != 200:
    return False
    return True
    conn.request('HEAD', '/')
    return conn.getresponse()
    except socket.error:
    def headers_unavailable():
    return 'Headers unavailable'
    # Server is up but connection refused
    return False
    return ['status': 403, 'getheaders': headers_unavailable]
    except:
    logging.error('Bad URL:', url)
    raise
    exit(1)

    def get_headers(url):
    '''Gets all headers from URL request and returns'''
    try:
    conn = HTTPConnection(url)
    conn.request("HEAD", "/")
    response = conn.getresponse()
    return response.getheaders()
    except socket.error:
    return 'Headers unavailable'
    except:
    logging.error('Bad URL:', url)
    raise
    return get_response(url).getheaders()

    def compare_site_status(prev_results):
    '''Report changed status based on previous results'''

    def is_status_changed(url):
    status = get_site_status(url)
    print '%s is %s' % (url, status)
    if url in prev_results and prev_results[url] != status:
    logging.warning(status)
    # Email status messages
    email_alert(str(get_headers(url)), status)
    prev_results[url] = status

    return compare_site_status

    def is_internet_reachable():
    '''Checks Google then Yahoo just in case one is down'''
    @@ -66,7 +75,7 @@ def store_results(file_path, data):
    pickle.dump(data, output)
    output.close()

    def main(args):
    def main(urls):
    # Setup logging to store time
    logging.basicConfig(level=logging.WARNING, filename='checksites.log',
    format='%(asctime)s %(levelname)s: %(message)s',
    @@ -76,26 +85,16 @@ def main(args):
    pickle_file = 'data.pkl'
    pickledata = load_old_results(pickle_file)

    # Check sites only if Internet available
    # Check sites only if Internet is_available
    if is_internet_reachable():
    # First arg is script name, skip it
    for url in args[1:]:
    available = is_url_reachable(url)
    status = '%s is down' % url
    if available:
    status = '%s is up' % url
    # Print status for those just running without automation
    print status
    if url in pickledata and pickledata[url] != available:
    # Email status messages
    logging.warning(status)
    email_alert(str(get_headers(url)), status)
    pickledata[url] = available
    status_checker = compare_site_status(pickledata)
    map(status_checker, urls)
    else:
    logging.error('Either the world ended or we are not connected to the net.')

    # Store results in pickle file
    store_results(pickle_file, pickledata)

    if __name__ == '__main__':
    main(sys.argv)
    # First arg is script name, skip it
    main(sys.argv[1:])
  5. emwendelin revised this gist Sep 21, 2009. 1 changed file with 87 additions and 75 deletions.
    162 changes: 87 additions & 75 deletions gistfile1.py
    Original file line number Diff line number Diff line change
    @@ -1,89 +1,101 @@
    # Sample usage: "checksites.py eriwen.com nixtutor.com ..."
    #!/usr/bin/env python

    # Sample usage: checksites.py eriwen.com nixtutor.com ... etc.

    import pickle, os, sys, logging
    from httplib import HTTPConnection
    from httplib import HTTPConnection, socket
    from smtplib import SMTP

    def email_alert(alert,subject='You have an alert'):
    fromaddr = "youremail@domain.com"
    toaddrs = "youremail@domain.com"

    # Add the From: and To: headers at the start!
    msg = "From: %s\r\nSubject: %s\r\nTo: %s\r\n\r\n%s" % (fromaddr, subject, toaddrs, alert)

    server = smtplib.SMTP('localhost')
    server.sendmail(fromaddr, toaddrs, msg)
    def email_alert(message, subject='You have an alert'):
    fromaddr = 'user@gmail.com'
    toaddrs = '5551234567@txt.att.net'
    server = SMTP('smtp.gmail.com:587')
    server.starttls()
    # I encourage you to get this from somewhere more secure
    server.login('gmailuser', 'password')
    server.sendmail(fromaddr, toaddrs, message)
    server.quit()

    def is_url_reachable(url):
    try:
    conn = HTTPConnection(url)
    conn.request("HEAD", "/")
    if conn.getresponse().status != 200:
    return False
    return True
    except:
    logging.error('Bad URL:', url)
    raise

    '''Make HEAD request to url'''
    try:
    conn = HTTPConnection(url)
    conn.request("HEAD", "/")
    if conn.getresponse().status != 200:
    return False
    return True
    except socket.error:
    # Server is up but connection refused
    return False
    except:
    logging.error('Bad URL:', url)
    raise

    def get_headers(url):
    try:
    conn = HTTPConnection(url)
    conn.request("HEAD", "/")
    response = conn.getresponse()
    return response.getheaders()
    except:
    logging.error('Bad URL:', url)
    raise
    '''Gets all headers from URL request and returns'''
    try:
    conn = HTTPConnection(url)
    conn.request("HEAD", "/")
    response = conn.getresponse()
    return response.getheaders()
    except socket.error:
    return 'Headers unavailable'
    except:
    logging.error('Bad URL:', url)
    raise

    def is_internet_reachable():
    '''Checks Google then Yahoo just in case one is down'''
    if not is_url_reachable('www.google.com') or not is_url_reachable('www.yahoo.com'):
    return False
    return True
    '''Checks Google then Yahoo just in case one is down'''
    if not is_url_reachable('www.google.com') and not is_url_reachable('www.yahoo.com'):
    return False
    return True
    def load_old_results(file_path):
    pickledata = {}
    if os.path.isfile('data.pkl'):
    picklefile = open('data.pkl','rb')
    pickledata = pickle.load(picklefile)
    picklefile.close()
    return pickledata

    '''Attempts to load most recent results'''
    pickledata = {}
    if os.path.isfile(file_path):
    picklefile = open(file_path, 'rb')
    pickledata = pickle.load(picklefile)
    picklefile.close()
    return pickledata

    def store_results(file_path, data):
    output = open(file_path,'wb')
    pickle.dump(data, output)
    output.close()

    def main(*args):
    # Setup logging - going to store time info in here
    logging.basicConfig(level=logging.WARNING, filename='checksites.log',
    format='%(asctime)s %(levelname)s: %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S')

    # Load previous data
    pickle_file = 'data.pkl'
    pickledata = load_old_results(pickle_file)

    # Check sites only if Internet available
    if is_internet_reachable():
    # Skip the first arg since that is the name of the script
    for url in args[0][1:]:
    available = is_url_reachable(url)
    status = '%s is down' % url
    if available:
    status = '%s is up' % url
    print status
    if url in pickledata and pickledata[url] != available:
    # Send status messages wherever
    logging.warning(status)
    email_alert(str(get_headers(url)), status)
    pickledata[url] = available
    else:
    logging.error('Either the world ended or we are not connected to the net.')

    # Store results in pickle file
    store_results(pickle_file, pickledata)
    '''Pickles results to compare on next run'''
    output = open(file_path, 'wb')
    pickle.dump(data, output)
    output.close()

    def main(args):
    # Setup logging to store time
    logging.basicConfig(level=logging.WARNING, filename='checksites.log',
    format='%(asctime)s %(levelname)s: %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S')

    # Load previous data
    pickle_file = 'data.pkl'
    pickledata = load_old_results(pickle_file)

    # Check sites only if Internet available
    if is_internet_reachable():
    # First arg is script name, skip it
    for url in args[1:]:
    available = is_url_reachable(url)
    status = '%s is down' % url
    if available:
    status = '%s is up' % url
    # Print status for those just running without automation
    print status
    if url in pickledata and pickledata[url] != available:
    # Email status messages
    logging.warning(status)
    email_alert(str(get_headers(url)), status)
    pickledata[url] = available
    else:
    logging.error('Either the world ended or we are not connected to the net.')

    # Store results in pickle file
    store_results(pickle_file, pickledata)

    if __name__ == '__main__':
    main(sys.argv)
    main(sys.argv)
  6. emwendelin revised this gist Sep 15, 2009. 1 changed file with 3 additions and 3 deletions.
    6 changes: 3 additions & 3 deletions gistfile1.py
    Original file line number Diff line number Diff line change
    @@ -50,9 +50,9 @@ def load_old_results(file_path):
    picklefile.close()
    return pickledata

    def store_results(file_path):
    def store_results(file_path, data):
    output = open(file_path,'wb')
    pickle.dump(pickledata, output)
    pickle.dump(data, output)
    output.close()

    def main(*args):
    @@ -83,7 +83,7 @@ def main(*args):
    logging.error('Either the world ended or we are not connected to the net.')

    # Store results in pickle file
    store_results(pickle_file)
    store_results(pickle_file, pickledata)

    if __name__ == '__main__':
    main(sys.argv)
  7. emwendelin revised this gist Sep 15, 2009. 1 changed file with 78 additions and 82 deletions.
    160 changes: 78 additions & 82 deletions gistfile1.py
    Original file line number Diff line number Diff line change
    @@ -1,93 +1,89 @@
    import pickle, pprint, time, os
    import httplib
    import smtplib
    # Sample usage: "checksites.py eriwen.com nixtutor.com ..."

    import pickle, os, sys, logging
    from httplib import HTTPConnection
    from smtplib import SMTP

    def emailAlert(alert,subject='You have an alert'):
    def email_alert(alert,subject='You have an alert'):
    fromaddr = "youremail@domain.com"
    toaddrs = "youremail@domain.com"
    toaddrs = "youremail@domain.com"

    # Add the From: and To: headers at the start!
    msg = ("From: %s\r\nSubject: %s\r\nTo: %s\r\n\r\n"
    % (fromaddr,subject,toaddrs))
    msg = msg + alert
    msg = "From: %s\r\nSubject: %s\r\nTo: %s\r\n\r\n%s" % (fromaddr, subject, toaddrs, alert)

    server = smtplib.SMTP('localhost')
    # server.set_debuglevel(1)
    server.sendmail(fromaddr, toaddrs, msg)
    server.quit()


    def internetUp():
    data = []
    urls = ['www.google.com','www.yahoo.com']
    try:
    for url in urls:
    conn = httplib.HTTPConnection(url)
    conn.request("HEAD", "/")
    res = conn.getresponse()
    data.append(res.status)
    # print res.status, res.reason, url
    if data[0] != 200 and data[1] != 200:
    return False
    exit('Internet might be down!')
    else:
    return True
    except:
    exit('Internet is defeinitely down!')


    def isSiteup(urls):

    data = {}
    data['timestamp'] = time.time()
    for url in urls:
    conn = httplib.HTTPConnection(url)
    conn.request("HEAD", "/")
    res = conn.getresponse()
    data[url] = res.status
    # print res.status, res.reason, url

    if url in data1:
    if data1[url] != res.status:
    alertMessage = ("%s has changed from %s to %s" % (url, data1[url], res.status))
    alertSubject = ("%s has changed status" % (url))
    emailAlert(alertMessage,alertSubject)
    # print 'Sending an email!'
    #else:
    # print url, 'is still the same', data1[url], 'and', res.status

    output = open('data.pkl','wb')
    pickle.dump(data, output)
    def is_url_reachable(url):
    try:
    conn = HTTPConnection(url)
    conn.request("HEAD", "/")
    if conn.getresponse().status != 200:
    return False
    return True
    except:
    logging.error('Bad URL:', url)
    raise

    def get_headers(url):
    try:
    conn = HTTPConnection(url)
    conn.request("HEAD", "/")
    response = conn.getresponse()
    return response.getheaders()
    except:
    logging.error('Bad URL:', url)
    raise

    def is_internet_reachable():
    '''Checks Google then Yahoo just in case one is down'''
    if not is_url_reachable('www.google.com') or not is_url_reachable('www.yahoo.com'):
    return False
    return True

    def load_old_results(file_path):
    pickledata = {}
    if os.path.isfile('data.pkl'):
    picklefile = open('data.pkl','rb')
    pickledata = pickle.load(picklefile)
    picklefile.close()
    return pickledata

    def store_results(file_path):
    output = open(file_path,'wb')
    pickle.dump(pickledata, output)
    output.close()



    # Check to see if the internet is up
    internetUp()

    if os.path.isfile('data.pkl'):
    pklFile = open('data.pkl','rb')
    data1 = pickle.load(pklFile)
    # pprint.pprint(data1)

    elapsedTime = time.time() - data1['timestamp']
    elapsedMinutes = elapsedTime/60

    #if elapsedMinutes > 2:
    # print 'It\'s been longer than two minutes'
    else:
    data1 = {}

    # Urls to check
    urls = ['www.nixtutor.com',
    'www.marksanborn.net',
    'faceoffshow.com',
    'rocketship.it',
    'jaderobbins.com']

    # Run the checks
    isSiteup(urls)


    #pklFile.close()

    def main(*args):
    # Setup logging - going to store time info in here
    logging.basicConfig(level=logging.WARNING, filename='checksites.log',
    format='%(asctime)s %(levelname)s: %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S')

    # Load previous data
    pickle_file = 'data.pkl'
    pickledata = load_old_results(pickle_file)

    # Check sites only if Internet available
    if is_internet_reachable():
    # Skip the first arg since that is the name of the script
    for url in args[0][1:]:
    available = is_url_reachable(url)
    status = '%s is down' % url
    if available:
    status = '%s is up' % url
    print status
    if url in pickledata and pickledata[url] != available:
    # Send status messages wherever
    logging.warning(status)
    email_alert(str(get_headers(url)), status)
    pickledata[url] = available
    else:
    logging.error('Either the world ended or we are not connected to the net.')

    # Store results in pickle file
    store_results(pickle_file)

    if __name__ == '__main__':
    main(sys.argv)
  8. @sanbornm sanbornm revised this gist Sep 15, 2009. 1 changed file with 62 additions and 19 deletions.
    81 changes: 62 additions & 19 deletions gistfile1.py
    Original file line number Diff line number Diff line change
    @@ -1,21 +1,43 @@
    import pickle, pprint, time, os
    import httplib
    import smtplib

    if os.path.isfile('data.pkl'):
    pklFile = open('data.pkl','rb')
    data1 = pickle.load(pklFile)
    pprint.pprint(data1)

    elapsedTime = time.time() - data1['timestamp']
    elapsedMinutes = elapsedTime/60
    def emailAlert(alert,subject='You have an alert'):
    fromaddr = "youremail@domain.com"
    toaddrs = "youremail@domain.com"

    # Add the From: and To: headers at the start!
    msg = ("From: %s\r\nSubject: %s\r\nTo: %s\r\n\r\n"
    % (fromaddr,subject,toaddrs))
    msg = msg + alert

    server = smtplib.SMTP('localhost')
    # server.set_debuglevel(1)
    server.sendmail(fromaddr, toaddrs, msg)
    server.quit()

    if elapsedMinutes > 2:
    print 'It\'s been longer than two minutes'
    else:
    data1 = {}

    def internetUp():
    data = []
    urls = ['www.google.com','www.yahoo.com']
    try:
    for url in urls:
    conn = httplib.HTTPConnection(url)
    conn.request("HEAD", "/")
    res = conn.getresponse()
    data.append(res.status)
    # print res.status, res.reason, url
    if data[0] != 200 and data[1] != 200:
    return False
    exit('Internet might be down!')
    else:
    return True
    except:
    exit('Internet is defeinitely down!')


    def isSiteup(urls):
    import httplib

    data = {}
    data['timestamp'] = time.time()
    @@ -24,27 +46,48 @@ def isSiteup(urls):
    conn.request("HEAD", "/")
    res = conn.getresponse()
    data[url] = res.status
    print res.status, res.reason, url
    # print res.status, res.reason, url

    if url in data1:
    if data1[url] != res.status:
    print url, 'has changed from', data1[url], 'to', res.status
    print 'Sending an email!'
    else:
    print url, 'is still the same', data1[url], 'and', res.status
    alertMessage = ("%s has changed from %s to %s" % (url, data1[url], res.status))
    alertSubject = ("%s has changed status" % (url))
    emailAlert(alertMessage,alertSubject)
    # print 'Sending an email!'
    #else:
    # print url, 'is still the same', data1[url], 'and', res.status

    output = open('data.pkl','wb')
    pickle.dump(data, output)
    output.close()


    urls = ['www.sfrcorp.com',
    'www.nixtutor.com',

    # Check to see if the internet is up
    internetUp()

    if os.path.isfile('data.pkl'):
    pklFile = open('data.pkl','rb')
    data1 = pickle.load(pklFile)
    # pprint.pprint(data1)

    elapsedTime = time.time() - data1['timestamp']
    elapsedMinutes = elapsedTime/60

    #if elapsedMinutes > 2:
    # print 'It\'s been longer than two minutes'
    else:
    data1 = {}

    # Urls to check
    urls = ['www.nixtutor.com',
    'www.marksanborn.net',
    'faceoffshow.com',
    'rocketship.it',
    'jaderobbins.com']

    # Run the checks
    isSiteup(urls)

    #pklFile.close()

    #pklFile.close()
  9. @sanbornm sanbornm renamed this gist Aug 29, 2009. 1 changed file with 0 additions and 0 deletions.
    File renamed without changes.
  10. @sanbornm sanbornm created this gist Aug 29, 2009.
    50 changes: 50 additions & 0 deletions gistfile1.txt
    Original file line number Diff line number Diff line change
    @@ -0,0 +1,50 @@
    import pickle, pprint, time, os

    if os.path.isfile('data.pkl'):
    pklFile = open('data.pkl','rb')
    data1 = pickle.load(pklFile)
    pprint.pprint(data1)

    elapsedTime = time.time() - data1['timestamp']
    elapsedMinutes = elapsedTime/60

    if elapsedMinutes > 2:
    print 'It\'s been longer than two minutes'
    else:
    data1 = {}


    def isSiteup(urls):
    import httplib

    data = {}
    data['timestamp'] = time.time()
    for url in urls:
    conn = httplib.HTTPConnection(url)
    conn.request("HEAD", "/")
    res = conn.getresponse()
    data[url] = res.status
    print res.status, res.reason, url

    if url in data1:
    if data1[url] != res.status:
    print url, 'has changed from', data1[url], 'to', res.status
    print 'Sending an email!'
    else:
    print url, 'is still the same', data1[url], 'and', res.status

    output = open('data.pkl','wb')
    pickle.dump(data, output)
    output.close()


    urls = ['www.sfrcorp.com',
    'www.nixtutor.com',
    'www.marksanborn.net',
    'faceoffshow.com',
    'rocketship.it',
    'jaderobbins.com']

    isSiteup(urls)

    #pklFile.close()