#!/usr/bin/python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import logging
import logging.handlers
from os import path
import subprocess
import sys
import threading
import time
import urllib2

import uploader_mail

# How many seconds to wait between iteration cycles.
CYCLE_DELAY = 10800  # 3 hours
# Verify that a commit at least this old has been uploaded to the
# devtools-frontend appspot instance.
CHECKER_DELAY = "12 hours"
CYCLES_PER_24_HOURS = (86400 / CYCLE_DELAY)
CHECKER_USERDIR = path.join(path.dirname(path.abspath(__file__)), '..', '..')  # /home/checker
CHROMIUM_CHECKOUT_PATH = path.join(CHECKER_USERDIR, 'src')
LOG_PATH = path.join(CHECKER_USERDIR, 'logs', 'checker.log')
#MAILCONFIG_PATH = path.join(CHECKER_USERDIR, 'secret_mail_config')

consecutive_success = 0
consecutive_error = 0


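# Main loop: every CYCLE_DELAY seconds, roll the log file over, fetch
# origin/main, and verify that the commit from CHECKER_DELAY ago is being
# served by the devtools-frontend appspot instance.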
def main():
  logfile = init_logger()
  start_uptime_check_server()
  while True:
    logfile.doRollover()
    logging.info('Starting iteration cycle')
    call(['git', 'fetch', 'origin', 'main'])
    commit_hash = call(['git', 'log', 'origin/main', '-n1',
                        '--before={}'.format(CHECKER_DELAY),
                        '--pretty=format:%H'])
    check_commit(commit_hash.strip())
    logging.info('Finished iteration cycle')
    time.sleep(CYCLE_DELAY)


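# Log INFO messages both to the console and to a rotating log file
# (30 backups are kept; main() rolls the file over once per cycle).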
def init_logger():
  logger = logging.getLogger()
  logger.setLevel(logging.INFO)
  console = logging.StreamHandler()
  console.setLevel(logging.INFO)
  logger.addHandler(console)
  logfile = logging.handlers.RotatingFileHandler(LOG_PATH, backupCount=30)
  logfile.setLevel(logging.INFO)
  logger.addHandler(logfile)
  formatter = logging.Formatter('%(asctime)s:%(message)s',
                                datefmt='%Y-%m-%d %H:%M:%S')
  console.setFormatter(formatter)
  logfile.setFormatter(formatter)
  return logfile


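# Request inspector.html for the given commit from the appspot frontend.
# An email is triggered when the result flips between success and error;
# repeated successes re-notify roughly once a day and repeated errors
# roughly every six hours, via the consecutive_* counters.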
def check_commit(commit_hash):
  global consecutive_success
  global consecutive_error
  url = 'https://chrome-devtools-frontend.appspot.com/serve_file/@{}/inspector.html'.format(commit_hash)
  request = urllib2.Request(url)
  try:
    response = urllib2.urlopen(request)
    summary = format_summary('Success', commit_hash, response.getcode())
    logging.info(summary)
    consecutive_error = 0
    if consecutive_success == 0:
      send_email(subject=summary, body='Response body:\n' + response.read())
    consecutive_success += 1
    if consecutive_success >= CYCLES_PER_24_HOURS:
      consecutive_success = 0
  except urllib2.HTTPError as error:
    summary = format_summary('Error', commit_hash, error.code)
    logging.info(summary)
    consecutive_success = 0
    if consecutive_error == 0:
      send_email(subject=summary, body='Error message:\n' + error.reason)
    consecutive_error += 1
    if consecutive_error >= CYCLES_PER_24_HOURS / 4:
      consecutive_error = 0


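# Email delivery is currently disabled; the message is only logged.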
def send_email(subject, body):
  # with open(MAILCONFIG_PATH, 'r') as mailconfig_file:
  #   mail_config = uploader_mail.ParseMailConfig(mailconfig_file)
  #   uploader_mail.SendMail(mail_config, subject, body)
  logging.info('Did NOT send email with subject: {} and body: {}'.format(subject, body))


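# Run a command in the Chromium checkout, capture combined stdout/stderr,
# and send an email if it exits with a non-zero status.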
def call(args, log_output=True, cwd=CHROMIUM_CHECKOUT_PATH):
  process = subprocess.Popen(args, stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT, cwd=cwd)
  out, _ = process.communicate()
  if process.returncode != 0:
    summary = 'Error {} from {}'.format(process.returncode, args)
    logging.info(summary)
    send_email(subject=summary, body=out)
  if log_output:
    logging.info(out)
  return out


def format_summary(status, commit_hash, status_code):
  return '[devtools-frontend-checker] {} for commit {} - HTTP status {}'.format(
      status, commit_hash, status_code)


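# Serve empty HTTP 200 responses on port 80 so external uptime monitoring
# can verify that the checker process is alive.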
def start_uptime_check_server():
  class Handler(BaseHTTPRequestHandler):
    def do_GET(self):
      self.send_response(200)
      self.send_header('Content-Type', 'text/html')
      self.end_headers()

  server = HTTPServer(('', 80), Handler)
  thread = threading.Thread(target=server.serve_forever)
  thread.daemon = True
  thread.start()


if __name__ == '__main__':
  sys.exit(main())