#!/usr/bin/python3
#
# Send build performance test report emails
#
# Copyright (c) 2017, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-only
#
import argparse
import base64
import logging
import os
import pwd
import re
import shutil
import smtplib
import socket
import subprocess
import sys
import tempfile
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
# Setup logging
logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
log = logging.getLogger('oe-build-perf-report')

# Find js scraper script used to render html report charts into pngs
SCRAPE_JS = os.path.join(os.path.dirname(__file__), '..', 'lib', 'build_perf',
                         'scrape-html-report.js')
if not os.path.isfile(SCRAPE_JS):
    # Report the path actually probed (the old message named a stale,
    # hard-coded filename and had a typo: "Unableto find ...-scrape.js")
    log.error("Unable to find %s", SCRAPE_JS)
    sys.exit(1)
class ReportError(Exception):
    """Exception type for errors local to this script."""
def check_utils():
    """Verify that all required external tools are available in PATH.

    Logs an error and exits with status 1 if any tool is missing.
    """
    not_found = [tool for tool in ('phantomjs', 'optipng')
                 if shutil.which(tool) is None]
    if not_found:
        log.error("The following tools are missing: %s", ' '.join(not_found))
        sys.exit(1)
def parse_args(argv):
    """Parse command line arguments.

    Returns the parsed argparse Namespace; exits with a usage error if
    neither --html nor --text is given.
    """
    description = """Email build perf test report"""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description=description)

    parser.add_argument('--debug', '-d', action='store_true',
                        help="Verbose logging")
    parser.add_argument('--quiet', '-q', action='store_true',
                        help="Only print errors")
    parser.add_argument('--to', action='append',
                        help="Recipients of the email")
    parser.add_argument('--cc', action='append',
                        help="Carbon copy recipients of the email")
    parser.add_argument('--bcc', action='append',
                        help="Blind carbon copy recipients of the email")
    parser.add_argument('--subject', default="Yocto build perf test report",
                        help="Email subject")
    parser.add_argument('--outdir', '-o',
                        help="Store files in OUTDIR. Can be used to preserve "
                             "the email parts")
    parser.add_argument('--text',
                        help="Plain text message")
    parser.add_argument('--html',
                        help="HTML report generated by oe-build-perf-report")
    parser.add_argument('--phantomjs-args', action='append',
                        help="Extra command line arguments passed to PhantomJS")

    args = parser.parse_args(argv)

    # At least one message body (plain text or html) is required
    if not args.html and not args.text:
        parser.error("Please specify --html and/or --text")

    return args
def decode_png(infile, outfile):
    """Parse/decode/optimize png data from a html element.

    Reads *infile*, extracts the base64-encoded payload from an
    href="data:image/png;base64,..." attribute, writes the decoded binary
    png to *outfile*, then shrinks it in place with optipng.

    Raises subprocess.CalledProcessError if optipng fails.
    """
    with open(infile) as f:
        raw_data = f.read()

    # Grab raw base64 data: drop everything up to and including the data-uri
    # prefix, then everything from the closing quote onwards.
    # NOTE: count is passed by keyword — positional count is deprecated
    # since Python 3.13.
    b64_data = re.sub('^.*href="data:image/png;base64,', '', raw_data, count=1)
    b64_data = re.sub('">.+$', '', b64_data, count=1)

    # Replace file with proper decoded png
    with open(outfile, 'wb') as f:
        f.write(base64.b64decode(b64_data))

    subprocess.check_output(['optipng', outfile], stderr=subprocess.STDOUT)
def mangle_html_report(infile, outfile, pngs):
    """Mangle html file into a email compatible format"""
    paste = True
    png_dir = os.path.dirname(outfile)
    with open(infile) as f_in:
        with open(outfile, 'w') as f_out:
            for line in f_in.readlines():
                stripped = line.strip()
                # Strip out scripts
                if stripped == '':
                    paste = False
                elif stripped == '':
                    paste = True
                elif paste:
                    if re.match('^.+href="data:image/png;base64', stripped):
                        # Strip out encoded pngs (as they're huge in size)
                        continue
                    elif 'www.gstatic.com' in stripped:
                        # HACK: drop references to external static pages
                        continue
                    # Replace charts with  elements
                    match = re.match('