#!/usr/bin/python
#
# Downloads buildlog for more recent build
#
# Copyright (C) 2009-2010, Canonical, Ltd.
# Author: Kees Cook <kees@ubuntu.com>
#
# TODO: handle not specifying pocket so we can just "get latest" of Release, Updates, Security

import sys, optparse, time, os, tempfile, subprocess, shutil, urllib2, glob
import lpl_common
import apt_pkg
from launchpadbugs import http_connection
from configobj import ConfigObj


# Cookie for protected file downloads
cookie_processor = http_connection.LPCookieProcessor()
config = ConfigObj(os.path.expanduser("~/.ubuntu-cve-tracker.conf"))
cookie_file = config["plb_authentication"]
# Work around Firefox 3.5's dumb sqlite locking problems by copying cookies out:
sql = None
if cookie_file.endswith('.sqlite'):
    sql = tempfile.NamedTemporaryFile(prefix='cookies-XXXXXX', suffix='.sqlite')
    cookie_file = sql.name
    shutil.copy(config["plb_authentication"], cookie_file)
cookie_processor.load_file(cookie_file)
opener = urllib2.build_opener(cookie_processor)
sql = None

apt_pkg.InitSystem();

# Command-line interface.  With no positional args, every Published
# source in the selected series/pocket/component is considered;
# otherwise only the named source packages are.
parser = optparse.OptionParser()
parser.add_option("-r","--release", help="Which release to download logs from (required)", metavar="SERIES", action='store', default=None)
parser.add_option("-n","--dry-run", help="Do not actually fetch logs", action='store_true')
parser.add_option("-v","--verbose", help="Add additional reporting", action='store_true')
parser.add_option("-c","--component", help="Which component to download logs from (default 'main')", metavar="COMPONENT", action='store', default='main')
parser.add_option("--lpnet", help="Use lpnet instead of edge for LP API", action='store_true')
parser.add_option("-b", "--base", help="The archive to load logs from (default: Ubuntu Archive)", metavar="GROUP[/PPA]", action='store', default='ubuntu')
parser.add_option("-p","--pocket", help="Which release pocket to download logs from (default 'Release')", metavar="POCKET", action='store', default='Release')
(opt, args) = parser.parse_args()

# --release is mandatory; everything else has a usable default.
if not opt.release:
    print >>sys.stderr, 'Usage: %s [OPTIONS] [PKG PKG ...]' % (sys.argv[0])
    sys.exit(1)

if opt.verbose:
    print "Loading Ubuntu Distribution ..."
# Connect to the Launchpad API and resolve the archive (primary archive
# or PPA, see --base) plus the requested distro series.
lp = lpl_common.connect(use_edge=(not opt.lpnet))
ubuntu = lp.distributions['ubuntu']
archive, base_group, base_ppa = lpl_common.get_archive(opt.base, lp, opt.verbose, ubuntu)
series = ubuntu.getSeries(name_or_version=opt.release)

def chunk_report(bytes_so_far, chunk_size, total_size):
    """Render a one-line download progress indicator on stdout.

    The line ends with a carriage return (no newline) so successive
    calls overwrite each other in place.  When total_size is falsy the
    percentage is omitted.  chunk_size is accepted for hook-signature
    compatibility but not used.
    """
    if total_size:
        pct = round(float(bytes_so_far) / total_size * 100, 2)
        line = "\t\t" + "Downloaded %d of %d bytes (%0.2f%%)" % (bytes_so_far, total_size, pct)
    else:
        line = "\t\t" + "Downloaded %d bytes" % (bytes_so_far)

    sys.stdout.write(line + "\r")
    sys.stdout.flush()

def chunked_read(response, chunk_size=8192, outfile=None, report_hook=chunk_report):
    """Read an HTTP response in chunks, optionally saving and reporting.

    response: urllib2-style response object (info()/read()).
    chunk_size: bytes to request per read.
    outfile: open file object to write the payload to, or None.
    report_hook: progress callback (bytes_so_far, chunk_size, total_size),
                 or None for silent operation.
    Returns the total number of bytes read.
    """
    # A server is not obliged to send Content-Length; fall back to
    # total_size=None so the hook reports bytes only instead of crashing
    # on None.strip().
    content_length = response.info().getheader('Content-Length')
    total_size = int(content_length.strip()) if content_length is not None else None
    bytes_so_far = 0

    while 1:
        chunk = response.read(chunk_size)
        if outfile:
            outfile.write(chunk)
        bytes_so_far += len(chunk)
        if not chunk:
            # Terminate the \r-overwritten progress line.
            sys.stdout.write("\n")
            break
        if report_hook:
            report_hook(bytes_so_far, chunk_size, total_size)
    return bytes_so_far

def download(url, filename=None):
    if not filename:
        filename = os.path.basename(url)
    tmp = filename + '.downloading'
    print "\t%s ..." % (url)

    tries = 0
    max_tries = 10
    while True:
        try:
            response = opener.open(url)
            break
        except urllib2.URLError, e:
            tries += 1
            if tries >= max_tries or (e.code != 502 and e.code != 504 and e.code != 500):
                print >>sys.stderr, "Failed (%d): %s" % (e.code, url)
                raise

            print >>sys.stderr, "Retrying (%d): %s" % (e.code, url)
            time.sleep(3)

    chunked_read(response, outfile=file(tmp,'w'))
    os.rename(tmp,filename)


def get_build(source_item, arch):
    """Return the build-log URL for source_item on the given arch.

    First checks the builds attached to source_item itself; if none
    matches the architecture, falls back to scanning every Published
    publication of the same source name in the archive for one with the
    same version.  Returns None when no build record can be found.
    """
    direct = next((b for b in source_item.getBuilds() if b.arch_tag == arch), None)
    if direct is not None:
        return direct.build_log_url

    # Bleh, did not find build record for this release.  Instead, fall
    # back to searching all releases for this source package and version.
    wanted_version = source_item.source_package_version
    published = archive.getPublishedSources(status='Published', exact_match=True, source_name=source_item.source_package_name)
    for candidate in published:
        if candidate.source_package_version != wanted_version:
            continue
        hit = next((b for b in candidate.getBuilds() if b.arch_tag == arch), None)
        if hit is not None:
            return hit.build_log_url

    # Still nothing??
    return None



# Collect the publication records to fetch logs for: the named packages
# from the command line, or everything Published in the series/pocket.
sources = []
if len(args):
    for pkg in args:
        sources += archive.getPublishedSources(status='Published', distro_series=series, pocket=opt.pocket, exact_match=True, source_name=pkg)
else:
    sources = archive.getPublishedSources(status='Published', distro_series=series, pocket=opt.pocket)

# Only the i386 build log is fetched for each source.
arch = 'i386'
for source_item in sources:
    # Skip sources outside the requested component (default 'main').
    if source_item.component_name != opt.component:
        if opt.verbose:
            print '%s: %s' % (source_item.source_package_name, source_item.component_name)
        continue

    # Logs are saved as <source>_<arch>_<version>_log.gz; if a file for
    # this exact version already exists on disk, there is nothing to do.
    pkg_glob = '%s_%s_*' % (source_item.source_package_name, arch)
    pkg_ver_glob = '%s_%s_%s_*' % (source_item.source_package_name, arch, source_item.source_package_version)
    if len(glob.glob(pkg_ver_glob))==1:
        print '%s: cached' % (source_item.source_package_name)
        continue

    print "%s: finding %s build ..." % (source_item.source_package_name, arch)
    # Drop logs left over from older versions of this source.
    for oldlog in glob.glob(pkg_glob):
        print "\tremoving old build log '%s' ..." % (oldlog)
        os.unlink(oldlog)

    url = get_build(source_item, arch)
    if url:
        if not opt.dry_run:
            download(url, '%s_%s_%s_log.gz' % (source_item.source_package_name, arch, source_item.source_package_version))
    else:
        print "\tNo build found?!"