#!/usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# vim:smartindent cinwords=if,elif,else,for,while,try,except,finally,def,class:ts=4:sts=4:sta:et:ai:shiftwidth=4
#
# arch-tag: Simple patch queue manager for tla
# Copyright © 2003,2004 Colin Walters <walters@verbum.org>
# Copyright ©  2004 Canonical Ltd. 
#	Author: Robert Collins <robertc@robertcollins.net>
# Copyright © 2003, 2005 Walter Landry

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

# Some junk to try finding Python 2.3, if "python" on this system
# is too old.
import os,sys
if sys.hexversion >= 0x2030000:
    # Running interpreter is at least Python 2.3 (0x2030000); proceed.
    pass
else:
    # Interpreter too old: try re-execing under $PYTHON, then under
    # "python2.3".  The bare `1` statements are deliberate no-ops --
    # exec failures are silently ignored so the next candidate can be
    # tried.  If both execs fail we fall through to the error below.
    if os.getenv('PYTHON'):
        try:
            os.execvp(os.getenv('PYTHON'), [os.getenv('PYTHON')] + sys.argv)
        except:
            1
    try:
        os.execvp('python2.3', ['python2.3'] + sys.argv)
    except:
        1
    sys.stderr.write("This program requires Python 2.3\n")
    sys.exit(1)

import string, stat, re, glob, getopt, time, traceback, gzip, getpass, popen2
import smtplib, email
import logging, logging.handlers
import arch_pqm
from arch_pqm import *

def popen_noshell(cmd, *args):
    """Run `cmd` with `args` (no shell, stdin from /dev/null).

    Returns the (status, msg, output) triple produced by
    popen_noshell_with_input.
    """
    # apply() is deprecated since Python 2.3; the extended call syntax
    # is equivalent.
    return popen_noshell_with_input(cmd, None, *args)

def popen_noshell_with_input(cmd, inputfd, *args):
    """Fork/exec `cmd` with `args` (no shell involved), feeding it
    `inputfd` on stdin (or /dev/null when inputfd is None).

    Returns (status, msg, output): the raw waitpid() status, a
    human-readable failure description ('' on success), and the
    child's combined stdout/stderr as a list of lines.
    """
    # os.pipe() returns (read end, write end); the child writes, the
    # parent reads.
    (readend, writeend) = os.pipe()
    pid = os.fork()
    if pid == 0:
        # Child: wire stdin to inputfd (or /dev/null) and send both
        # stdout and stderr back through the pipe.
        os.close(readend)
        if inputfd is None:
            inputfd = os.open('/dev/null', os.O_RDONLY)
        os.dup2(inputfd, 0)
        os.dup2(writeend, 1)
        os.dup2(writeend, 2)
        logger.info("running: " + string.join([cmd] + list(args), ' '))
        os.execvp(cmd, [cmd] + list(args))
        # execvp only returns on failure; exit the child immediately
        # without running parent cleanup handlers.  (Fixed: the
        # original called os.exit(), which does not exist.)
        os._exit(1)
    os.close(writeend)
    output = os.fdopen(readend).readlines()
    (pid, status) = os.waitpid(pid, 0)
    msg = ''
    if not (status is None or (os.WIFEXITED(status) and os.WEXITSTATUS(status) == 0)):
        if os.WIFEXITED(status):
            msg = "%s exited with error code %d" % (cmd, os.WEXITSTATUS(status),)
        elif os.WIFSTOPPED(status):
            msg = "%s stopped unexpectedly with signal %d" % (cmd, os.WSTOPSIG(status),)
        elif os.WIFSIGNALED(status):
            msg = "%s died with signal %d" % (cmd, os.WTERMSIG(status),)
    return (status, msg, output)

class AbstractArchHandler:
    """Interface for arch back-ends (tla, baz, ArX).

    Every operation defaults to failing with 'Unsupported operation';
    concrete handlers override the subset their tool implements.
    """

    def _unsupported(self, sender):
        # Common failure path shared by every default implementation.
        raise PQMTlaFailure(sender, 'Unsupported operation')

    def do_star_merge(self, sender, fromarchive, fromrevision, to_archive, to_revision):
        self._unsupported(sender)

    def do_replay(self, sender, fromarchive, fromrevision, to_archive, to_revision):
        self._unsupported(sender)

    def do_archive_cache(self, sender, fromarchive, fromrevision):
        self._unsupported(sender)

    def do_archive_uncache(self, sender, fromarchive, fromrevision):
        self._unsupported(sender)

    def do_tag(self, sender, fromarchive, fromrevision, to_archive, to_revision):
        self._unsupported(sender)

    def do_create_branch(self, sender, to_archive, to_revision):
        self._unsupported(sender)

    def do_make_archive(self, sender, archive, location):
        self._unsupported(sender)

    def do_register_archive(self, sender, archive, location):
        self._unsupported(sender)

    def do_create_version(self, sender, archive, revision):
        self._unsupported(sender)

    def do_whereis_archive(self, sender, archive):
        self._unsupported(sender)

class ArXHandler(AbstractArchHandler):
    """Back-end driver mapping the PQM operations onto ArX's command
    vocabulary."""

    def do_star_merge(self, sender, fromarchive, fromrevision, to_archive, to_revision):
        source = '%s/%s' % (fromarchive, fromrevision)
        return runtla(sender, 'merge', source)

    def do_archive_cache(self, sender, fromarchive, fromrevision):
        source = '%s/%s' % (fromarchive, fromrevision)
        return runtla(sender, 'archive-cache', '--add', source)

    def do_archive_uncache(self, sender, fromarchive, fromrevision):
        source = '%s/%s' % (fromarchive, fromrevision)
        return runtla(sender, 'archive-cache', '--delete', source)

    def do_tag(self, sender, fromarchive, fromrevision, to_archive, to_revision):
        target = '%s/%s' % (to_archive, to_revision)
        source = '%s/%s' % (fromarchive, fromrevision)
        return runtla(sender, 'tag', target, source)

    def do_create_branch(self, sender, to_archive, to_revision):
        target = '%s/%s' % (to_archive, to_revision)
        return runtla(sender, 'fork', target)

    def do_make_archive(self, sender, archive, location):
        return runtla(sender, 'make-archive', '%s' % (archive), '%s' % (location))

    def do_register_archive(self, sender, archive, location):
        # NOTE(review): `archive` is unused here; ArX's `archives -a`
        # appears to derive the name from the location -- confirm.
        return runtla(sender, 'archives', '-a', '%s' % (location))

    def do_create_version(self, sender, archive, revision):
        target = '%s/%s' % (archive, revision)
        return runtla(sender, 'init', target)

    def do_whereis_archive(self, sender, archive):
        return runtla(sender, 'archives', '%s/' % (archive))

class TlaHandler(AbstractArchHandler):
    """Back-end driver mapping the PQM operations onto tla commands."""

    def do_star_merge(self, sender, fromarchive, fromrevision, to_archive, to_revision):
        return runtla(sender, 'star-merge', '%s/%s' % (fromarchive, fromrevision))

    def do_replay(self, sender, fromarchive, fromrevision, to_archive, to_revision):
        return runtla(sender, 'replay', '%s/%s' % (fromarchive, fromrevision))

    def do_archive_cache(self, sender, fromarchive, fromrevision):
        return runtla(sender, 'cacherev', '%s/%s' % (fromarchive, fromrevision))

    def do_archive_uncache(self, sender, fromarchive, fromrevision):
        return runtla(sender, 'uncacherev', '%s/%s' % (fromarchive, fromrevision))

    def do_tag(self, sender, fromarchive, fromrevision, to_archive, to_revision):
        return runtla(sender, 'tag', '%s/%s' % (fromarchive, fromrevision),
                      '%s/%s' % (to_archive, to_revision))

    def do_create_branch(self, sender, to_archive, to_revision):
        return runtla(sender, 'archive-setup', '%s/%s' % (to_archive, to_revision))

    def do_make_archive(self, sender, archive, location):
        return runtla(sender, 'make-archive', '%s' % (archive), '%s' % (location))

    def do_register_archive(self, sender, archive, location):
        return runtla(sender, 'register-archive', '%s' % (archive), '%s' % (location))

    def do_create_version(self, sender, archive, revision):
        # Fixed: previously referenced the undefined names to_archive /
        # to_revision, so create-version always died with a NameError.
        return runtla(sender, 'archive-setup', '%s/%s' % (archive, revision))

    def do_whereis_archive(self, sender, archive):
        return runtla(sender, 'whereis-archive', '%s' % (archive))

class BazBaseHandler(AbstractArchHandler):
    """Operations shared by all baz releases; version-specific
    subclasses supply star-merge and the archive-setup commands."""

    def do_replay(self, sender, fromarchive, fromrevision, to_archive, to_revision):
        source = '%s/%s' % (fromarchive, fromrevision)
        return runtla(sender, 'replay', source)

    def do_archive_cache(self, sender, fromarchive, fromrevision):
        source = '%s/%s' % (fromarchive, fromrevision)
        return runtla(sender, 'cacherev', source)

    def do_archive_uncache(self, sender, fromarchive, fromrevision):
        source = '%s/%s' % (fromarchive, fromrevision)
        return runtla(sender, 'uncacherev', source)

    def do_tag(self, sender, fromarchive, fromrevision, to_archive, to_revision):
        source = '%s/%s' % (fromarchive, fromrevision)
        target = '%s/%s' % (to_archive, to_revision)
        return runtla(sender, 'branch', source, target)

    def do_make_archive(self, sender, archive, location):
        return runtla(sender, 'make-archive', '%s' % (archive), '%s' % (location))

    def do_register_archive(self, sender, archive, location):
        return runtla(sender, 'register-archive', '%s' % (archive), '%s' % (location))

    def do_whereis_archive(self, sender, archive):
        return runtla(sender, 'whereis-archive', '%s' % (archive))
    
class Baz1_0Handler(BazBaseHandler):
    """baz 1.0: tla-compatible star-merge and archive-setup."""

    def do_star_merge(self, sender, fromarchive, fromrevision, to_archive, to_revision):
        return runtla(sender, 'star-merge', '-t', '%s/%s' % (fromarchive, fromrevision))

    def do_create_branch(self, sender, to_archive, to_revision):
        return runtla(sender, 'archive-setup', '%s/%s' % (to_archive, to_revision))

    def do_create_version(self, sender, archive, revision):
        # Fixed: previously referenced the undefined names to_archive /
        # to_revision, so create-version always died with a NameError.
        return runtla(sender, 'archive-setup', '%s/%s' % (archive, revision))

class Baz1_1Handler(BazBaseHandler):
    """baz >= 1.1: star-merge moved under `merge --star-merge`."""

    def do_star_merge(self, sender, fromarchive, fromrevision, to_archive, to_revision):
        source = '%s/%s' % (fromarchive, fromrevision)
        return runtla(sender, 'merge', '--star-merge', source)


def usage(ecode, ver_only=None):
    """Print the program version and (unless ver_only) the option
    summary, then exit with status `ecode`."""
    emit = sys.stdout.write
    emit("arch-pqm 0\n")
    if ver_only:
        sys.exit(ecode)
    for text in (
        "Usage: arch-pqm [OPTIONS...] [DIRECTORY]",
        "Options:",
        "  -v, --verbose\t\tDisplay extra information",
        "  -q, --quiet\t\tDisplay less information",
        "  -c, --config=FILE\tParse configuration info from FILE",
        "  -d, --debug\t\tOutput information to stdout as well as log",
        "  --no-log\t\tDon't write information to log file",
        "  -n, --no-act\t\tDon't actually perform changes",
        "  -r, --read\t\tRead a request from stdin",
        "  --run\t\tProcess queue",
        "  --report\t\tPrint patch report (used with --run)",
        "  --no-verify\t\tDon't verify signatures",
        "  --queuedir=DIR\t\tPerform first-time configuration",
        "  --keyring=FILE\t\tUse the specified GPG keyring",
        "  --help\t\tWhat you're looking at",
        "  --version\t\tPrint the software version and exit",
    ):
        emit(text + "\n")
    sys.exit(ecode)

def do_mkdir(name):
    """Create directory `name` unless it is already accessible.

    Logging is best-effort (the logger may not be configured yet) and
    the mkdir itself is suppressed in no-act mode."""
    if not os.access(name, os.X_OK):
        try:
            logger.info('Creating directory "%s"' % (name))
        except:
            pass
        if not no_act:
            os.mkdir(name)

def do_rename(source, target):
    """Rename `source` to `target`; log best-effort, skip the actual
    rename in no-act mode."""
    try:
        logger.debug('Renaming "%s" to "%s"' % (source, target))
    except:
        pass
    if no_act:
        return
    os.rename(source, target)

def do_chmod(name, mode):
    """chmod `name` to `mode`; log best-effort, skip the actual chmod
    in no-act mode."""
    try:
        logger.info('Changing mode of "%s" to %o' % (name, mode))
    except:
        pass
    if no_act:
        return
    os.chmod(name, mode)

def dir_from_option(configp, option, default):
    """calculate a working dir path"""
    fallback = os.path.join(queuedir, default)
    # NOTE(review): get_option looks like a wrapper taking a default
    # value, unlike stock ConfigParser.get -- confirm in arch_pqm.
    raw = configp.get_option('DEFAULT', option, fallback)
    return os.path.abspath(os.path.expanduser(raw))

class RevisionOptionHandler:
    """Resolve per-revision configuration options, falling back from
    the [dist] section to [DEFAULT] and finally to built-in defaults."""

    def __init__(self, revisions, configp):
        self._configp = configp
        self._revisions = revisions
        # option name -> [type tag, built-in default]
        self._optionmap = {
            'precommit_hook': ['str', None],
            'build_config': ['str', None],
            'build_dir': ['str', None],
            'commiters': ['str', None],
            'commit_re': ['str', None],
        }

    def get_option_map(self, dist):
        """Return the revision dict for `dist`, augmented with every
        known option (section value, else DEFAULT value, else the
        built-in default).  Mutates and returns the stored dict."""
        result = self._revisions[dist]
        for name in self._optionmap.keys():
            kind, fallback = self._optionmap[name]
            result[name] = fallback
            # Later sections win: DEFAULT first, then the dist section.
            for section in ('DEFAULT', dist):
                if self._configp.has_option (section, name):
                    result[name] = self.get_option (kind, section, name)
        return result

    def get_option (self, type, dist, key):
        """Fetch `key` from section `dist`, coerced according to
        `type` ('int', 'str' or 'bool')."""
        if type == 'bool':
            return self._configp.getboolean(dist, key)
        elif type == 'int':
            return self._configp.getint(dist, key)
        elif type == 'str':
            return self._configp.get(dist, key)

        assert(None)

class PQMTlaFailure(PQMException):
    """Raised when an arch back-end command fails.

    Carries the requesting sender and the command output (also
    stringified into msg) for the failure report."""

    def __init__(self, sender, output):
        self.msg = str(output)
        self.output = output
        self.sender = sender

class PQMCmdFailure(Exception):
    """A request command failed part-way through a request.

    Records the sender, the commands that had already succeeded
    (goodcmds), the failing command (badcmd) and the accumulated
    output transcript."""

    def __init__(self, sender, goodcmds, badcmd, output):
        self.output = output
        self.badcmd = badcmd
        self.goodcmds = goodcmds
        self.sender = sender

def runtla_internal(sender, cmd, *args):
    """Invoke the configured arch binary (arch_path) with `cmd` and
    `args`; returns the (status, msg, output) triple from
    popen_noshell.  `sender` is unused here but kept for signature
    symmetry with runtla."""
    # apply() is deprecated since Python 2.3; the extended call syntax
    # is equivalent.
    return popen_noshell(arch_path, cmd, *args)

def runtla(sender, cmd, *args):
    """Run an arch command and return its output lines.

    Raises PQMTlaFailure (carrying a description plus the command
    transcript) when the command exits non-zero."""
    # apply() is deprecated since Python 2.3; the extended call syntax
    # is equivalent.
    (status, msg, output) = runtla_internal(sender, cmd, *args)
    if not ((status is None) or (status == 0)):
        raise PQMTlaFailure(sender, ["arch command %s %s failed (%s): %s" % (cmd, args, status, msg)] + output)
    return output

def write_lines_to_fd(lines):
    (stdin, stdout) = os.pipe()
    pid = os.fork()
    if pid != 0:
        os.close(stdout)
        return stdin
    os.close(stdin)
    for line in lines:
        os.write(stdout, line)
        os.write(stdout, '\n')
    os._exit(0)

def do_patch(sender, content):
    """Apply `content` (a list of unified-diff lines whose first line
    is the commit summary) to the current working directory using GNU
    patch.

    Returns (summary, patch output lines).  Raises PQMException on an
    empty patch, a missing summary, an unsafe target filename, or a
    non-zero patch exit status.
    """
    def is_patchline(line):
        # Heuristic: diff bodies start with '+', '-' or a '@@' hunk
        # header, so the summary line must not look like one.
        return line != '' and (line[0] in ('+', '-') or line[0:2] == '@@')
    if content == []:
        raise PQMException(sender, "Empty patch content")
    if not is_patchline(content[0]):
        summary = content[0]
    else:
        raise PQMException(sender, "No summary given for patch")
    filenames = []
    for line in content:
        if line[0:4] in ('+++ ', '--- '):
            # We intentionally include the date, etc - stripping it out is too hard and error-prone
            filenames.insert(0, line[4:].strip())
    # Reject path traversal and absolute targets before handing the
    # content to GNU patch.
    # NOTE(review): PQMException is passed "...%s" and filename as two
    # separate arguments below -- the formatting is apparently never
    # applied; confirm against PQMException's signature in arch_pqm.
    for filename in filenames:
        if (filename.find('/..') > 0) or (filename.find('../') > 0):
            raise PQMException(sender, "Invalid backreferencing filename in patch: %s", filename)
        elif filename[0] == '/':
            raise PQMException(sender, "Invalid absolute filename in patch: %s", filename)
    fd = write_lines_to_fd(content)
    (status, msg, output) = popen_noshell_with_input(gnupatch_path, fd, '-p1', '--batch', '--no-backup-if-mismatch')
    os.close(fd)
    if not ((status is None) or (status == 0)):
        raise PQMException(sender, ["patch command \"%s\" failed (%s): %s" % (gnupatch_path, status, msg)] + output)
    return (summary, output)

def validate_revision(sender, archive, revision, output, successful):
    try:
        # FIXME, check the revision/version exists
        arch_impl.do_whereis_archive(sender,archive)
    except PQMTlaFailure, e:
        raise PQMCmdFailure(sender, successful, line, output + e.output)

def run_precommit(sender, successful, archive, revision, output):
    """Run the pre-commit hook for archive/revision (per-package
    setting, falling back to the global precommit_hook) and append its
    transcript to `output`.

    Raises PQMCmdFailure when the hook exits non-zero; returns the
    augmented output list.  (Re-indented with spaces -- the original
    body was tab-indented, unlike the rest of the file.)
    """
    hook = allowed_revisions[archive + '/' + revision]['precommit_hook']
    if not hook:
        hook = precommit_hook
    if hook:
        logger.info("running precommit hook: %s" % (hook,))
        output += ['\n', 'Executing pre-commit hook %s at %s' % (hook, time.strftime('%c')), '\n']
        child = popen2.Popen4(hook)
        child.tochild.close()
        output += child.fromchild.readlines()
        ecode = child.wait()
        if not ((ecode is None) or (ecode == 0)):
            # Fixed: the original reported `ecode - 255` (only correct
            # for exit status 1 -- wait() returns a raw status) and
            # referenced an undefined `line` as the failing command.
            raise PQMCmdFailure(sender, successful, 'pre-commit hook ' + hook,
                                output + ['\npre-commit hook failed with error code %d at %s\n' % (os.WEXITSTATUS(ecode), time.strftime('%c'))])
        output += ['\n', 'pre-commit hook succeeded at %s' % (time.strftime('%c')), '\n']
    return output
    
class CommandRunner(object):
    star_re = re.compile('^star-merge (\S+/\S+)\s+(\S+/\S+)\s*$')
    replay_re = re.compile('^replay (\S+/\S+)\s+(\S+/\S+)\s*$')
    archive_cache_re = re.compile('^archive-cache-revision (\S+/\S+)\s*$')
    archive_uncache_re = re.compile('^archive-uncache-revision (\S+/\S+)\s*$')
    tag_re = re.compile('^tag (\S+/\S+)\s+(\S+/\S+)\s*$')
    make_archive_re = re.compile('^make-archive (\S+)\s+(\S+)\s*$')
    my_id_re = re.compile('^my-id (\S.*)\s*$')
    register_archive_re = re.compile('^register-archive (\S+)\s+(\S+)\s*$')
    create_branch_re = re.compile('^create-branch (\S+/\S+)\s+(\S+/\S+)\s*$')
    create_version_re = re.compile('^create-version (\S+/\S+)\s*$')
    patch_re = re.compile('^patch (\S+/\S+)\s*$')
    whitespace_re = re.compile('^\s*$')
    pgp_re = re.compile('^-----BEGIN PGP.*MESSAGE')
    pgp_end_re = re.compile('^-----BEGIN PGP SIG')
    pgp_hash_re = re.compile('^Hash:')
    debug_re = re.compile('^debug')

    def run(self, sender, commitmsg, lines, user_email):
        self.accumulating_patch = False
        self.patch_target = None
        self.patch_content = []
        self.successful = []
        self.unrecognized = []
        self.output = []
        self.skipnext = None
        self.commitmsg=commitmsg
        self.user_email=user_email
        self.debug = True
        logger.info("cleaning working directory")
        cleanup_wd()
        logger.info("parsing commands")
        for line in lines:
            if self.run_command(line, sender):
                break
        if self.patch_content != []:
            to_archive_revision = self.patch_target
            (to_archive, to_revision) = to_archive_revision.split('/', 1)
            validate_revision(sender, to_archive, to_revision, self.output, self.successful)
            logger.info("getting working dir for %s/%s" % (to_archive, to_revision))
            origdir = os.getcwd()
            dir = get_wd(sender, to_archive, to_revision, self.user_email)
            try:
                os.chdir(dir)
                try:
                    self.output += ['\n']
                    (summary, moreoutput) = do_patch(sender, self.patch_content)
                    self.output += moreoutput
                    self.output += ['\n']
                except PQMTlaFailure, e:
                    raise PQMCmdFailure(sender, self.successful, 'patch ' + self.patch_target, self.output + e.output)
            except:
                os.chdir(origdir)
                raise
            logger.info("executing patch")
            self.output = run_precommit(sender, self.successful, to_archive, to_revision, self.output)
            self.successful.append('patch ' + self.patch_target)
            self.output += ['\n', 'patch succeeded at %s' % (time.strftime('%c')), '\n']
            try:
                os.chdir(dir)
                runtla(sender, 'commit', '-s', summary)
                logger.info("commit succeeded")
            finally:
                os.chdir(origdir)
        cleanup_wd()
        return (self.successful, self.unrecognized, self.output)
    
    def run_command(self, line, sender):
        if self.skipnext:
            self.skipnext = None
            return False
        if self.whitespace_re.match(line):
            return False
        if self.pgp_re.match(line):
            skipnext=1
            return False
        if self.pgp_end_re.match(line):
            return True
        if self.pgp_hash_re.match(line):
            return False
        patch_match = self.patch_re.match(line)
        star_match = self.star_re.match(line)
        replay_match = self.replay_re.match(line)
        archive_cache_match=self.archive_cache_re.match(line)
        archive_uncache_match=self.archive_uncache_re.match(line)
        tag_match=self.tag_re.match(line)
        create_branch_match=self.create_branch_re.match(line)
        make_archive_match=self.make_archive_re.match(line)
        my_id_match=self.my_id_re.match(line)
        register_archive_match=self.register_archive_re.match(line)
        create_version_match=self.create_version_re.match(line)
        debug_match = self.debug_re.match(line)
        if patch_match:
            # GNU Patch
            logger.info("patch content found, target: %s", patch_match.group(1))
            self.patch_target = patch_match.group(1)
            self.accumulating_patch = True
            return False
        elif self.accumulating_patch:
            self.patch_content.append(line)
            return False
        elif star_match:
            self.do_merge(from_archive_revision=star_match.group(1), to_archive_revision=star_match.group(2), merge_name='star-merge', merge_method=arch_impl.do_star_merge, line=line, sender=sender)
        elif replay_match:
            self.do_merge(from_archive_revision=replay_match.group(1), to_archive_revision=replay_match.group(2), merge_name='replay', merge_method=arch_impl.do_replay, line=line, sender=sender)

        elif archive_cache_match:
            # Cache a revision
            archive_revision = archive_cache_match.group(1)
            (archive, revision) = archive_revision.split('/', 1)
##                 if not allowed_revisions.has_key(archive_revision):
##                     raise PQMException(sender, "Disallowed archive/revision: " + archive_revision)
            self.wrap_command(arch_impl.do_whereis_archive, line, sender, archive)
            self.output += ['\n', 'Executing archive-cache-revision %s/%s at %s' % (archive, revision,
                                                                               time.strftime('%c')), '\n']
            self.wrap_command(arch_impl.do_archive_cache, line, sender, archive, revision)
            logger.info("success: %s" % (line,))
            self.successful.append(line)
            self.output += ['\n', 'archive-cache-revision succeeded at %s' % (time.strftime('%c')), '\n']
        elif archive_uncache_match:
            # Uncache a revision
            archive_revision = archive_uncache_match.group(1)
            (archive, revision) = archive_revision.split('/', 1)
##                     if not allowed_revisions.has_key(archive_revision):
##                         raise PQMException(sender, "Disallowed archive/revision: " + archive_revision)
            self.wrap_command(arch_impl.do_whereis_archive, line, sender, archive)
            self.output += ['\n', 'Executing archive-uncache-revision %s/%s at %s' % (archive, revision,
                                                                                 time.strftime('%c')), '\n']
            self.wrap_command(arch_impl.do_archive_uncache, line, sender, archive, revision)
            logger.info("success: %s" % (line,))
            self.successful.append(line)
            self.output += ['\n', 'archive-uncache-revision succeeded at %s' % (time.strftime('%c')), '\n']
        elif tag_match:
            # Tag a branch
            from_archive_revision = tag_match.group(1)
            to_archive_revision = tag_match.group(2)
            (from_archive, from_revision) = from_archive_revision.split('/', 1)
            (to_archive, to_revision) = to_archive_revision.split('/', 1)
##                         if not allowed_revisions.has_key(to_archive_revision):
##                             raise PQMException(sender, "Disallowed archive/revision: " + to_archive_revision)
            self.wrap_command(arch_impl.do_whereis_archive, line, sender, from_archive)
            self.wrap_command(arch_impl.do_whereis_archive, line, sender, to_archive)
            self.output += ['\n', 'Executing tag %s/%s at %s' % (from_archive, from_revision,
                                                            time.strftime('%c')), '\n']
            self.wrap_command(arch_impl.do_tag, line, sender, from_archive,from_revision, to_archive, to_revision)
            logger.info("success: %s" % (line,))
            self.successful.append(line)
            self.output += ['\n', 'tag succeeded at %s' % (time.strftime('%c')), '\n']
        elif create_branch_match:
            # Create a branch
            from_archive_revision = create_branch_match.group(1)
            to_archive_revision = create_branch_match.group(2)
            (from_archive, from_revision) = from_archive_revision.split('/', 1)
            (to_archive, to_revision) = to_archive_revision.split('/', 1)
##                             if not allowed_revisions.has_key(to_archive_revision):
##                                 raise PQMException(sender, "Disallowed archive/revision: " + to_archive_revision)
            self.wrap_command(arch_impl.do_whereis_archive, line, sender, from_archive)
            self.wrap_command(arch_impl.do_whereis_archive, line, sender, to_archive)
            logger.info("getting working dir for %s/%s" % (to_archive, to_revision))
            dir = get_wd(sender, from_archive, from_revision, self.user_email)
            origdir = os.getcwd()
            self.output += ['\n', 'Executing create-branch %s/%s %s/%s at %s' % (from_archive, from_revision, to_archive, to_revision,
                                                                            time.strftime('%c')), '\n']
            try:
                os.chdir(dir)
                self.wrap_command(arch_impl.do_create_branch, line, sender, to_archive,to_revision)
            except:
                os.chdir(origdir)
                raise
            os.chdir(origdir)
            logger.info("success: %s" % (line,))
            self.successful.append(line)
            self.output += ['\n', 'create-branch succeeded at %s' % (time.strftime('%c')), '\n']
            try:
                os.chdir(dir)
                runtla(sender, 'commit', '-s', self.commitmsg)
                logger.info("commit succeeded")
            finally:
                os.chdir(origdir)
        elif make_archive_match:
            # Make an archive
            archive = make_archive_match.group(1)
            location = make_archive_match.group(2)
            self.output += ['\n', 'Executing make-archive %s %s at %s' % (archive, location,
                                                                     time.strftime('%c')), '\n']
            self.wrap_command(arch_impl.do_make_archive, line, sender, archive, location)
            logger.info("success: %s" % (line,))
            self.successful.append(line)
            self.output += ['\n', 'make-archive succeeded at %s' % (time.strftime('%c')), '\n']
        elif register_archive_match:
            # Register an archive.  Note that this
            # also works for unregistering an
            # archive by making the archive=='-d'
            # and the location be the archive.
            archive = register_archive_match.group(1)
            location = register_archive_match.group(2)
            self.output += ['\n', 'Executing register-archive %s %s at %s' % (archive, location,
                                                                         time.strftime('%c')), '\n']
            self.wrap_command(arch_impl.do_register_archive, line, sender, archive, location)
            logger.info("success: %s" % (line,))
            self.successful.append(line)
            self.output += ['\n', 'register-archive succeeded at %s' % (time.strftime('%c')), '\n']
        elif create_version_match:
            # Create a new line of development
            archive_revision = create_version_match.group(1)
            (archive, revision) = archive_revision.split('/', 1)
            validate_revision(sender, archive, revision, self.output, self.successful)
            logger.info("getting working dir for %s/%s" % (archive, revision))
            
            dirpath=os.path.join(workdir, archive)
            
            if not os.access(dirpath, os.W_OK):
                os.mkdir(dirpath)
                
            os.chdir(dirpath)
            dir=os.path.join(dirpath, revision)        
            if os.access(dir, os.W_OK):
                raise PQMException(sender, "Working dir already exists: " + dir)
            os.mkdir(dir)
            
            origdir = os.getcwd()
            self.output += ['\n', 'Executing create-version %s/%s at %s' % (archive, revision, time.strftime('%c')), '\n']
            try:
                os.chdir(dir)
                self.wrap_command(arch_impl.do_create_version, line, sender, archive, revision)
            except:
                os.chdir(origdir)
                raise
            os.chdir(origdir)
            logger.info("success: %s" % (line,))
            self.successful.append(line)
            self.output += ['\n', 'create-version succeeded at %s' % (time.strftime('%c')), '\n']
            try:
                os.chdir(dir)
                runtla(sender, 'commit', '-s', self.commitmsg)
                logger.info("commit succeeded")
            finally:
                os.chdir(origdir)
        elif my_id_match:
            myid = my_id_match.group(1)
            self.wrap_command(runtla, line, sender, myid)
            logger.info("success: %s" % (line,))
            self.successful.append(line)
            self.output += ['\n', 'my-id succeeded at %s' % (time.strftime('%c')), '\n']
        elif debug_match:
            self.debug = True
        else:
            self.unrecognized.append(line)
    def wrap_command(self, command, line, sender, *args):
        try:
            self.output += command(sender, *args)
        except PQMTlaFailure, e:
            raise PQMCmdFailure(sender, self.successful, line, self.output + e.output)
        
    def check_commit_regex(self, sender, to_archive_revision):
        """Confirm that commit message matches any regexp supplied in the
        configuration file.
        """
        if not allowed_revisions[to_archive_revision]["commit_re"]:
            # No regexp, therefore accept anything
            return
        regex = allowed_revisions[to_archive_revision]["commit_re"]
        if re.match(regex, self.commitmsg):
            # Regexp matched, accept the commitmsg
            return
        raise PQMException(sender,
                           "Commit message [%s] does not match commit_re [%s]"
                           % (self.commitmsg, regex)
                           )

    def check_target(self, sender, archive, revision, line):
        """Check that the sender is allowed to commit to toarchive/to_revision"""
        # FIXME check gpg etc etc.
        package=archive+'/'+revision
        if not allowed_revisions.has_key(package):
            raise PQMCmdFailure(sender, self.successful, line, ["Sender not authorised to commit to package %s" % package])
    def do_merge(self, from_archive_revision, to_archive_revision, merge_name, merge_method, line, sender):
        # Star-merge
        (from_archive, from_revision) = from_archive_revision.split('/', 1)
        (to_archive, to_revision) = to_archive_revision.split('/', 1)
#         if not allowed_revisions.has_key(to_archive_revision):
#             raise PQMException(sender, "Disallowed archive/revision: " + to_archive_revision)
        validate_revision(sender, from_archive, from_revision, self.output, self.successful)
        validate_revision(sender, to_archive, to_revision, self.output, self.successful)
        self.check_target( sender, to_archive, to_revision, line)
        self.check_commit_regex(sender, to_archive_revision)
        logger.info("getting working dir for %s/%s" % (to_archive, to_revision))
        logger.info("current cwd is %s", os.getcwd())
        dir = get_wd(sender, to_archive, to_revision,self.user_email)
        origdir = os.getcwd()
        self.output += ['\n', 'Executing %s %s/%s at %s' % (merge_name, from_archive, from_revision,
                                                               time.strftime('%c')), '\n']
        try:
            os.chdir(dir)
            self.wrap_command(merge_method, line, sender, from_archive, from_revision, to_archive, to_revision)
        except:
            os.chdir(origdir)
            raise
        self.output = run_precommit(sender, self.successful, to_archive, to_revision, self.output)
        os.chdir(origdir)
        logger.info("success: %s" % (line,))
        self.successful.append(line)
        self.output += ['\n', '%s succeeded at %s' % (merge_name, time.strftime('%c')), '\n']
        try:
            os.chdir(dir)
            runtla(sender, 'commit', '-s', self.commitmsg)
            logger.info("commit succeeded")
        finally:
            os.chdir(origdir)

def package_from_config(config, sender, archive, revision, fullpath):
    """Build the config tree described by `config` under `fullpath`, then
    locate the package archive/revision inside it and return its path.
    Raises PQMException when the package is not part of the config."""
    cfg_parts = config.split('/', 2)
    cfg_package = '%s/%s' % (cfg_parts[0], cfg_parts[1])
    target = '%s/%s' % (archive, revision)
    # TODO use the implementation stuff
    runtla(sender, 'get', cfg_package, fullpath)
    runtla(sender, 'build-config', '-d', fullpath, cfg_parts[2])
    if cfg_package == target:
        return fullpath
    # Scan the config listing (tab-separated: subdir, package) for the
    # wanted package's subdirectory.
    for entry in runtla(sender, 'cat-config', '-d', fullpath, cfg_parts[2]):
        fields = entry.split('\t')
        if fields[1].strip() == target:
            return os.path.join(fullpath, fields[0])
    raise PQMException(sender, "Package %s not found in config" % target)


def get_wd(sender, archive, revision, user_email):
    """Create and return a working tree for archive/revision.

    Enforces the per-package 'commiters' group restriction, honours the
    optional per-package 'build_dir' and 'build_config' settings, and
    checks the tree out with 'tla get' (directly, or via the build
    config).  Raises PQMException when the sender may not commit, or
    when the working dir already exists (from prep_wd).
    """
    dirpath=os.path.join(workdir, archive)
    package = archive + '/' + revision
    # Restrict commits to members of the configured group, if any.
    commiters=allowed_revisions[package]['commiters']
    if commiters and not user_email in groups[commiters]:
        logger.error ("%s is not permitted to commit to %s", user_email, package)
        raise PQMException(sender, "%s is not permitted to commit to %s" % (user_email, package))
    # A per-package build_dir overrides the global workdir.
    possible_dir=allowed_revisions[package]['build_dir']
    if possible_dir:
        dirpath=os.path.join(possible_dir, archive)
            
    config = allowed_revisions[package]['build_config']
    fullpath = prep_wd(sender, dirpath, revision, config)
    if config:
        # The package lives inside a built config tree.
        return package_from_config(config, sender, archive, revision, fullpath)
    else:
        runtla(sender, 'get', '%s/%s' % (archive, revision), fullpath)
        return fullpath

def prep_wd(sender, dirpath, revision, config):
    """Choose (and validate) the working-directory path for a checkout.

    Ensures `dirpath` exists, derives the full working path from the
    revision (and the config's archive component, when building from a
    config), and raises PQMException if that path already exists.
    Returns the chosen path without creating it.
    """
    # Use os.path.isdir, not os.access(..., W_OK), as the existence test:
    # the old W_OK probe treated an existing-but-unwritable directory as
    # missing and then crashed in os.mkdir with EEXIST.
    if not os.path.isdir(dirpath):
        os.mkdir(dirpath)

    if config:
        elements=re.split("/", config)
        fullpath=os.path.join(dirpath, "%s---%s" % (elements[0], revision))
    else:
        fullpath=os.path.join(dirpath, revision)

    # os.path.exists, not W_OK: a stale read-only tree must also count
    # as "already exists" rather than slip through to a failed checkout.
    if os.path.exists(fullpath):
        logger.error("Working dir already exists: " + fullpath)
        raise PQMException(sender, "Working dir already exists: " + fullpath)
    return fullpath

def rm_rf(top):
    """Delete everything underneath `top`: files, symlinks and
    subdirectories.  `top` itself is left in place, emptied."""
    for base, subdirs, filenames in os.walk(top, topdown=False):
        for fname in filenames:
            os.remove(os.path.join(base, fname))
        for dname in subdirs:
            path = os.path.join(base, dname)
            # Symlinked directories must be unlinked, not rmdir'ed.
            if os.path.islink(path):
                os.remove(path)
            else:
                os.rmdir(path)

def cleanup_wd():
    """Empty the global workdir, plus every per-package build_dir named
    in allowed_revisions."""
    for entry in os.listdir(workdir):
        rm_rf(os.path.join(workdir, entry))
    for pkg, options in allowed_revisions.items():
        build_dir = options['build_dir']
        if build_dir:
            for entry in os.listdir(build_dir):
                rm_rf(os.path.join(build_dir, entry))

class LockFile(object):
    """A lock made on the file system, to prevent concurrent execution
    of this code.

    acquire() creates the file with O_CREAT|O_EXCL so only one process
    can hold the lock; release() removes it.  Both are no-ops under the
    global no_act flag, and acquire() exits the process (0 in cron mode,
    1 otherwise) when the lock is already held.
    """
    def __init__(self, filename):
        # filename: path of the lock file; locked: whether we hold it.
        self.filename=filename
        self.locked=False
    def acquire(self):
        """Take the lock, or exit the process if it is already held."""
        if no_act:
            return
        logger.info('creating lockfile')
        try:
            # Close the descriptor at once -- only the file's existence
            # matters.  (The original leaked the fd on every acquire.)
            os.close(os.open(self.filename, os.O_CREAT | os.O_EXCL))
            self.locked=True
        except OSError:
            if cron_mode:
                # Another run is in progress; that's normal under cron.
                logger.info("lockfile %s already exists, exiting", self.filename)
                sys.exit(0)
            else:
                logger.error("Couldn't create lockfile: %s", self.filename)
                sys.exit(1)
    def release(self):
        """Drop the lock if we hold it; harmless otherwise."""
        if not self.locked:
            return
        if no_act:
            return
        logger.debug('Removing lock file: %s', self.filename)
        os.unlink(self.filename)
        self.locked=False

def do_run_mode(queuedir, logger, logdir, msg, mail_reply, mail_server, from_address, fromaddr, print_report):
    scripts = find_patches(queuedir, logger, verify_sigs)
    (goodscripts, badscripts) = ([], [])
    for script in scripts:
        run_one_script(logger, script, logdir, msg, goodscripts, badscripts, mail_reply, mail_server, from_address, fromaddr)
            
    if print_report:
        for (patchname, logname) in goodscripts:
            print "Patch: " + patchname
            print "Status: success"
            print "Log: " + logname
            print
        for (patchname, logname) in badscripts:
            print "Patch: " + patchname
            print "Status: failure"
            print "Log: " + logname
            print

def run_one_script(logger, script, logdir, msg, goodscripts, badscripts, mail_reply, mail_server, from_address, fromaddr):
    try:
        logger.info('trying script ' + script.filename)
        logname = os.path.join(logdir, os.path.basename(script.filename) + '.log')
        (sender, subject, msg, sig) = read_email(logger, open(script.filename))
        if verify_sigs:
            sigid,siguid = verify_sig(script.getSender(), msg, sig, 0, logger)
        success = False
        output = []
        failedcmd=None

        cmd=CommandRunner()
        (successes, unrecognized, output) = cmd.run(script.getSender(),
                                                    script.getSubject(),
                                                    script.getLines(),
                                                    siguid)
        logger.info('successes: %s' % (successes,))
        logger.info('unrecognized: %s' % (unrecognized,))
        success = True
        goodscripts.append((script.filename, logname))
    except PQMCmdFailure, e:
        badscripts.append((script.filename, logname))
        successes = e.goodcmds
        failedcmd = e.badcmd
        output = e.output
        unrecognized=[]
    except PQMException, e:
        badscripts.append((script.filename, logname))
        successes = []
        failedcmd = []
        output = [str(e)]
        unrecognized=[]
    log_list(logname, output)
    os.unlink(script.filename)
    if mail_reply:
        send_mail_reply(success, successes, unrecognized,
                        mail_server, from_address, script.getSender(),
                        fromaddr, failedcmd, output, cmd)
    else:
        logger.info('not sending mail reply')

def gather_output(retmesg, output):
    """Return the elements of `output` concatenated, each preceded by a
    newline.  (`retmesg` is accepted for historical reasons; it is not
    read.)"""
    return ''.join(['\n%s' % chunk for chunk in output])

def send_mail_reply(success, successes, unrecognized, mail_server, from_address, sender, fromaddr, failedcmd, output, cmd):
    """Mail the outcome of a processed request back to its sender.

    Builds the reply body from the per-command results and, on failure,
    the failed command plus a tail (or, in debug mode, all) of the log
    output, then sends it via SMTP.
    """
    # NOTE: the original passed a name `line` that is not defined in this
    # scope (a latent NameError that only avoided firing when a
    # module-level `line` happened to exist).  mail_format_successes
    # never reads that argument, so pass None explicitly.
    if success:
        retmesg = mail_format_successes(successes, "Command was successful.", unrecognized, None)
        if len(successes) > 0:
            statusmsg='success'
        else:
            statusmsg='no valid commands given'
    else:
        retmesg = mail_format_successes(successes, "Command passed checks, but was not committed.", unrecognized, None)
        retmesg+= "\n%s" % failedcmd
        retmesg+= '\nCommand failed!'
        if not cmd.debug:
            retmesg+= '\nLast 20 lines of log output:'
            retmesg += gather_output (retmesg, output[-20:])
        else:
            retmesg+= '\nAll lines of log output:'
            retmesg += gather_output (retmesg, output)
        statusmsg='failure'
    server = smtplib.SMTP(mail_server)
    server.sendmail(from_address, [sender], 'From: %s\r\nTo: %s\r\nSubject: %s\r\n\r\n%s\n' % (fromaddr, sender, statusmsg, retmesg))
    server.quit()

def mail_format_successes(successes, command_msg, unrecognized, line):
    """Format the reply body: quote each successful command followed by
    `command_msg`, then quote each unrecognized input line once.

    `line` is unused; it is kept for interface compatibility.
    """
    retmesg = []
    for success in successes:
        retmesg.append('> ' + success)
        retmesg.append(command_msg)
    # The unrecognized-lines report was accidentally nested inside the
    # loop above, so it was repeated once per success and dropped
    # entirely when there were no successes at all.
    for unrec in unrecognized:
        retmesg.append('> ' + unrec)
        retmesg.append('Unrecognized command.')
    return '\n'.join(retmesg)

def log_list(logname, list):
    """Write every element of `list` verbatim to the file `logname`,
    overwriting any previous contents."""
    logfile = open(logname, 'w')
    logfile.writelines(list)
    logfile.close()

def run(pqm_subdir, run_mode, queuedir, logger, logdir, msg, mail_reply, mail_server, from_address, fromaddr, print_report):
    """Take the global queue lock, then process the queue if run_mode is
    set; the lock is always dropped on the way out."""
    lock = LockFile(os.path.join(pqm_subdir, 'arch-pqm.lock'))
    lock.acquire()
    try:
        if run_mode:
            do_run_mode(queuedir, logger, logdir, msg, mail_reply,
                        mail_server, from_address, fromaddr, print_report)
    finally:
        lock.release()
        
def do_read_mode(logger):
    """Read one request e-mail from stdin and queue it as a patch file.

    Verifies the GPG signature (when verify_sigs is on) and appends the
    signature id to the transactions file to block replays.  On any
    error, a traceback is mailed back to the sender when possible and
    the process exits 1; on success it exits 0.
    """
    sender = None
    try:
        (sender, subject, msg, sig) = read_email(logger)
        if verify_sigs:
            sigid,siguid = verify_sig(sender, msg, sig, 1, logger)
            # Record this signature so the same request can't be replayed.
            open(transaction_file, 'a').write(sigid + '\n')
        fname = 'patch.%d' % (time.time())
        logger.info('new patch ' + fname)
        # Write under a temp name, then rename, so run mode never picks
        # up a half-written patch file.
        f = open('tmp.' + fname, 'w')
        f.write('From: ' + sender + '\n')
        f.write('Subject: ' + subject + '\n')
        f.write(string.join(re.split('\r?\n', msg), '\n')) # canonicalize line endings
        f.close()
        os.rename('tmp.' + fname, fname)
    except:
        # Best effort: mail the traceback back to the sender, if known.
        if sender and mail_reply:
            server = smtplib.SMTP(mail_server)
            tb=string.join(traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback), '')
            server.sendmail(from_address, [sender], 'From: %s\r\nTo: %s\r\nSubject: error processing requests\r\n\r\n' % (fromaddr, sender) + 'An error was encountered:\n' + tb)
            server.quit()
        logger.exception("Caught exception")
        sys.exit(1)
    sys.exit(0)

arch_path = 'arx'
arch_impl = None
gnupatch_path = 'patch'
logfile_name = 'arch-pqm.log'
default_mail_log_level = logging.ERROR
mail_server = 'localhost'
queuedir = None
workdir = None
logdir = None
mail_reply = 1
verify_sigs = 1
from_address = None
allowed_revisions = {}
precommit_hook = []
groups = {}

# Parse the command line.  Option semantics are handled in the loop
# further down -- except --help/--version, which must act immediately.
try:
    opts, args = getopt.getopt(sys.argv[1:], 'vqc:dnrk',
                               ['verbose', 'quiet', 'config=', 'debug', 'no-log',
                                'no-act', 'read', 'run', 'report', 'cron', 'no-verify',
                                'queuedir=', 'keyring=', 'help', 'version', ])
except getopt.GetoptError, e:
    sys.stderr.write("Error reading arguments: %s\n" % e)
    usage(1)
for (key, val) in opts:
    if key == '--help':
        usage(0)
    elif key == '--version':
        usage(0, ver_only=1)
# At most one positional argument is accepted (presumably the queue
# directory -- it is handed to get_queuedir below; confirm).
if len(args) > 1:
    sys.stderr.write("Unknown arguments: %s\n" % args[1:])
    usage(1)

logger = logging.getLogger("arch-pqm")

# Mode/behaviour flags, adjusted by the option loop below.
loglevel = logging.WARN
no_act = 0
debug_mode = 0
run_mode = 0
read_mode = 0
cron_mode = 0
print_report = 0
no_log = 0
batch_mode = 0
custom_config_files = 0
for key, val in opts:
    if key in ('-v', '--verbose'):
        # Each -v steps WARN -> INFO -> DEBUG.
        if loglevel == logging.INFO:
            loglevel = logging.DEBUG
        elif loglevel == logging.WARN:
            loglevel = logging.INFO
    elif key in ('-q', '--quiet'):
        # Each -q steps WARN -> ERROR -> CRITICAL.  (The second branch
        # used to compare against WARN again, so CRITICAL was
        # unreachable no matter how many -q were given.)
        if loglevel == logging.ERROR:
            loglevel = logging.CRITICAL
        elif loglevel == logging.WARN:
            loglevel = logging.ERROR
    elif key in ('-c', '--config'):
        # The first -c replaces the default config file list.
        if not custom_config_files:
            custom_config_files = 1
            configfile_names = []
        configfile_names.append(os.path.abspath(os.path.expanduser(val)))
    elif key in ('-k', '--keyring'):
        # The original wrote `key in ('--keyring')` -- a string, not a
        # tuple -- which matched by substring (that is how -k ever hit
        # this branch).  Made explicit; note -k takes no value in the
        # getopt spec, so val is '' for it.  The duplicate --keyring
        # branch further down was dead and has been removed.
        arch_pqm.keyring = val
    elif key in ('-n', '--no-act'):
        no_act = 1
    elif key in ('-d', '--debug'):
        debug_mode = 1
    elif key in ('--queuedir',):
        queuedir = val
    elif key in ('--no-log',):
        no_log = 1
    elif key in ('--no-verify',):
        verify_sigs = 0
    elif key in ('-r', '--read'):
        read_mode = 1
    elif key in ('--run',):
        run_mode = 1
    elif key in ('--cron',):
        cron_mode = 1
    elif key in ('--report',):
        print_report = 1

# The logger itself passes everything; each handler filters to its own
# level (stderr here, the optional file handler later).
logger.setLevel(logging.DEBUG)
stderr_handler = logging.StreamHandler(strm=sys.stderr)
# One setLevel suffices; the original called it twice.
stderr_handler.setLevel(loglevel)
stderr_handler.setFormatter(logging.Formatter(fmt="%(name)s [%(thread)d] %(levelname)s: %(message)s"))
logger.addHandler(stderr_handler)

if not (read_mode or run_mode):
    logger.error("Either --read or --run must be specified")
    sys.exit(1)

configp = ConfigParser()
configfile_names = map(lambda x: os.path.abspath(os.path.expanduser(x)), configfile_names)
logger.debug("Reading config files: %s" % (configfile_names,))
configp.read(configfile_names)

if configp.has_option('DEFAULT', 'arch_path'):
    arch_path = configp.get('DEFAULT', 'arch_path')
elif configp.has_option('DEFAULT', 'tlapath'): 
    logger.warn("Option 'tlapath' is deprecated")
    arch_path = configp.get('DEFAULT', 'tlapath')

if configp.has_option('DEFAULT', 'groups'):
    for group in configp.get('DEFAULT', 'groups').split(','):
        groups[group.strip()]=[]
    logger.info('found groups %s', groups)
    for group in groups.keys():
        for member in configp.get(group, 'members').split(','):
            groups[group].append(member.strip())
        logger.info('group %s has members %s', group, groups[group])

# Sanity-check arch_path.  The original condition was inverted
# (`if os.access(...)`): it bailed out precisely when the binary WAS
# executable.  Only explicit paths are validated here, because a bare
# command name (like the default 'arx') is resolved via $PATH at exec
# time and os.access() cannot check that.
if os.sep in arch_path and not os.access(arch_path, os.X_OK):
    logger.error("Can't execute \"%s\", please fix arch_path" % (arch_path,))
    sys.exit(1)

# Choose the arch implementation handler: honour an explicit 'arch_impl'
# config option, otherwise sniff the output of `arch_path --version`.
if configp.has_option('DEFAULT', 'arch_impl'):
    impl = configp.get('DEFAULT', 'arch_impl')
    if impl == 'tla':
        arch_impl = TlaHandler()
    elif impl == 'arx':
        arch_impl = ArXHandler()
    elif impl == 'baz':
        arch_impl = Baz1_1Handler()
    elif impl == 'baz1.0':
        arch_impl = Baz1_0Handler()
    else:
        logger.error("Unknown arch_impl \"%s\"" % (impl,))
        sys.exit(1)
else:
    # Auto-detect from the version banner.
    (status, msg, output) = popen_noshell(arch_path, '--version')
    for line in output:
        if line.find('baz ') >= 0:
            arch_impl = Baz1_1Handler()
            break
        elif line.find('tla ') >= 0:
            arch_impl = TlaHandler()
            break
        elif line.find('ArX ') >= 0:
            arch_impl = ArXHandler()
            break
    if not arch_impl:
        logger.error("Couldn't determine arch implementation, please set arch_impl")
        sys.exit(1)

# get_option/get_boolean_option take a default as the third argument --
# presumably extensions on the ConfigParser exported by arch_pqm;
# confirm there.
arch_pqm.gpgv_path = configp.get_option('DEFAULT', 'gpgv_path', 'gpgv')
myname = configp.get_option('DEFAULT', 'myname', 'Arch Patch Queue Manager')

# from_address is mandatory: it is the envelope sender and, combined
# with myname, the From: header of every reply.
if configp.has_option('DEFAULT', 'from_address'):
    from_address = configp.get('DEFAULT', 'from_address')
else:
    logger.error("No from_address specified")
    sys.exit(1)
fromaddr = '%s <%s>' % (myname, from_address)

mail_reply=configp.get_boolean_option('DEFAULT', 'mail_reply',1)
verify_sigs=configp.get_boolean_option('DEFAULT', 'verify_sigs', verify_sigs)

# --queuedir on the command line beats the config/positional lookup.
if not queuedir:
    queuedir = get_queuedir (configp, logger, args)
queuedir=os.path.abspath(queuedir)

# Point HOME at the queue directory unless explicitly disabled --
# presumably so the arch tools read per-queue configuration; confirm.
# (The body line was indented with a tab; normalised to the file's
# 4-space style.)
if not configp.has_option('DEFAULT', 'dont_set_home'):
    os.environ['HOME'] = queuedir

workdir=dir_from_option(configp, 'workdir', 'workdir')
logdir=dir_from_option(configp, 'logdir', 'logs')

# The keyring may come from --keyring or the config; it is required and
# must be readable.
if not arch_pqm.keyring:
    if configp.has_option('DEFAULT', 'keyring'):
        arch_pqm.keyring = configp.get('DEFAULT', 'keyring')
    else:
        logger.error("No keyring specified on command line or in config files.")
        sys.exit(1)
if not os.access(arch_pqm.keyring, os.R_OK):
    logger.error("Couldn't access keyring %s" % (arch_pqm.keyring,))
    sys.exit(1)

# Every config section that is not a group section names a managed
# archive/revision; the option maps are filled in below.
sects = configp.sections()
if len(sects) > 0:
    for sect in sects:
        if str(sect) in groups.keys():
            continue
        logger.info("managing revision: " + sect)
        allowed_revisions[sect] = {}
else:
    logger.error("No revisions to manage!")
    sys.exit(1)
    

rev_optionhandler = RevisionOptionHandler(allowed_revisions, configp)

# Replace each empty placeholder with the revision's resolved option map
# (used later as keys 'build_dir', 'build_config', 'commiters', ...).
for rev in allowed_revisions.keys():
    allowed_revisions[rev] = rev_optionhandler.get_option_map(rev)

do_mkdir(queuedir)
os.chdir(queuedir)
do_mkdir(workdir)
do_mkdir(logdir)
pqm_subdir = os.path.join(queuedir, 'arch-pqm')
arch_pqm.pqm_subdir = pqm_subdir
do_mkdir(pqm_subdir)

if configp.has_option('DEFAULT', 'logfile'):
    logfile_name = configp.get('DEFAULT', 'logfile')

# Attach the log file handler; relative names live under pqm_subdir.
if not no_log:
    if not os.path.isabs(logfile_name):
        logfile_name = os.path.join(pqm_subdir, logfile_name)
    logger.debug("Adding log file: %s" % (logfile_name,))
    filehandler = logging.FileHandler(logfile_name)
    if loglevel == logging.WARN:
        filehandler.setLevel(logging.INFO)
    else:
        filehandler.setLevel(logging.DEBUG)
    logger.addHandler(filehandler)
    filehandler.setFormatter(logging.Formatter(fmt="%(asctime)s %(name)s [%(thread)d] %(levelname)s: %(message)s", datefmt="%b %d %H:%M:%S"))

if not (debug_mode or batch_mode):
    # Don't log to stderr past this point
    logger.removeHandler(stderr_handler)

# Load already-processed signature ids so replayed requests are skipped.
transaction_file = os.path.join(queuedir, 'transactions-completed')
if os.access(transaction_file, os.R_OK):
    lines = open(transaction_file).readlines()
    for line in lines:
        arch_pqm.used_transactions[line[0:-1]] = 1

# --read handles exactly one mail on stdin and exits inside the call.
if read_mode:
    do_read_mode(logger)

assert(run_mode)

# NOTE(review): `msg` is not assigned on this path unless the arch_impl
# version-sniffing branch above ran or arch_pqm exports it -- confirm.
run(pqm_subdir, run_mode, queuedir, logger, logdir, msg, mail_reply, mail_server, from_address, fromaddr, print_report)
logger.info("main thread exiting...")
sys.exit(0)

