From: Joerg Jaspert
Date: Sat, 18 Sep 2010 14:51:39 +0000 (+0200)
Subject: Merge remote branch 'tolimar/tolimar-adopt-bts-categorize' into merge
X-Git-Url: https://git.decadent.org.uk/gitweb/?a=commitdiff_plain;h=dec160172b136c712dddf2c942ee03a52e8e0b88;hp=6d03dbb954cd67e522374c12225aa08a129e5d60;p=dak.git

Merge remote branch 'tolimar/tolimar-adopt-bts-categorize' into merge

* tolimar/tolimar-adopt-bts-categorize:
  Whatever the difference between the summary and the subject field is: We
  Port bts_categorize to python-debianbts

Signed-off-by: Joerg Jaspert
---

diff --git a/config/backports/cron.monthly b/config/backports/cron.monthly
index d2f403fc..38a57fd1 100755
--- a/config/backports/cron.monthly
+++ b/config/backports/cron.monthly
@@ -16,9 +16,9 @@ for m in mail import; do
     if [ -f $m ]; then
         mv $m ${m}-$DATE
         sleep 20
-        gzip -9 ${m}-$DATE
-        chgrp backports ${m}-$DATE.gz
-        chmod 660 ${m}-$DATE.gz
+        xz -9 ${m}-$DATE
+        chgrp backports ${m}-$DATE.xz
+        chmod 660 ${m}-$DATE.xz
     fi;
 done
diff --git a/config/backports/dak.conf b/config/backports/dak.conf
index 8a2b23a9..eaa89c27 100644
--- a/config/backports/dak.conf
+++ b/config/backports/dak.conf
@@ -437,3 +437,8 @@ Import-LDAP-Fingerprints
   };
   KeyServer "wwwkeys.eu.pgp.net";
 };
+
+Changelogs
+{
+  Export "/srv/backports-master.debian.org/export/changelogs";
+}
diff --git a/config/backports/dinstall.functions b/config/backports/dinstall.functions
index 0ae81724..2757a179 100644
--- a/config/backports/dinstall.functions
+++ b/config/backports/dinstall.functions
@@ -604,3 +604,12 @@ Current action: ${1}
 Action start: ${RIGHTNOW}
 EOF
 }
+
+# extract changelogs and stuff
+function changelogs() {
+    log "Extracting changelogs"
+    dak make-changelog -e
+    mkdir -p ${exportpublic}/changelogs
+    cd ${exportpublic}/changelogs
+    rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
+}
diff --git a/config/backports/vars b/config/backports/vars
index 7f051f82..25158581 100644
--- a/config/backports/vars
+++ b/config/backports/vars
@@ -1,10 +1,11 @@
 # locations used by many scripts
 
 base=/srv/backports-master.debian.org
+public=/srv/backports-web.debian.org
 bindir=$base/bin
 ftpdir=$base/ftp/
 indices=$ftpdir/indices
-webdir=/srv/backports-web.debian.org/underlay/
+webdir=$public/underlay/
 scriptdir=$base/scripts
 
 archs="alpha amd64 arm armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc kfreebsd-i386 kfreebsd-amd64 "
@@ -27,6 +28,8 @@ incoming=$base/incoming
 logdir=$base/log/cron/
 mirrordir=$base/mirror/
 newstage=$queuedir/newstage/
+exportdir=$base/export/
+exportpublic=$public/export/
 
 ftpgroup=debadmin
diff --git a/config/debian/cron.dinstall b/config/debian/cron.dinstall
index 0841f838..a9e9bd18 100755
--- a/config/debian/cron.dinstall
+++ b/config/debian/cron.dinstall
@@ -388,6 +388,14 @@ rm -f "${LOCK_DAILY}"
 ts "locked part finished"
 state "postlock"
 
+GO=(
+    FUNC="changelogs"
+    TIME="changelogs"
+    ARGS=""
+    ERR="false"
+)
+stage $GO &
+
 GO=(
     FUNC="pgdump_post"
     TIME="pg_dump2"
diff --git a/config/debian/cron.monthly b/config/debian/cron.monthly
index 57706bf8..685a1939 100755
--- a/config/debian/cron.monthly
+++ b/config/debian/cron.monthly
@@ -19,9 +19,9 @@ for m in mail bxamail; do
     if [ -f $m ]; then
        chown dak:ftpteam ${m}
        chmod 660 ${m}
        sleep 20
-       gzip -9 ${m}-$DATE
-       chgrp $ftpgroup ${m}-$DATE.gz
-       chmod 660 ${m}-$DATE.gz
+       xz -9 ${m}-$DATE
+       chgrp $ftpgroup ${m}-$DATE.xz
+       chmod 660 ${m}-$DATE.xz
     fi;
 done
diff --git a/config/debian/dinstall.functions b/config/debian/dinstall.functions
index 1b508d25..c5aeed13 100644
--- a/config/debian/dinstall.functions
+++ b/config/debian/dinstall.functions
@@ -604,3 +604,12 @@ Current action: ${1}
 Action start: ${RIGHTNOW}
 EOF
 }
+
+# extract changelogs and stuff
+function changelogs() {
+    log "Extracting changelogs"
+    dak make-changelog -e
+    mkdir -p ${exportpublic}/changelogs
+    cd ${exportpublic}/changelogs
+    rsync -aHW --delete --delete-after --ignore-errors ${exportdir}/changelogs/. .
+}
diff --git a/config/debian/vars b/config/debian/vars
index 2a3200cf..0d5eb529 100644
--- a/config/debian/vars
+++ b/config/debian/vars
@@ -25,6 +25,8 @@ accepted=$queuedir/accepted/
 mirrordir=$base/mirror/
 incoming=$base/incoming
 newstage=$queuedir/newstage/
+exportdir=$base/export/
+exportpublic=$public/rsync/export/
 
 ftpgroup=debadmin
diff --git a/config/homedir/.bash_logout b/config/homedir/.bash_logout
new file mode 100644
index 00000000..de4f5f75
--- /dev/null
+++ b/config/homedir/.bash_logout
@@ -0,0 +1,7 @@
+# ~/.bash_logout: executed by bash(1) when login shell exits.
+
+# when leaving the console clear the screen to increase privacy
+
+if [ "$SHLVL" = 1 ]; then
+    [ -x /usr/bin/clear_console ] && /usr/bin/clear_console -q
+fi
diff --git a/config/homedir/.bash_profile b/config/homedir/.bash_profile
new file mode 120000
index 00000000..5157537b
--- /dev/null
+++ b/config/homedir/.bash_profile
@@ -0,0 +1 @@
+.bashrc
\ No newline at end of file
diff --git a/config/homedir/.bashrc b/config/homedir/.bashrc
new file mode 100644
index 00000000..0297a816
--- /dev/null
+++ b/config/homedir/.bashrc
@@ -0,0 +1,59 @@
+# bashrc for dak user
+
+# If not running interactively, don't do anything
+[ -z "$PS1" ] && return
+
+
+# append to the history file, don't overwrite it
+shopt -s histappend
+export HISTCONTROL=ignoreboth
+
+# check the window size after each command and, if necessary,
+# update the values of LINES and COLUMNS.
+shopt -s checkwinsize + +# make less more friendly for non-text input files, see lesspipe(1) +[ -x /usr/bin/lesspipe ] && eval "$(SHELL=/bin/sh lesspipe)" + +## A little nice prompt. +PS1='`_ret=$?; if test $_ret -ne 0; then echo "\[\033[01;31m\]$_ret "; set ?=$_ret; unset _ret; fi`\[\033[01;33m\][`git branch 2>/dev/null|cut -f2 -d\* -s` ] \[\033[01;32m\]\u@\[\033[00;36m\]\h\[\033[01m\]:\[\033[00;37m\]\w\[\033[00m\]\$ ' + +# If this is an xterm set the title to user@host:dir +case "$TERM" in +xterm*|rxvt*) + PS1="\[\e]0;\u@\h: \w\a\]$PS1" + ;; +*) + ;; +esac + +case "$HOSTNAME" in + franck) + export SCRIPTVARS=/srv/ftp-master.debian.org/dak/config/debian/vars + ;; + morricone) + export SCRIPTVARS=/srv/backports-master.debian.org/dak/config/backports/vars + ;; + chopin|ries) + export SCRIPTVARS=/srv/security-master.debian.org/dak/config/debian-security/vars + ;; + *) + echo "Unconfigured dak host, not importing the usual vars" + ;; +esac + +. $SCRIPTVARS + +function emacs() { + export EDITOR=$(which emacs) +} + +export PAGER=less +export LESS="-X" +export EDITOR=$(which vim) +export HISTFILESIZE=6000 +export GREP_OPTIONS="--color=auto" +export CDPATH=".:~:${base}:${public}:${queuedir}" + +alias base='cd ${base}' +alias config='cd ${configdir}' diff --git a/config/homedir/.emacs b/config/homedir/.emacs new file mode 100644 index 00000000..8906d3f8 --- /dev/null +++ b/config/homedir/.emacs @@ -0,0 +1,9 @@ +(custom-set-variables + ;; custom-set-variables was added by Custom -- don't edit or cut/paste it! + ;; Your init file should contain only one such instance. + '(global-font-lock-mode t nil (font-lock)) + '(show-trailing-whitespace t)) +(custom-set-faces + ;; custom-set-faces was added by Custom -- don't edit or cut/paste it! + ;; Your init file should contain only one such instance. + ) diff --git a/config/homedir/.muttrc b/config/homedir/.muttrc new file mode 100644 index 00000000..cd5ae637 --- /dev/null +++ b/config/homedir/.muttrc @@ -0,0 +1,158 @@ +# +# System configuration file for Mutt +# + +# default list of header fields to weed when displaying +# +ignore "from " received content- mime-version status x-status message-id +ignore sender references return-path lines +ignore date delivered-to precedence errors-to in-reply-to user-agent +ignore x-loop x-sender x-mailer x-msmail-priority x-mimeole x-priority +ignore x-accept-language x-authentication-warning + +# emacs-like bindings +bind editor "\e" kill-word +bind editor "\e" kill-word + +# map delete-char to a sane value +bind editor delete-char + +# don't add the hostname to the From header +unset use_domain +# don't generate a From header +unset use_from + +# Specifies how to sort messages in the index menu. 
+set sort=threads + +# Exim does not remove Bcc headers +unset write_bcc +# Postfix and qmail use Delivered-To for detecting loops +unset bounce_delivered + +# imitate the old search-body function +macro index \eb '/~b ' 'search in message bodies' + +# simulate the old url menu +macro index \cb |urlview\n 'call urlview to extract URLs out of a message' +macro pager \cb |urlview\n 'call urlview to extract URLs out of a message' + +# Show documentation when pressing F1 +macro generic "!zless /usr/share/doc/mutt/manual.txt.gz\n" "Show Mutt documentation" +macro index "!zless /usr/share/doc/mutt/manual.txt.gz\n" "Show Mutt documentation" +macro pager "!zless /usr/share/doc/mutt/manual.txt.gz\n" "Show Mutt documentation" + +# Use folders which match on \\.gz$ as gzipped folders: +open-hook \\.gz$ "gzip -cd %f > %t" +close-hook \\.gz$ "gzip -c %t > %f" +append-hook \\.gz$ "gzip -c %t >> %f" + +# Use folders which match on \\.xz$ as xz compressed folders: +open-hook \\.xz$ "xz -cd %f > %t" +close-hook \\.xz$ "xz -c %t > %f" +append-hook \\.xz$ "xz -c %t >> %f" + +# colors +color normal white black +color attachment brightyellow black +color hdrdefault cyan black +color indicator black cyan +color markers brightred black +color quoted green black +color signature cyan black +color status brightgreen blue +color tilde blue black +color tree red black + +# aliases for broken MUAs +charset-hook windows-1250 CP1250 +charset-hook windows-1251 CP1251 +charset-hook windows-1252 CP1252 +charset-hook windows-1253 CP1253 +charset-hook windows-1254 CP1254 +charset-hook windows-1255 CP1255 +charset-hook windows-1256 CP1256 +charset-hook windows-1257 CP1257 +charset-hook windows-1258 CP1258 + +## +## More settings +## +set ispell=ispell + +# GnuPG configuration +set pgp_decode_command="/usr/bin/gpg --status-fd=2 %?p?--passphrase-fd 0? --no-verbose --quiet --batch --output - %f" +set pgp_verify_command="/usr/bin/gpg --status-fd=2 --no-verbose --quiet --batch --output - --verify %s %f" +set pgp_decrypt_command="/usr/bin/gpg --status-fd=2 --passphrase-fd 0 --no-verbose --quiet --batch --output - %f" +set pgp_sign_command="/usr/bin/gpg --no-verbose --batch --quiet --output - --passphrase-fd 0 --armor --detach-sign --textmode %?a?-u %a? %f" +set pgp_clearsign_command="/usr/bin/gpg --no-verbose --batch --quiet --output - --passphrase-fd 0 --armor --textmode --clearsign %?a?-u %a? %f" +set pgp_encrypt_only_command="/usr/lib/mutt/pgpewrap /usr/bin/gpg --batch --quiet --no-verbose --output - --encrypt --textmode --armor --always-trust -- -r %r -- %f" +set pgp_encrypt_sign_command="/usr/lib/mutt/pgpewrap /usr/bin/gpg --passphrase-fd 0 --batch --quiet --no-verbose --textmode --output - --encrypt --sign %?a?-u %a? --armor --always-trust -- -r %r -- %f" +set pgp_import_command="/usr/bin/gpg --no-verbose --import -v %f" +set pgp_export_command="/usr/bin/gpg --no-verbose --export --armor %r" +set pgp_verify_key_command="/usr/bin/gpg --verbose --batch --fingerprint --check-sigs %r" +set pgp_list_pubring_command="/usr/bin/gpg --no-verbose --batch --quiet --with-colons --list-keys %r" +set pgp_list_secring_command="/usr/bin/gpg --no-verbose --batch --quiet --with-colons --list-secret-keys %r" +set pgp_good_sign="^\\[GNUPG:\\] VALIDSIG" + +set nomove +bind pager previous-line +bind pager next-line +bind index previous-entry +bind index next-entry +bind index previous-entry +bind index next-entry +bind pager previous-entry +bind pager next-entry +bind pager $ bottom +bind index next-unread +bind index \c? 
search-reverse +bind index \c\\ search-opposite +set edit_hdrs +set xterm_set_titles + +ignore * +unignore date from to cc subject x-mailer resent-from reply-to mail-followup-to + +set index_format="%4C %Z %(%b%d %H) %-15.15L (%?l?%4l&%4c?) %s" + +set folder=~/mail + +unset confirmappend + +my_hdr From: Archive Administrator + +macro index s = + +set tmpdir="/tmp" + +macro index s = +macro index c = +macro pager s = +macro pager c = + +set index_format="%4C %Z %(%b%d %H) %-15.15L (%?l?%4l&%4c?) %s" + + +set delete=yes +unset confirmappend + +set implicit_autoview +auto_view text/html + +set pager_stop +set wrap_search # Wrap search to top/bottom. +set smart_wrap # wrap long lines at word boundary. +set tilde # Internal Pager: ~~~~ at and of message? + +set pager_context=3 +set pager_index_lines=9 # number of lines to see from the index +set pgp_timeout=60 # number of seconds to cache passphrase + +set mail_check=1 +set timeout=1 + +set beep +set beep_new +set fast_reply +set metoo diff --git a/config/homedir/.profile b/config/homedir/.profile new file mode 100644 index 00000000..bf8754f5 --- /dev/null +++ b/config/homedir/.profile @@ -0,0 +1,12 @@ +# if running bash +if [ -n "$BASH_VERSION" ]; then + # include .bashrc if it exists + if [ -f "$HOME/.bashrc" ]; then + . "$HOME/.bashrc" + fi +fi + +# set PATH so it includes user's private bin if it exists +if [ -d "$HOME/bin" ] ; then + PATH="$HOME/bin:$PATH" +fi diff --git a/config/homedir/.psqlrc b/config/homedir/.psqlrc new file mode 100644 index 00000000..af5f96de --- /dev/null +++ b/config/homedir/.psqlrc @@ -0,0 +1 @@ +\set HISTSIZE 12000 diff --git a/dak/dakdb/update36.py b/dak/dakdb/update36.py new file mode 100644 index 00000000..d5810f68 --- /dev/null +++ b/dak/dakdb/update36.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python +# coding=utf8 + +""" +Add processed field to changes_pending_files + +@contact: Debian FTP Master +@copyright: 2010 Mark Hymers +@license: GNU General Public License version 2 or later +""" + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +################################################################################ + + +################################################################################ + +import psycopg2 +from daklib.dak_exceptions import DBUpdateError + +################################################################################ +def do_update(self): + """ + Add processed field to changes_pending_files + """ + print __doc__ + try: + c = self.db.cursor() + c.execute('ALTER TABLE changes_pending_files ADD COLUMN processed BOOL DEFAULT FALSE') + c.execute("UPDATE config SET value = '36' WHERE name = 'db_revision'") + self.db.commit() + + except psycopg2.ProgrammingError, msg: + self.db.rollback() + raise DBUpdateError, 'Unable to apply build_queue update 36, rollback issued. 
Error message : %s' % (str(msg)) diff --git a/dak/process_new.py b/dak/process_new.py index fc952d2a..dd990f1f 100755 --- a/dak/process_new.py +++ b/dak/process_new.py @@ -333,43 +333,6 @@ def edit_overrides (new, upload, session): return new -################################################################################ - -def edit_note(note, upload, session): - # Write the current data to a temporary file - (fd, temp_filename) = utils.temp_filename() - editor = os.environ.get("EDITOR","vi") - answer = 'E' - while answer == 'E': - os.system("%s %s" % (editor, temp_filename)) - temp_file = utils.open_file(temp_filename) - newnote = temp_file.read().rstrip() - temp_file.close() - print "New Note:" - print utils.prefix_multi_line_string(newnote," ") - prompt = "[D]one, Edit, Abandon, Quit ?" - answer = "XXX" - while prompt.find(answer) == -1: - answer = utils.our_raw_input(prompt) - m = re_default_answer.search(prompt) - if answer == "": - answer = m.group(1) - answer = answer[:1].upper() - os.unlink(temp_filename) - if answer == 'A': - return - elif answer == 'Q': - end() - sys.exit(0) - - comment = NewComment() - comment.package = upload.pkg.changes["source"] - comment.version = upload.pkg.changes["version"] - comment.comment = newnote - comment.author = utils.whoami() - comment.trainee = bool(Options["Trainee"]) - session.add(comment) - session.commit() ################################################################################ @@ -448,57 +411,6 @@ def add_overrides (new, upload, session): ################################################################################ -def prod_maintainer (notes, upload): - cnf = Config() - # Here we prepare an editor and get them ready to prod... - (fd, temp_filename) = utils.temp_filename() - temp_file = os.fdopen(fd, 'w') - for note in notes: - temp_file.write(note.comment) - temp_file.close() - editor = os.environ.get("EDITOR","vi") - answer = 'E' - while answer == 'E': - os.system("%s %s" % (editor, temp_filename)) - temp_fh = utils.open_file(temp_filename) - prod_message = "".join(temp_fh.readlines()) - temp_fh.close() - print "Prod message:" - print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1) - prompt = "[P]rod, Edit, Abandon, Quit ?" - answer = "XXX" - while prompt.find(answer) == -1: - answer = utils.our_raw_input(prompt) - m = re_default_answer.search(prompt) - if answer == "": - answer = m.group(1) - answer = answer[:1].upper() - os.unlink(temp_filename) - if answer == 'A': - return - elif answer == 'Q': - end() - sys.exit(0) - # Otherwise, do the proding... 
- user_email_address = utils.whoami() + " <%s>" % ( - cnf["Dinstall::MyAdminAddress"]) - - Subst = upload.Subst - - Subst["__FROM_ADDRESS__"] = user_email_address - Subst["__PROD_MESSAGE__"] = prod_message - Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"] - - prod_mail_message = utils.TemplateSubst( - Subst,cnf["Dir::Templates"]+"/process-new.prod") - - # Send the prod mail - utils.send_mail(prod_mail_message) - - print "Sent proding message" - -################################################################################ - def do_new(upload, session): print "NEW\n" files = upload.pkg.files @@ -516,7 +428,7 @@ def do_new(upload, session): done = 0 while not done: # Find out what's new - new = determine_new(changes, files) + new, byhand = determine_new(upload.pkg.changes_file, changes, files, session=session) if not new: break @@ -617,14 +529,14 @@ def do_byhand(upload, session): done = 0 while not done: files = upload.pkg.files - will_install = 1 + will_install = True byhand = [] for f in files.keys(): - if files[f]["type"] == "byhand": + if files[f]["section"] == "byhand": if os.path.exists(f): print "W: %s still present; please process byhand components and try again." % (f) - will_install = 0 + will_install = False else: byhand.append(f) @@ -646,21 +558,39 @@ def do_byhand(upload, session): answer = answer[:1].upper() if answer == 'A': - try: - check_daily_lock() - done = 1 - for f in byhand: - del files[f] - Logger.log(["BYHAND ACCEPT: %s" % (upload.pkg.changes_file)]) - except CantGetLockError: - print "Hello? Operator! Give me the number for 911!" - print "Dinstall in the locked area, cant process packages, come back later" + dbchg = get_dbchange(upload.pkg.changes_file, session) + if dbchg is None: + print "Warning: cannot find changes file in database; can't process BYHAND" + else: + try: + check_daily_lock() + done = 1 + for b in byhand: + # Find the file entry in the database + found = False + for f in dbchg.files: + if f.filename == b: + found = True + f.processed = True + break + + if not found: + print "Warning: Couldn't find BYHAND item %s in the database to mark it processed" % b + + session.commit() + Logger.log(["BYHAND ACCEPT: %s" % (upload.pkg.changes_file)]) + except CantGetLockError: + print "Hello? Operator! Give me the number for 911!" 
+ print "Dinstall in the locked area, cant process packages, come back later" elif answer == 'M': - Logger.log(["BYHAND REJECT: %s" % (upload.pkg.changes_file)]) - upload.do_reject(manual=1, reject_message=Options["Manual-Reject"]) - upload.pkg.remove_known_changes(session=session) - session.commit() - done = 1 + aborted = upload.do_reject(manual=1, + reject_message=Options["Manual-Reject"], + notes=get_new_comments(changes.get("source", ""), session=session)) + if not aborted: + upload.pkg.remove_known_changes(session=session) + session.commit() + Logger.log(["BYHAND REJECT: %s" % (upload.pkg.changes_file)]) + done = 1 elif answer == 'S': done = 1 elif answer == 'Q': @@ -722,13 +652,15 @@ class clean_holding(object): os.unlink(os.path.join(h.holding_dir, f)) -def do_pkg(changes_file, session): - new_queue = get_policy_queue('new', session ); +def do_pkg(changes_full_path, session): + changes_dir = os.path.dirname(changes_full_path) + changes_file = os.path.basename(changes_full_path) + u = Upload() u.pkg.changes_file = changes_file (u.pkg.changes["fingerprint"], rejects) = utils.check_signature(changes_file) u.load_changes(changes_file) - u.pkg.directory = new_queue.path + u.pkg.directory = changes_dir u.update_subst() u.logger = Logger origchanges = os.path.abspath(u.pkg.changes_file) @@ -757,9 +689,10 @@ def do_pkg(changes_file, session): if not recheck(u, session): return - # FIXME: This does need byhand checks added! - new = determine_new(u.pkg.changes, files) - if new: + new, byhand = determine_new(u.pkg.changes_file, u.pkg.changes, files, session=session) + if byhand: + do_byhand(u, session) + elif new: do_new(u, session) else: try: @@ -809,7 +742,9 @@ def main(): changes_files = apt_pkg.ParseCommandLine(cnf.Cnf,Arguments,sys.argv) if len(changes_files) == 0: new_queue = get_policy_queue('new', session ); - changes_files = utils.get_changes_files(new_queue.path) + changes_paths = [ os.path.join(new_queue.path, j) for j in utils.get_changes_files(new_queue.path) ] + else: + changes_paths = [ os.path.abspath(j) for j in changes_files ] Options = cnf.SubTree("Process-New::Options") @@ -826,15 +761,15 @@ def main(): Priorities = Priority_Completer(session) readline.parse_and_bind("tab: complete") - if len(changes_files) > 1: + if len(changes_paths) > 1: sys.stderr.write("Sorting changes...\n") - changes_files = sort_changes(changes_files, session) + changes_files = sort_changes(changes_paths, session) - for changes_file in changes_files: + for changes_file in changes_paths: changes_file = utils.validate_changes_file_arg(changes_file, 0) if not changes_file: continue - print "\n" + changes_file + print "\n" + os.path.basename(changes_file) do_pkg (changes_file, session) diff --git a/dak/show_new.py b/dak/show_new.py index 6d646918..6d357134 100755 --- a/dak/show_new.py +++ b/dak/show_new.py @@ -172,7 +172,7 @@ def do_pkg(changes_file): u.check_source_against_db(deb_filename, session) u.pkg.changes["suite"] = u.pkg.changes["distribution"] - new = determine_new(u.pkg.changes, files, 0, session) + new, byhand = determine_new(u.pkg.changes_file, u.pkg.changes, files, 0, session) htmlname = changes["source"] + "_" + changes["version"] + ".html" sources.add(htmlname) diff --git a/dak/update_db.py b/dak/update_db.py index c6dc404b..bfd05fe7 100755 --- a/dak/update_db.py +++ b/dak/update_db.py @@ -45,7 +45,7 @@ from daklib.dak_exceptions import DBUpdateError ################################################################################ Cnf = None -required_database_schema = 35 
+required_database_schema = 36 ################################################################################ diff --git a/daklib/changesutils.py b/daklib/changesutils.py index 0aca121a..35b3d57b 100644 --- a/daklib/changesutils.py +++ b/daklib/changesutils.py @@ -159,7 +159,9 @@ def changes_to_queue(upload, srcqueue, destqueue, session): for f in chg.files: # update the changes_pending_files row f.queue = destqueue - utils.move(os.path.join(srcqueue.path, f.filename), destqueue.path, perms=int(destqueue.perms, 8)) + # Only worry about unprocessed files + if not f.processed: + utils.move(os.path.join(srcqueue.path, f.filename), destqueue.path, perms=int(destqueue.perms, 8)) utils.move(os.path.join(srcqueue.path, upload.pkg.changes_file), destqueue.path, perms=int(destqueue.perms, 8)) chg.in_queue = destqueue @@ -188,9 +190,14 @@ def new_accept(upload, dry_run, session): else: # Just a normal upload, accept it... (summary, short_summary) = upload.build_summaries() - srcqueue = get_policy_queue('new', session) destqueue = get_policy_queue('newstage', session) + srcqueue = get_policy_queue_from_path(upload.pkg.directory, session) + + if not srcqueue: + # Assume NEW and hope for the best + srcqueue = get_policy_queue('new', session) + changes_to_queue(upload, srcqueue, destqueue, session) __all__.append('new_accept') diff --git a/daklib/dbconn.py b/daklib/dbconn.py index 65e14104..fab0870e 100755 --- a/daklib/dbconn.py +++ b/daklib/dbconn.py @@ -1917,6 +1917,31 @@ def get_policy_queue(queuename, session=None): __all__.append('get_policy_queue') +@session_wrapper +def get_policy_queue_from_path(pathname, session=None): + """ + Returns PolicyQueue object for given C{path name} + + @type queuename: string + @param queuename: The path + + @type session: Session + @param session: Optional SQLA session object (a temporary one will be + generated if not supplied) + + @rtype: PolicyQueue + @return: PolicyQueue object for the given queue + """ + + q = session.query(PolicyQueue).filter_by(path=pathname) + + try: + return q.one() + except NoResultFound: + return None + +__all__.append('get_policy_queue_from_path') + ################################################################################ class Priority(object): diff --git a/daklib/queue.py b/daklib/queue.py index c5e6ca74..7d7faa4b 100755 --- a/daklib/queue.py +++ b/daklib/queue.py @@ -92,10 +92,13 @@ def get_type(f, session): # Determine what parts in a .changes are NEW -def determine_new(changes, files, warn=1, session = None): +def determine_new(filename, changes, files, warn=1, session = None): """ Determine what parts in a C{changes} file are NEW. + @type filename: str + @param filename: changes filename + @type changes: Upload.Pkg.changes dict @param changes: Changes dictionary @@ -109,13 +112,22 @@ def determine_new(changes, files, warn=1, session = None): @return: dictionary of NEW components. 
""" + # TODO: This should all use the database instead of parsing the changes + # file again new = {} + byhand = {} + + dbchg = get_dbchange(filename, session) + if dbchg is None: + print "Warning: cannot find changes file in database; won't check byhand" # Build up a list of potentially new things for name, f in files.items(): - # Skip byhand elements -# if f["type"] == "byhand": -# continue + # Keep a record of byhand elements + if f["section"] == "byhand": + byhand[name] = 1 + continue + pkg = f["package"] priority = f["priority"] section = f["section"] @@ -163,6 +175,23 @@ def determine_new(changes, files, warn=1, session = None): del changes["suite"][suite] changes["suite"][override] = 1 + # Check for unprocessed byhand files + if dbchg is not None: + for b in byhand.keys(): + # Find the file entry in the database + found = False + for f in dbchg.files: + if f.filename == b: + found = True + # If it's processed, we can ignore it + if f.processed: + del byhand[b] + break + + if not found: + print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" + + # Check for new stuff for suite in changes["suite"].keys(): for pkg in new.keys(): ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session) @@ -180,7 +209,7 @@ def determine_new(changes, files, warn=1, session = None): if new[pkg].has_key("othercomponents"): print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"]) - return new + return new, byhand ################################################################################ @@ -247,6 +276,96 @@ class TarTime(object): ############################################################################### +def prod_maintainer(notes, upload): + cnf = Config() + + # Here we prepare an editor and get them ready to prod... + (fd, temp_filename) = utils.temp_filename() + temp_file = os.fdopen(fd, 'w') + for note in notes: + temp_file.write(note.comment) + temp_file.close() + editor = os.environ.get("EDITOR","vi") + answer = 'E' + while answer == 'E': + os.system("%s %s" % (editor, temp_filename)) + temp_fh = utils.open_file(temp_filename) + prod_message = "".join(temp_fh.readlines()) + temp_fh.close() + print "Prod message:" + print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1) + prompt = "[P]rod, Edit, Abandon, Quit ?" + answer = "XXX" + while prompt.find(answer) == -1: + answer = utils.our_raw_input(prompt) + m = re_default_answer.search(prompt) + if answer == "": + answer = m.group(1) + answer = answer[:1].upper() + os.unlink(temp_filename) + if answer == 'A': + return + elif answer == 'Q': + end() + sys.exit(0) + # Otherwise, do the proding... 
+ user_email_address = utils.whoami() + " <%s>" % ( + cnf["Dinstall::MyAdminAddress"]) + + Subst = upload.Subst + + Subst["__FROM_ADDRESS__"] = user_email_address + Subst["__PROD_MESSAGE__"] = prod_message + Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"] + + prod_mail_message = utils.TemplateSubst( + Subst,cnf["Dir::Templates"]+"/process-new.prod") + + # Send the prod mail + utils.send_mail(prod_mail_message) + + print "Sent prodding message" + +################################################################################ + +def edit_note(note, upload, session): + # Write the current data to a temporary file + (fd, temp_filename) = utils.temp_filename() + editor = os.environ.get("EDITOR","vi") + answer = 'E' + while answer == 'E': + os.system("%s %s" % (editor, temp_filename)) + temp_file = utils.open_file(temp_filename) + newnote = temp_file.read().rstrip() + temp_file.close() + print "New Note:" + print utils.prefix_multi_line_string(newnote," ") + prompt = "[D]one, Edit, Abandon, Quit ?" + answer = "XXX" + while prompt.find(answer) == -1: + answer = utils.our_raw_input(prompt) + m = re_default_answer.search(prompt) + if answer == "": + answer = m.group(1) + answer = answer[:1].upper() + os.unlink(temp_filename) + if answer == 'A': + return + elif answer == 'Q': + end() + sys.exit(0) + + comment = NewComment() + comment.package = upload.pkg.changes["source"] + comment.version = upload.pkg.changes["version"] + comment.comment = newnote + comment.author = utils.whoami() + comment.trainee = bool(Options["Trainee"]) + session.add(comment) + session.commit() + +############################################################################### + class Upload(object): """ Everything that has to do with an upload processed. @@ -1012,11 +1131,24 @@ class Upload(object): session = DBConn().session() self.check_source_against_db(dsc_filename, session) self.check_dsc_against_db(dsc_filename, session) - session.close() + + dbchg = get_dbchange(self.pkg.changes_file, session) # Finally, check if we're missing any files for f in self.later_check_files: - self.rejects.append("Could not find file %s references in changes" % f) + print 'XXX: %s' % f + # Check if we've already processed this file if we have a dbchg object + ok = False + if dbchg: + for pf in dbchg.files: + if pf.filename == f and pf.processed: + self.notes.append('%s was already processed so we can go ahead' % f) + ok = True + del self.pkg.files[f] + if not ok: + self.rejects.append("Could not find file %s references in changes" % f) + + session.close() return True diff --git a/daklib/queue_install.py b/daklib/queue_install.py index bc1b8739..d016c986 100755 --- a/daklib/queue_install.py +++ b/daklib/queue_install.py @@ -254,7 +254,7 @@ def determine_target(u): # Statically handled queues target = None - for q in ["new", "autobyhand", "byhand"]: + for q in ["autobyhand", "byhand", "new"]: if QueueInfo[q]["is"](u): target = q break
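Reviewer note: the interface change that ripples through dak/process_new.py and dak/show_new.py above is that determine_new() now takes the changes filename first and returns a (new, byhand) pair, with BYHAND completion tracked via the changes_pending_files.processed column added by update36, and the source policy queue derived from the on-disk path via get_policy_queue_from_path(). Below is a minimal illustrative sketch of the new calling convention; it is not part of the commit, the helper name classify_upload is hypothetical, and it assumes daklib is importable with an already-loaded daklib.queue.Upload and an open session.

# Hypothetical helper (not part of this commit) showing the new
# determine_new() calling convention after this merge.
from daklib.dbconn import get_policy_queue, get_policy_queue_from_path
from daklib.queue import determine_new

def classify_upload(upload, session):
    """Return ('byhand'|'new'|'accept', srcqueue) for a loaded Upload."""
    new, byhand = determine_new(upload.pkg.changes_file,
                                upload.pkg.changes,
                                upload.pkg.files,
                                session=session)
    # The source queue is now looked up from the upload's directory; fall
    # back to the 'new' queue as new_accept() does when the path is unknown.
    srcqueue = get_policy_queue_from_path(upload.pkg.directory, session)
    if not srcqueue:
        srcqueue = get_policy_queue('new', session)
    if byhand:
        return 'byhand', srcqueue   # unprocessed BYHAND components first
    elif new:
        return 'new', srcqueue      # needs NEW override decisions
    return 'accept', srcqueue       # nothing left to review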