git.decadent.org.uk Git - dak.git/commitdiff
merge ftpmaster branch
author     Thomas Viehmann <tv@beamnet.de>
           Sat, 3 May 2008 10:25:00 +0000 (10:25 +0000)
committer  Thomas Viehmann <tv@beamnet.de>
           Sat, 3 May 2008 10:25:00 +0000 (10:25 +0000)
64 files changed:
ChangeLog
config/debian-security/apt.conf
config/debian-security/cron.buildd
config/debian-security/cron.daily [changed mode: 0644->0755]
config/debian-security/dak.conf
config/debian/cron.buildd
config/debian/cron.daily
config/debian/cron.weekly
config/debian/dak.conf
config/debian/extensions.py
config/debian/pseudo-packages.description
config/debian/pseudo-packages.maintainers
config/debian/vars
dak/check_archive.py [changed mode: 0755->0644]
dak/check_overrides.py [changed mode: 0755->0644]
dak/check_proposed_updates.py [changed mode: 0755->0644]
dak/clean_proposed_updates.py [changed mode: 0755->0644]
dak/clean_queues.py [changed mode: 0755->0644]
dak/clean_suites.py [changed mode: 0755->0644]
dak/compare_suites.py [changed mode: 0755->0644]
dak/control_overrides.py [changed mode: 0755->0644]
dak/control_suite.py [changed mode: 0755->0644]
dak/cruft_report.py [changed mode: 0755->0644]
dak/dak.py [changed mode: 0755->0644]
dak/decode_dot_dak.py [changed mode: 0755->0644]
dak/examine_package.py [changed mode: 0755->0644]
dak/find_null_maintainers.py [changed mode: 0755->0644]
dak/generate_index_diffs.py [changed mode: 0755->0644]
dak/generate_releases.py [changed mode: 0755->0644]
dak/import_archive.py [changed mode: 0755->0644]
dak/import_keyring.py [changed mode: 0755->0644]
dak/import_ldap_fingerprints.py [changed mode: 0755->0644]
dak/import_users_from_passwd.py [changed mode: 0755->0644]
dak/init_db.py [changed mode: 0755->0644]
dak/init_dirs.py [changed mode: 0755->0644]
dak/ls.py [changed mode: 0755->0644]
dak/make_maintainers.py [changed mode: 0755->0644]
dak/make_overrides.py [changed mode: 0755->0644]
dak/make_suite_file_list.py [changed mode: 0755->0644]
dak/mirror_split.py [changed mode: 0755->0644]
dak/new_security_install.py [changed mode: 0755->0644]
dak/override.py [changed mode: 0755->0644]
dak/poolize.py [changed mode: 0755->0644]
dak/process_accepted.py [changed mode: 0755->0644]
dak/process_new.py [changed mode: 0755->0644]
dak/process_unchecked.py [changed mode: 0755->0644]
dak/queue_report.py [changed mode: 0755->0644]
dak/reject_proposed_updates.py [changed mode: 0755->0644]
dak/rm.py [changed mode: 0755->0644]
dak/security_install.py [changed mode: 0755->0644]
dak/show_new.py [changed mode: 0755->0644]
dak/split_done.py [changed mode: 0755->0644]
dak/stats.py [changed mode: 0755->0644]
dak/symlink_dists.py [changed mode: 0755->0644]
dak/test/001/test.py [changed mode: 0755->0644]
dak/test/002/test.py [changed mode: 0755->0644]
dak/transitions.py [changed mode: 0755->0644]
daklib/extensions.py
daklib/logging.py
daklib/queue.py [changed mode: 0755->0644]
daklib/utils.py [changed mode: 0755->0644]
scripts/debian/import_testing.sh [new file with mode: 0755]
scripts/debian/insert_missing_changedby.py [changed mode: 0755->0644]
setup/init_pool.sql

index b543449aae8c8f1953bd2643e9cb1a2d3ab7555b..fc6e9f88276864682abe594ba6d03c265dd55091 100644 (file)
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,111 @@
+2008-05-03  Thomas Viehmann  <tv@beamnet.de>
+
+       * dak/process_unchecked.py (check_hashes): Reject on error while
+       parsing Checksums-*.
+
+2008-05-02  Joerg Jaspert  <joerg@debian.org>
+
+       * config/debian/pseudo-packages*: Removed listarchives, closes #468667
+       added wiki.debian.org (currently pointing to debian-www), closes #194206
+       added security-tracker, closes #436152
+       added release.debian.org, closes #451005
+
+       * config/debian/cron.buildd: Clean up unreachable code. (If someone
+       still wants it - it's in revision control...)
+
+       * config/debian/cron.daily: Modify call to update-ftpstats to only
+       use files log/2*, instead of log/*, so it ignores the new cron/
+       subdir. Makes sure it can't get confused, and is also safe for
+       nearly a thousand years. If this code is really in use in the year
+       3000, I'm sure people can adjust it! :)
+
+       * config/debian/vars: Add logdir as a place for cronjob log output
+
+       * config/debian/cron.daily: Use a logfile and be more verbose about
+       what's going on.
+         Also moved the commented VACUUM ; VACUUM ANALYZE calls over to
+       cron.weekly, ...
+       * config/debian/cron.weekly: likewise,
+         ... and activate those calls again. Once a week, as an
+       additional safety net to be sure the stuff does get run, even if
+       we have autovacuum enabled by default.
+
+2008-05-02  Thomas Viehmann  <tv@beamnet.de>
+
+       * dak/process_unchecked.py (check_hashes): fix typo in
+         checksum reject message.
+
+2008-05-02  Joerg Jaspert  <joerg@debian.org>
+
+       * config/debian/extensions.py: used reindent.py from the Python
+       2.x source examples to make all dak code use the same indentation
+       style, no longer a mix of 4 spaces / 1 tab.
+       * dak/check_archive.py: likewise
+       * dak/check_overrides.py: likewise
+       * dak/check_proposed_updates.py: likewise
+       * dak/clean_proposed_updates.py: likewise
+       * dak/clean_queues.py: likewise
+       * dak/clean_suites.py: likewise
+       * dak/compare_suites.py: likewise
+       * dak/control_overrides.py: likewise
+       * dak/control_suite.py: likewise
+       * dak/cruft_report.py: likewise
+       * dak/dak.py: likewise
+       * dak/decode_dot_dak.py: likewise
+       * dak/examine_package.py: likewise
+       * dak/find_null_maintainers.py: likewise
+       * dak/generate_index_diffs.py: likewise
+       * dak/generate_releases.py: likewise
+       * dak/import_archive.py: likewise
+       * dak/import_keyring.py: likewise
+       * dak/import_ldap_fingerprints.py: likewise
+       * dak/import_users_from_passwd.py: likewise
+       * dak/init_db.py: likewise
+       * dak/init_dirs.py: likewise
+       * dak/ls.py: likewise
+       * dak/make_maintainers.py: likewise
+       * dak/make_overrides.py: likewise
+       * dak/make_suite_file_list.py: likewise
+       * dak/mirror_split.py: likewise
+       * dak/new_security_install.py: likewise
+       * dak/override.py: likewise
+       * dak/poolize.py: likewise
+       * dak/process_accepted.py: likewise
+       * dak/process_new.py: likewise
+       * dak/process_unchecked.py: likewise
+       * dak/queue_report.py: likewise
+       * dak/reject_proposed_updates.py: likewise
+       * dak/rm.py: likewise
+       * dak/security_install.py: likewise
+       * dak/show_new.py: likewise
+       * dak/split_done.py: likewise
+       * dak/stats.py: likewise
+       * dak/symlink_dists.py: likewise
+       * dak/test/001/test.py: likewise
+       * dak/test/002/test.py: likewise
+       * dak/transitions.py: likewise
+       * daklib/extensions.py: likewise
+       * daklib/logging.py: likewise
+       * daklib/queue.py: likewise
+       * daklib/utils.py: likewise
+       * scripts/debian/insert_missing_changedby.py: likewise
+
+       * dak/process_new.py (recheck): Make the path check more robust,
+       so we no longer have to keep separate process_new trees for the
+       security and normal archives.
+
+2008-04-27  Joerg Jaspert  <joerg@debian.org>
+
+       * dak/process_accepted.py (Urgency_Log.__init__): Warn if the
+       configured path does not exist or is not writeable by us. Use a
+       tmp path if so, so we do not lose the urgencies in such cases.
+
+       * config/debian/dak.conf: Changed path for UrgencyLog
+       Same for the ReleaseTransitions file
+
+       * config/debian/cron.daily: Notify the qa user on merkel of
+       dinstall start; remove the britney call
+
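A minimal sketch of the fallback behaviour the Urgency_Log entry above describes,
with hypothetical function and file names (not dak's actual Urgency_Log class):

    import os
    import sys
    import tempfile

    def open_urgency_file(configured_dir):
        # Fall back to a temporary directory if the configured path is
        # missing or not writable by us, so the urgencies are not lost.
        if not os.path.isdir(configured_dir) or not os.access(configured_dir, os.W_OK):
            fallback = tempfile.mkdtemp(prefix="urgencies-")
            sys.stderr.write("W: %s unusable, using %s instead\n" % (configured_dir, fallback))
            configured_dir = fallback
        return open(os.path.join(configured_dir, "install-urgencies"), "a")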
 2008-04-26  Joerg Jaspert  <joerg@debian.org>
 
        * dak/process_new.py: Call end() whenever we try to leave by
        * dak/examine_package.py (check_deb): Remove linda call. It
        provides no added benefit to lintian anymore.
 
+2008-01-07  Joerg Jaspert  <joerg@debian.org>
+
+       * dak/examine_package.py (check_deb): Remove linda call. It
+       provides no added benefit to lintian anymore.
+
 2008-01-06  Joerg Jaspert  <joerg@debian.org>
 
        * dak/examine_package.py (do_lintian): lintian now supports html
        coloring, so use it.
        (do_command): Dont escape html chars if param escaped = 1
 
+2008-01-06  Joerg Jaspert  <joerg@debian.org>
+
+       * dak/examine_package.py (do_lintian): lintian now supports html
+       coloring, so use it.
+       (do_command): Dont escape html chars if param escaped = 1
+
+2007-12-31  Anthony Towns  <ajt@debian.org>
+
+       * dak/process_new.py (recheck): pass "" for prefix_str to reject()
+       when processing result of check_dsc_against_db so we don't promote
+       warnings to rejections.
+
 2007-12-31  Anthony Towns  <ajt@debian.org>
 
        * dak/process_new.py (recheck): pass "" for prefix_str to reject()
        * scripts/debian/update-readmenonus: Removed.
 
 
+2007-12-30  Joerg Jaspert  <joerg@debian.org>
+
+       * dak/dak.py (init): add show-new. This is based on a patch
+       submitted by Thomas Viehmann in Bug #408318, but large parts of
+       handling it are rewritten and show-new is done by me.
+
+       * dak/queue_report.py (table_row): Add link to generated html page
+       for NEW package.
+
+       * dak/show_new.py: new file, generates html overview for NEW
+       packages, similar to what we see with examine-package.
+
+       * config/debian/cron.hourly: Add show-new call
+
+       * config/debian/dak.conf: Add HTMLPath for Show-New
+
+       * dak/examine_package.py (print_copyright): ignore stderr when
+       finding copyright file.
+       (main): add html option
+       (html_escape): new function
+       (escape_if_needed): ditto
+       (headline): ditto
+       (colour_output): ditto
+       (print_escaped_text): ditto
+       (print_formatted_text): ditto
+       - use those functions everywhere we generate output, as they
+       easily know if we want html or not and just DTRT
+       (do_lintian): new function
+       (check_deb): use it
+       (output_deb_info): Use print_escaped_text, not print_formatted_text.
+       Also import daklib.queue, determine_new now lives there
+
+       Also add a variable to see if we want html output. Default is
+       disabled; show_new enables it for its use.
+       Most of the html, besides header/footer, is in examine_package
+       instead of show_new, as that makes it a whole lot easier to deal
+       with at the point the info is generated.
+
+
+       * dak/process_new.py (determine_new): Moved out of here.
+       (check_valid): Moved out of here.
+       (get_type): Moved out of here.
+
+       * daklib/queue.py (determine_new): Moved here.
+       (check_valid): Moved here.
+       (get_type): Moved here.
+
+       * dak/init_db.py (do_section): Remove non-US code
+
+       * dak/make_overrides.py (main): ditto
+
+       * dak/process_new.py (determine_new): ditto
+
+       * daklib/queue.py (Upload.in_override_p),
+       (Upload.check_override): ditto
+
+       * daklib/utils.py (extract_component_from_section):,
+       (poolify): ditto
+
+       * dak/import_archive.py (update_section): ditto
+
+       * dak/symlink_dists.py (fix_component_section): ditto
+
+       * scripts/debian/mkmaintainers: ditto
+
+       * scripts/debian/update-mirrorlists (masterlist): ditto
+
+       * config/debian-non-US/*: Remove subdir
+
+       * scripts/debian/update-readmenonus: Removed.
+
+
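The output helpers named in the 2007-12-30 examine_package entry above boil down
to one switch between plain and html output; a rough sketch under that assumption
(the bodies below are illustrative, not the actual dak implementations):

    use_html = False   # show_new enables this for its use

    def html_escape(s):
        return s.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")

    def escape_if_needed(s):
        if use_html:
            return html_escape(s)
        return s

    def colour_output(s, css_class):
        if use_html:
            return '<span class="%s">%s</span>' % (css_class, html_escape(s))
        return s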
+2007-12-28  Anthony Towns  <ajt@debian.org>
+
+       * daklib/utils.py (check_signature): add NOTATION_DATA and
+       NOTATION_NAME to known keywords.
+
+       * daklib/queue.py (Upload.check_source_against_db):
+
+       * dak/make_suite_file_list.py: add -f/--force option.
+
+       * dak/generate_releases.py: add -a/--apt-conf=FILE and
+       -f/--force-touch options.  Pull version info from the database.
+       Make suite description optional.
+
+       * config/debian/dak.conf: update
+       Reject-Proposed-Updates::MoreInfoURL.  Comment out
+       Suite::Stable::Version and ::Description.
+
+       * config/debian/apt.conf: Add hurd-i386 to unstable
+       debian-installer stanza.
+
+2007-12-28  Joerg Jaspert  <joerg@debian.org>
+
+       * KEYEXPIRED is actually a known keyword. We do check it earlier
+       on and reject in case the sig is bad (or unknown)
+
 2007-12-28  Anthony Towns  <ajt@debian.org>
 
        * daklib/utils.py (check_signature): add NOTATION_DATA and
        NOTATION_NAME to known keywords.
 
-       * daklib/queue.py (Upload.check_source_against_db): 
+       * daklib/queue.py (Upload.check_source_against_db):
 
        * dak/make_suite_file_list.py: add -f/--force option.
 
        process data.tar.bz2 (or whatever format it will be in the
        future).
 
+2007-12-24  Joerg Jaspert  <joerg@debian.org>
+
+       * Also run lintian on the .dsc file to check the source itself.
+
+       * Fix the direct usage of ar | tar etc to get the copyright file
+       and use dpkg-deb, which is made for this and makes us able to
+       process data.tar.bz2 (or whatever format it will be in the
+       future).
+
+2007-12-21  Joerg Jaspert  <joerg@debian.org>
+
+       * Remove the (now useless) check for a pre-depends on dpkg for
+         binaries that contain bzip2 compressed data tarballs.
+
 2007-12-21  Joerg Jaspert  <joerg@debian.org>
 
        * Remove the (now useless) check for a pre-depends on dpkg for
        holding queues, don't worry if dak has its own reasons for
        rejecting the package as well as the SRMs.
 
+2007-08-28  Anthony Towns  <ajt@debian.org>
+
+       * process_unchecked.py: Add support for automatic BYHAND
+       processing.
+       * config/debian/dak.conf, scripts/debian/byhand-tag: Automatic
+       processing of tag-overrides.
+       * examine_package.py: Summarise duplicate copyright file entries
+       (same md5sum) with a reference to the previous instance, rather
+       than repeating them.
+       * process_new.py: When rejecting from the p-u-new or o-p-u-new
+       holding queues, don't worry if dak has its own reasons for
+       rejecting the package as well as the SRMs.
+
+2007-06-19  Anthony Towns  <ajt@debian.org>
+
+       * Add nm.debian.org pseudopackage
+
 2007-06-19  Anthony Towns  <ajt@debian.org>
 
        * Add nm.debian.org pseudopackage
 
        * config/debian/dak.conf: typo fix for Dinstall::GPGKeyring,
        drop upload limitations, add release postgres user
-       
+
+       * dak/process_new.py: support for automatically accepting and rejecting
+       packages from proposed-updates holding queues via COMMENTS directory
+       * cron.daily: automatically process COMMENTS-based approvals
+       and rejections for proposed-updates holding queues
+
+       * dak/process_unchecked.py: add support for oldproposedupdates
+       holding queue
+
+       * dak/control_suite.py: allow control-suite to work with etch-m68k
+
+       * dak/generate_releases.py: unlink old Release files before updating
+       them if nlinks > 1 (ie, if two files used to be the same, maybe they
+       shouldn't be when generate-releases is run)
+
+       * dak/generate_releases.py: add a couple of commented lines to make
+       it easier to deal with point releases
+
+       * dak/make_overrides.py: generate overrides for !contrib udebs
+
+       * docs/README.stable-point-release: update docs for doing a
+       point release
+
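The generate-releases note above (unlink old Release files when nlinks > 1)
amounts to breaking a hard link before rewriting the file; a small illustrative
sketch, not the actual generate-releases code:

    import os

    def break_hardlink(path):
        # If the file is hard-linked somewhere else (e.g. a point-release
        # copy), unlink it first so rewriting it does not also change the
        # other copy behind our back.
        if os.path.exists(path) and os.stat(path).st_nlink > 1:
            os.unlink(path)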
+2007-06-18  Anthony Towns  <ajt@debian.org>
+
+       * daklib/logging.py: Set umask to not exclude group-writability
+       so we don't get reminded at the start of each month. Thanks to
+       Random J.
+       * dak/override.py: More changes from Herr von Wifflepuck: warn
+       if section of source is different to binary section; restore
+       functionality on source-only overrides; croak if trying to set
+       priority of a source override; never set priority of source
+       overrides; correct typo in logging (s/priority/section/ at
+       one place)
+
+       * config/debian/apt.conf.oldstable: Added for oldstable point releases.
+       * config/debian/cron.daily: automatically accept/reject
+       oldstable-proposed-updates based on COMMENTS directory
+
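The daklib/logging.py note above is essentially an umask tweak so newly created
log files stay group-writable; roughly (illustrative, the exact mode is an
assumption):

    import os
    os.umask(0o002)   # keep group write permission on files we create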
+2007-06-18  Anthony Towns  <ajt@debian.org>
+
+       * config/debian/apt.conf, config/debian/apt.conf.stable,
+       config/debian/dak.conf: update for 4.0r0 (etch), and 3.1r6
+       (sarge), support for oldstable-proposed-updates, dropping m68k
+       from etch, creating etch-m68k suite, creating lenny.
+
+       * config/debian/vars: update for lenny
+
+       * config/debian/dak.conf: typo fix for Dinstall::GPGKeyring,
+       drop upload limitations, add release postgres user
+
        * dak/process_new.py: support for automatically accepting and rejecting
        packages from proposed-updates holding queues via COMMENTS directory
        * cron.daily: automatically process COMMENTS-based approvals
        * config/debian/cron.unchecked: push version info to debbugs using
        ssh-move.
 
+2007-03-05  Anthony Towns  <ajt@debian.org>
+
+       * config/debian/dak.conf: update for 3.1r5.
+       * scripts/debian/ssh-move: add ssh-move script from debbugs
+       * config/debian/cron.unchecked: push version info to debbugs using
+       ssh-move.
+
+2007-02-14  James Troup  <troup@ries.debian.org>
+
+       * docs/README.config: remove Dinstall::GroupOverrideFilename.
+       * config/debian/dak.conf: likewise.
+       * config/debian-non-US/dak.conf: likewise.
+       * config/debian-security/dak.conf: likewise.
+
+       * daklib/queue.py (Upload.close_bugs): no longer handle NMUs or
+       experimental differently, just close the bugs and let version
+       tracking sort it out.
+        (nmu_p): remove entire class - now unused.
+        (Upload.__init__): don't use nmu_p.
+
 2007-02-14  James Troup  <troup@ries.debian.org>
 
        * docs/README.config: remove Dinstall::GroupOverrideFilename.
        * config/debian/dak.conf: likewise.
        * config/debian/vars: likewise.
        * scripts/debian/mkfilesindices: likewise.
-       
+
 2007-02-08  James Troup  <james@nocrew.org>
 
        * dak/process_unchecked.py (check_signed_by_key): new function to
        to configure per suite/component/architecture binary upload
        restrictions.
 
+2007-02-08  Anthony Towns  <ajt@debian.org>
+
+       * config/debian/dak.conf: update for 3.1r4.  Use new 'etch'
+       signing key.  Drop maximum index diffs down to 14.
+
+       * config/debian/apt.conf: add udeb support for non-free (testing,
+       unstable) and experimental.
+       * config/debian/dak.conf: likewise.
+
+       * dak/generate_releases.py (main): handle udebs in any component.
+
+       * daklib/queue.py (Upload.build_summaries): handle files without a
+       'type' gracefully.
+
+       * dak/generate_releases.py (print_sha256_files): new function.
+       (main): use it.
+
+       * dak/process_accepted.py (stable_install): fix name of template
+       mail.
+
+       * dak/process_unchecked.py (is_stableupdate): fix invocation of
+       database.get_suite_id().
+
+       * templates/process-new.bxa_notification: Update on request
+       of/after discussion with BIS staff.
+
+       * scripts/debian/mkfilesindices: also handle proposed-updates.
+
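The print_sha256_files mention above adds SHA-256 lines to the Release output; a
hypothetical stand-in for that helper (names and formatting are assumptions):

    import hashlib

    def print_sha256_files(paths):
        # Emit "checksum size name" lines in Release-file style.
        for path in paths:
            data = open(path, "rb").read()
            print("%s %8d %s" % (hashlib.sha256(data).hexdigest(), len(data), path))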
+2007-02-08  Ryan Murray  <rmurray@debian.org>
+
+       * config/debian/cron.monthly: use $ftpgroup instead of hardcoding
+       group name for chgrp of mail archives.
+
+       * daklib/queue.py (Upload.check_dsc_against_db): handle multiple
+       orig.tar.gz's by picking the first one by file id.
+
+       * dak/override.py (main): limit to binary overrides only for now.
+       (usage): update to match.
+
+       * config/debian/cron.daily: track when we have the accepted lock
+       and clean it up on exit if we have it.  Take/check the
+       cron.unchecked lock just before trapping to cleanup on exit.
+       Remove potato override handling.  Remove any dangling symlinks in
+       /srv/incoming.d.o/buildd.  Clean up apt-ftparchive's databases.
+
+       * config/debian/apt.conf: change default compression scheme for
+       both Sources and Packages to gzip and bzip2 rather than
+       uncompressed and gzip (Packages) and gzip (Sources).  Use old
+       defaults for proposed-updates.
+
+       * dak/control_overrides.py (main): refuse to operate on
+       untouchable suites.
+
+       * config/debian/pseudo-packages.maintainers: drop install,
+       installation, boot-floppy, slink-cd, potato-cd and
+       nonus.debian.org.  Update base.
+       * config/debian/pseudo-packages.description: likewise.
+
+       * daklib/utils.py (re_srchasver): new regex.
+       (parse_changes): use regex to split 'Source (Version)' style
+       Source fields into 'source' and 'source-version'.
+
+       * config/debian/cron.daily: use $base instead of hardcoding path
+       name.
+
+       * scripts/debian/mkfilesindices: source the 'vars' file and use its
+       variables instead of hardcoding path names.
+
+       * config/debian/apt.conf: switch from /org to /srv.
+       * config/debian/apt.conf.buildd: likewise.
+       * config/debian/apt.conf.stable: likewise.
+       * config/debian/cron.daily: likewise.
+       * config/debian/cron.hourly: likewise.
+       * config/debian/cron.monthly: likewise.
+       * config/debian/cron.unchecked: likewise.
+       * config/debian/cron.weekly: likewise.
+       * config/debian/dak.conf: likewise.
+       * config/debian/vars: likewise.
+       * scripts/debian/mkfilesindices: likewise.
+
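The re_srchasver change above splits a 'Source (Version)' style field into its
source and version parts; a hedged sketch of that idea (the pattern below is an
assumption, not dak's exact regex):

    import re

    re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")

    def split_source(value):
        # "hello (1.0-1)" -> ("hello", "1.0-1"); plain "hello" -> ("hello", None)
        m = re_srchasver.match(value)
        if m:
            return m.group(1), m.group(2)
        return value, None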
+2007-02-08  James Troup  <james@nocrew.org>
+
+       * dak/process_unchecked.py (check_signed_by_key): new function to
+       ensure .changes files are signed by an authorized uploader.
+       (process_it): use it.
+
+       * config/debian/dak.conf (Binary-Upload-Restrictions): new stanza
+       to configure per suite/component/architecture binary upload
+       restrictions.
+
+2006-10-09  James Troup  <james.troup@canonical.com>
+
+       * dak/process_unchecked.py (check_timestamps): change match to
+       search as recent versions of python-apt prefix the string with 'E: '.
+
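The 2006-10-09 note above relies on the difference between re.match (anchored at
the start of the string) and re.search (anywhere in it); a tiny illustration with
a made-up error string:

    import re

    msg = "E: Release file expired, ignoring ..."
    print(re.match(r"Release file expired", msg))    # None: the 'E: ' prefix blocks match
    print(re.search(r"Release file expired", msg))   # matches despite the prefix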
 2006-10-09  James Troup  <james.troup@canonical.com>
 
        * dak/process_unchecked.py (check_timestamps): change match to
        from Source: field in changes file, and ensure what is left is a valid
        package name.
 
+2006-06-26  Ryan Murray  <rmurray@debian.org>
+
+       * dak/process_unchecked.py (check_files): strip optional source version
+       from Source: field in changes file, and ensure what is left is a valid
+       package name.
+
+2006-06-23  Ryan Murray  <rmurray@debian.org>
+
+       * dak/process_unchecked.py (check_files): also check ProposedUpdates
+       queue for source.
+
 2006-06-23  Ryan Murray  <rmurray@debian.org>
 
        * dak/process_unchecked.py (check_files): also check ProposedUpdates
        * dak/config/debian-security/apt.conf: set Packages::Compress to gzip
        and bzip2 for etch.
 
+2006-06-18  Ryan Murray  <rmurray@debian.org>
+
+       * dak/scripts/debian/update-ftpstats: look for dak named processes in
+       the log, too.
+
+       * dak/process_unchecked.py (check_files): only check embargoed and
+       unembargoed queues if the keys are set.
+
+       * dak/config/debian-security/apt.conf: set Packages::Compress to gzip
+       and bzip2 for etch.
+
 2006-06-16  James Troup  <james@nocrew.org>
 
        * dak/dak.py (init): add new-security-install.
        that signed a given file from a keyserver.
        (check_signature): add 'autofetch' argument that if not set
        defaults to the value of Dinstall::KeyAutoFetch (if that exists).
-       If 'autofetch' is true, invoke retrieve_key().  
+       If 'autofetch' is true, invoke retrieve_key().
 
        * docs/README.config: document Dinstall::KeyAutoFetch and
        Dinstall:KeyServer.
        * apt.conf.stable: update for sarge's release
        * apt.conf: bump daily max Contents change to 25MB from 12MB
 
-       * cron.daily: add accepted lock and invoke cindy  
+       * cron.daily: add accepted lock and invoke cindy
        * cron.daily: add daily.lock
        * cron.daily: invoke tiffani
        * cron.daily: rebuild accepted buildd stuff
 
        * amber (do_upload): Sort changes files in "katie" order so that
          source always arrives before binary-only rebuilds
-       
+
 2004-10-05  James Troup  <james@nocrew.org>
 
        * jennifer (check_dsc): correct reject message on invalid
        valid_dsc_p and don't run check_source() if check_dsc() failed.
        (check_dsc): on fatal failures return 0 so check_source() isn't
        run (since it makes fatal assumptions about the presence of
-       mandatory .dsc fields).  
+       mandatory .dsc fields).
        Remove unused and obsolete re_bad_diff and re_is_changes regexps.
 
 2004-05-07  James Troup  <james@nocrew.org>
index 41b10ef11e5b0ccd2aec7bd15ba15c7643086c1a..7d34e11d016738ae938209dcde11cc73a5fccda6 100644 (file)
@@ -46,7 +46,7 @@ tree "dists/testing/updates"
    FileList "/org/security.debian.org/dak-database/dists/testing_updates/$(SECTION)_binary-$(ARCH).list";
    SourceFileList "/org/security.debian.org/dak-database/dists/testing_updates/$(SECTION)_source.list";
    Sections "main contrib non-free";
-   Architectures "alpha amd64 arm hppa i386 ia64 mips mipsel powerpc s390 sparc source";
+   Architectures "alpha amd64 arm armel hppa i386 ia64 mips mipsel powerpc s390 sparc source";
    BinOverride "override.lenny.$(SECTION)";
    ExtraOverride "override.lenny.extra.$(SECTION)";
    SrcOverride "override.lenny.$(SECTION).src";
index 96607e4850c48a216b03b4acc7cb4959e49a7c0c..7e75bcbbfaae65333047869dae7fcfebe4750b83 100755 (executable)
@@ -4,7 +4,7 @@
 
 ARCHS_oldstable="alpha arm hppa i386 ia64 m68k mips mipsel powerpc sparc s390 amd64"
 ARCHS_stable="alpha amd64 arm hppa i386 ia64 mips mipsel powerpc sparc s390"
-ARCHS_testing="$ARCHS_stable"
+ARCHS_testing="alpha amd64 armel hppa i386 ia64 mips mipsel powerpc sparc s390"
 DISTS="oldstable stable testing"
 SSH_SOCKET=~/.ssh/buildd.debian.org.socket
 
old mode 100644 (file)
new mode 100755 (executable)
index dbc34b6..d8d2bd1
@@ -11,7 +11,7 @@ export SCRIPTVARS=/org/security.debian.org/dak/config/debian-security/vars
 # Fix overrides
 
 # disabled by ajt 2008-01-01: requires auth
-#rsync -ql ftp-master::indices/override\* $overridedir
+rsync --password-file /srv/non-us.debian.org/s3kr1t/rsync-password -ql security-master@ftp-master::indices/override\* $overridedir
 
 cd $overridedir
 find . -name override\*.gz -type f -maxdepth 1 -mindepth 1 | xargs gunzip -f
index fb219e5e52f0d46acce7a0e1a50d3731e56da675..bc978ee20a413e4f638955203f4c91bb48ce760b 100644 (file)
@@ -196,6 +196,7 @@ Suite
          amd64; 
          alpha; 
          arm;
+         armel;
          hppa;
          i386;
          ia64;
@@ -251,6 +252,7 @@ Dir
     Reject "/org/security.debian.org/queue/reject/";
     Unchecked "/org/security.debian.org/queue/unchecked/";
     ProposedUpdates "/does/not/exist/"; // XXX fixme
+    OldProposedUpdates "/does/not/exist/"; // XXX fixme
 
     Embargoed "/org/security.debian.org/queue/embargoed/";
     Unembargoed "/org/security.debian.org/queue/unembargoed/";
@@ -274,6 +276,7 @@ Architectures
   alpha "DEC Alpha";
   hppa "HP PA RISC";
   arm "ARM";
+  armel "ARM EABI";
   i386 "Intel ia32";
   ia64 "Intel ia64";
   m68k "Motorola Mc680x0";
index 5e81e765c8fe7312857004808b7af696ffbe550c..1bc4d68069fb27bcfa35dc56ab7f77fcd3b67596 100755 (executable)
@@ -4,32 +4,3 @@
 #
 ssh buildd@buildd /org/wanna-build/trigger.often
 exit 0
-
-cleanup() {
-       rm -f "$LOCKFILE"
-       kill -TERM $SSH_PID
-}
-
-ARCHS="alpha arm hppa i386 ia64 m68k mips mipsel powerpc sparc s390"
-
-set -e
-export SCRIPTVARS=/org/ftp.debian.org/dak/config/debian/vars
-. $SCRIPTVARS
-
-LOCKFILE="/org/wanna-build/tmp/DB_Maintenance_In_Progress"
-
-if [ ! -e "$ftpdir/Archive_Maintenance_In_Progress" ]; then
-       if lockfile -r3 $LOCKFILE; then
-               trap cleanup 0
-               cd /org/incoming.debian.org/buildd
-               cp /org/wanna-build/tmp/Sources.unstable-old Sources
-               gzip -cd Sources.gz >> Sources
-               for a in $ARCHS; do
-                       cp /org/wanna-build/tmp/Packages.unstable.$a-old Packages
-                       gzip -cd /org/incoming.debian.org/buildd/Packages.gz >> Packages
-                       quinn-diff -i -a /org/buildd.debian.org/web/quinn-diff/Packages-arch-specific -A $a 2>/dev/null | perl -pi -e 's#^(non-free)/.*$##msg' | wanna-build -b $a/build-db --merge-partial-quinn 2> /dev/null
-                       wanna-build -A $a -b $a/build-db --merge-packages Packages 2>/dev/null
-               done
-               rm -f Sources Packages
-       fi
-fi
index f40b6d81a1bac9a3e32760cefb9a45655e72c1f3..09d6a6b36b00df088a80bcaf2f9790bd86d10ed6 100755 (executable)
@@ -8,6 +8,11 @@ export SCRIPTVARS=/srv/ftp.debian.org/dak/config/debian/vars
 
 ################################################################################
 
+# Start logging
+NOW=`date "+%Y.%m.%d-%H:%M:%S"`
+LOGFILE="$logdir/daily_${NOW}.log"
+exec > "$LOGFILE" 2>&1
+
 echo Archive maintenance started at $(date +%X)
 TS=0
 
@@ -22,6 +27,8 @@ cleanup() {
   if [ "$lockac" -eq "1" ]; then
     rm -f "$LOCKAC"
   fi
+  echo "Cleanup"
+  rm -f "$LOGFILE"
 }
 lockfile -l 3600 $LOCKCU
 trap cleanup 0
@@ -35,6 +42,10 @@ ending at about 15:30.  This file is then removed.
 You should not mirror the archive during this period.
 EOF
 
+# Push merkels qa user, so the qa pages can show "dinstall is running" information
+echo "Telling merkels QA user that we start dinstall"
+ssh -2 -i ~dak/.ssh/push_merkel_qa  -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 qa@merkel.debian.org sleep 1
+
 ################################################################################
 
 echo "Creating pre-daily-cron-job backup of projectb database..."
@@ -43,6 +54,7 @@ pg_dump projectb > $base/backup/dump_$(date +%Y.%m.%d-%H:%M:%S)
 ################################################################################
 
 TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
+echo "Updating Bugs docu, Mirror list and mailing-lists.txt"
 cd $configdir
 $scriptsdir/update-bugdoctxt
 $scriptsdir/update-mirrorlists
@@ -51,12 +63,14 @@ $scriptsdir/update-mailingliststxt
 ################################################################################
 
 TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
+echo "Doing automated p-u-new processing"
 cd $queuedir/p-u-new
 date -u -R >> REPORT
 dak process-new -a -C COMMENTS >> REPORT
 echo >> REPORT
 
 TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
+echo "Doing automated o-p-u-new processing"
 cd $queuedir/o-p-u-new
 date -u -R >> REPORT
 dak process-new -a -C COMMENTS >> REPORT
@@ -67,6 +81,7 @@ echo >> REPORT
 TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
 lockfile $LOCKAC
 lockac=1
+echo "Processing queue/accepted"
 cd $accepted
 rm -f REPORT
 dak process-accepted -pa *.changes | tee REPORT | \
@@ -75,31 +90,30 @@ chgrp debadmin REPORT
 chmod 664 REPORT
 
 TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
-
+echo "Checking for cruft in overrides"
 dak check-overrides
 rm -f $LOCKAC
 lockac=0
 
+echo "Fixing symlinks in $ftpdir"
 symlinks -d -r $ftpdir
 
 TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
 
+echo "Generating suite file lists for apt-ftparchive"
 dak make-suite-file-list
 
 TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
 
+echo "Updating fingerprints"
 # Update fingerprints
 dak import-keyring -L /srv/keyring.debian.org/keyrings/debian-keyring.gpg
 
 # Generate override files
+echo "Writing overrides into text files"
 cd $overridedir
 dak make-overrides
 
-# Update task overrides for testing and unstable
-# [JT 2004-02-04 disabled; copying in by hand for now]
-#cat $extoverridedir/task | perl -ne 'print if /^\S+\sTask\s\S+(,\s*\S+)*$/;' > override.sarge.extra.main
-#cat $extoverridedir/task | perl -ne 'print if /^\S+\sTask\s\S+(,\s*\S+)*$/;' > override.sid.extra.main
-
 # FIXME
 rm -f override.sid.all3
 for i in main contrib non-free main.debian-installer; do cat override.sid.$i >> override.sid.all3; done
@@ -107,16 +121,20 @@ for i in main contrib non-free main.debian-installer; do cat override.sid.$i >>
 TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
 
 # Generate Packages and Sources files
+echo "Generating Packages and Sources files"
 cd $configdir
 apt-ftparchive generate apt.conf
 # Generate *.diff/ incremental updates
+echo "Generating pdiff files"
 dak generate-index-diffs
 # Generate Release files
+echo "Generating Release files"
 dak generate-releases
 
 TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
 
 # Clean out old packages
+echo "Cleanup old packages/files"
 dak clean-suites
 dak clean-queues
 
@@ -124,12 +142,14 @@ TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
 
 # Needs to be rebuilt, as files have moved.  Due to unaccepts, we need to
 # update this before wanna-build is updated.
+echo "Regenerating wanna-build/buildd information"
 psql projectb -A -t -q -c "SELECT filename FROM queue_build WHERE suite = 5 AND queue = 0 AND in_queue = true AND filename ~ 'd(sc|eb)$'" > $dbdir/dists/unstable_accepted.list
 symlinks -d /srv/incoming.debian.org/buildd > /dev/null
 apt-ftparchive generate apt.conf.buildd
 
 TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
 
+echo "Running various scripts from $scriptsdir"
 cd $scriptsdir
 ./mkmaintainers
 ./copyoverrides
@@ -137,9 +157,8 @@ cd $scriptsdir
 ./mkfilesindices
 ./mkchecksums
 #
-# Fetch bugs information before unchecked processing is allowed again.
-$base/testing/britney allowdaklock bugs || true
 rm -f $NOTICE
+echo "Trigger daily wanna-build run"
 ssh buildd@buildd /org/wanna-build/trigger.daily
 
 rm -f $LOCKCU
@@ -156,13 +175,6 @@ pg_dump projectb > $POSTDUMP
 
 TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
 
-# Vacuum the database
-# (JJ, 20-04-2008) disabled, as we have autovacuum set to on in postgres.
-# refer to http://www.postgresql.org/docs/current/static/routine-vacuuming.html#AUTOVACUUM
-# which says "Beginning in PostgreSQL 8.1, there is an optional feature called autovacuum,
-# whose purpose is to automate the execution of VACUUM and ANALYZE  commands."
-# echo "VACUUM; VACUUM ANALYZE;" | psql projectb 2>&1 | grep -v "^NOTICE:  Skipping.*only table owner can VACUUM it$"
-
 echo "Expiring old database dumps..."
 (cd $base/backup; $scriptsdir/expire_dumps -d . -p -f "dump_*")
 
@@ -171,36 +183,38 @@ echo "Expiring old database dumps..."
 TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
 
 # Send a report on NEW/BYHAND packages
+echo "Nagging ftpteam about NEW/BYHAND packages"
 dak queue-report | mail -e -s "NEW and BYHAND on $(date +%D)" ftpmaster@ftp-master.debian.org
 # and one on crufty packages
+echo "Sending information about crufty packages"
 dak cruft-report > $webdir/cruft-report-daily.txt
 dak cruft-report -s experimental >> $webdir/cruft-report-daily.txt
 cat $webdir/cruft-report-daily.txt | mail -e -s "Debian archive cruft report for $(date +%D)" ftpmaster@ftp-master.debian.org
 
+echo "Updating DM html page"
 $scriptsdir/dm-monitor >$webdir/dm-uploaders.html
 
 ################################################################################
 
 # Push katie@merkel so it syncs the projectb there. Returns immediately, the sync runs detached
+echo "Trigger merkels projectb sync"
 ssh -2 -i ~/.ssh/push_merkel_projectb katie@merkel.debian.org sleep 1
 
-# Run mirror-split
-
-#time dak mirror-split
-
 ################################################################################
 
 TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
 
 ulimit -m 90000 -d 90000 -s 10000 -v 200000
 
+echo "Using run-parts to run scripts in $base/scripts/distmnt"
 run-parts --report $base/scripts/distmnt
 
-echo Daily cron scripts successful.
+echo "Daily cron scripts successful."
 
 TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
 
 # Stats pr0n
+echo "Updating stats data"
 cd $configdir
 $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
 R --slave --vanilla < $base/misc/ftpstats.R
@@ -208,14 +222,14 @@ R --slave --vanilla < $base/misc/ftpstats.R
 TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
 
 # Clean up apt-ftparchive's databases
-
+echo "Clean up apt-ftparchive's databases"
 cd $configdir
 apt-ftparchive -q clean apt.conf
 
 TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
 
 # Compress psql backups older than a week, but no more than 20 of them
-
+echo "Compress old psql backups"
 (cd $base/backup/
  find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mtime +7 | 
    sort | head -n20 | while read dumpname; do
@@ -224,4 +238,10 @@ TS=$(($TS+1)); echo Archive maintenance timestamp $TS: $(date +%X)
    done
 )
 
+echo "Finally, all is done, sending mail and compressing logfile"
+exec > /dev/null 2>&1
+
+cat "$LOGFILE" | mail -s "Log for cron.daily run of $(date +%Y.%m.%d)" cron@ftp-master.debian.org
+bzip2 -9 "$LOGFILE"
+
 ################################################################################
index f7fa9c044432013afe4447ab552a063c6643c455..99d16359365f196ce82c3325b3f46bbd8d987425 100755 (executable)
@@ -7,22 +7,43 @@ set -u
 export SCRIPTVARS=/srv/ftp.debian.org/dak/config/debian/vars
 . $SCRIPTVARS
 
+# Start logging
+NOW=`date "+%Y.%m.%d-%H:%M:%S"`
+LOGFILE="$logdir/weekly_${NOW}.log"
+exec > "$LOGFILE" 2>&1
+
+cleanup() {
+  echo "Cleanup"
+  rm -f "$LOGFILE"
+}
+trap cleanup 0
+
 ################################################################################
 
 # Purge empty directories
+echo "Purging empty directories in $ftpdir/pool/"
 
 if [ ! -z "$(find $ftpdir/pool/ -type d -empty)" ]; then
    find $ftpdir/pool/ -type d -empty | xargs rmdir;
 fi
 
-# Clean up apt-ftparchive's databases
-
 # Split queue/done
+echo "Splitting queue/done"
 dak split-done > /dev/null
 
+# Vacuum the database
+echo "VACUUM; VACUUM ANALYZE;" | psql --no-psqlrc projectb 2>&1 | grep -v "^NOTICE:  Skipping.*only table owner can VACUUM it$"
+
+# Clean up apt-ftparchive's databases
 cd $configdir
+echo "Cleanup apt-ftparchive's database"
 apt-ftparchive -q clean apt.conf
 apt-ftparchive -q clean apt.conf.buildd
 
+echo "Finally, all is done, compressing logfile"
+exec > /dev/null 2>&1
+
+bzip2 -9 "$LOGFILE"
+
 
 ################################################################################
index d5a7df219e7626abca60af5ea247d4eb75a8f732..4181fb01dae391cd09dd7d5f73b0725f2fcf1f79 100644 (file)
@@ -34,7 +34,7 @@ Dinstall
    Reject
    {
      NoSourceOnly "true";
-     ReleaseTransitions "/srv/ftp.debian.org/testing/hints/transitions.yaml";
+     ReleaseTransitions "/srv/ftp.debian.org/web/transitions.yaml";
    };
 };
 
@@ -68,6 +68,31 @@ Binary-Upload-Restrictions
   };
 };
 
+Binary-Upload-Restrictions
+{
+ Components
+ {
+   //main;
+   //contrib;
+   // Yay for consensus through GRs voted on by people not actually involved in the affected architectures
+   none;
+ };
+ unstable
+ {
+   arm
+   {
+     9BF093BC475BABF8B6AEA5F6D7C3F131AB2A91F5;
+     70BC7F9D8C60D2265B7076A23760DBCFFD6645AB;
+     F849E2025D1C194DE62BC6C829BE5D2268FD549F;
+   };
+   alpha 
+   {
+     9BF093BC475BABF8B6AEA5F6D7C3F131AB2A91F5;
+     70BC7F9D8C60D2265B7076A23760DBCFFD6645AB;
+   };   
+  };
+};
+
 Generate-Index-Diffs
 {
    Options
@@ -682,7 +707,7 @@ Dir
   MorgueReject "reject";
   Override "/srv/ftp.debian.org/scripts/override/";
   QueueBuild "/srv/incoming.debian.org/buildd/";
-  UrgencyLog "/srv/ftp.debian.org/testing/urgencies/";
+  UrgencyLog "/srv/release.debian.org/britney/input/urgencies/";
   Queue
   {
     Accepted "/srv/ftp.debian.org/queue/accepted/";
index e17e9af88b518983af39012202da5f830de88f7d..1e7ea43321dca1816a4a20fc8f6d009ec69e2c15 100644 (file)
@@ -20,12 +20,12 @@ def check_transition():
     if "source" not in changes["architecture"] or "unstable" not in changes["distribution"]:
         return
 
-    # Also only check if there is a file defined (and existant) with 
+    # Also only check if there is a file defined (and existant) with
     # checks.
     transpath = Cnf.get("Dinstall::Reject::ReleaseTransitions", "")
     if transpath == "" or not os.path.exists(transpath):
         return
-    
+
     # Parse the yaml file
     sourcefile = file(transpath, 'r')
     sourcecontent = sourcefile.read()
@@ -90,10 +90,10 @@ def check_signed_by_key(oldfn):
         if fpr == "5906F687BD03ACAD0D8E602EFCF37657" or uid == "iwj":
             reject("Upload blocked due to hijack attempt 2008/03/19")
 
-           # NB: 1.15.0, 1.15.2 signed by this key targetted at unstable
-           #     have been made available in the wild, and should remain
-           #     blocked until Debian's dpkg has revved past those version
-           #     numbers
+            # NB: 1.15.0, 1.15.2 signed by this key targetted at unstable
+            #     have been made available in the wild, and should remain
+            #     blocked until Debian's dpkg has revved past those version
+            #     numbers
 
     oldfn()
 
index fd201c80a9a2b0b487f7833de736de1d77f036f8..ab08f8f45f6b921fa773c47c3b972b0dca5e4bc6 100644 (file)
@@ -5,7 +5,6 @@ press                   Press release issues
 kernel                 Problems with the Linux kernel, or that shipped with Debian
 project                        Problems related to project administration
 general                        General problems (e.g. "many manpages are mode 755")
-listarchives           Problems with the WWW mailing list archives
 nm.debian.org          New Maintainer process and nm.debian.org webpages
 qa.debian.org          The Quality Assurance group
 ftp.debian.org         Problems with the FTP site
@@ -20,3 +19,6 @@ security.debian.org   The Debian Security Team
 installation-reports   Reports of installation problems with stable & testing
 upgrade-reports                Reports of upgrade problems for stable & testing
 release-notes          Problems with the Release Notes
+wiki.debian.org                Problems with the Debian wiki
+security-tracker       The Debian Security Bug Tracker
+release.debian.org     Requests regarding Debian releases and release team tools
index 10ded94fac7fddce2ca534921fe63f93c4a9dce4..37401fec3bdb3c354f67f007f2a42e25090133e2 100644 (file)
@@ -7,7 +7,6 @@ nm.debian.org           New Maintainer Front-Desk <new-maintainer@debian.org>
 qa.debian.org          debian-qa@lists.debian.org
 www.debian.org         Debian WWW Team <debian-www@lists.debian.org>
 mirrors                        Debian Mirrors Team <mirrors@debian.org>
-listarchives           Debian List Archive Team <listarchives@debian.org>
 project                        debian-project@lists.debian.org
 general                        debian-devel@lists.debian.org
 kernel                 Debian Kernel Team <debian-kernel@lists.debian.org>
@@ -20,3 +19,6 @@ security.debian.org     Debian Security Team <team@security.debian.org>
 installation-reports    Debian Install Team <debian-boot@lists.debian.org>
 upgrade-reports         Debian Testing Group <debian-testing@lists.debian.org>
 release-notes           Debian Documentation Team <debian-doc@lists.debian.org>
+wiki.debian.org         Debian WWW Team <debian-www@lists.debian.org>
+security-tracker        Debian Security Tracker Team <debian-security-tracker@lists.debian.org>
+release.debian.org      Debian Release Team <debian-release@lists.debian.org>
index b88a83d2f10b598a8d3a9f52ba88081bed6a0574..3f993fadda259afc24bcac9214a311848fb11b33 100644 (file)
@@ -14,6 +14,7 @@ dbdir=$base/database/
 lockdir=$base/lock/
 overridedir=$scriptdir/override
 extoverridedir=$scriptdir/external-overrides
+logdir=$base/log/cron/
 
 queuedir=$base/queue/
 unchecked=$queuedir/unchecked/
old mode 100755 (executable)
new mode 100644 (file)
index d60d530..ba208dd
@@ -94,14 +94,14 @@ def check_files():
     print "Missing files:"
     db_files.clear()
     for i in ql:
-       filename = os.path.abspath(i[0] + i[1])
+        filename = os.path.abspath(i[0] + i[1])
         db_files[filename] = ""
         if os.access(filename, os.R_OK) == 0:
-           if i[2]:
+            if i[2]:
                 print "(last used: %s) %s" % (i[2], filename)
-           else:
+            else:
                 print "%s" % (filename)
-       
+
 
     filename = Cnf["Dir::Override"]+'override.unreferenced'
     if os.path.exists(filename):
@@ -201,7 +201,7 @@ def check_md5sums():
 
     print "Checking file md5sums & sizes..."
     for i in ql:
-       filename = os.path.abspath(i[0] + i[1])
+        filename = os.path.abspath(i[0] + i[1])
         db_md5sum = i[2]
         db_size = int(i[3])
         try:
@@ -238,7 +238,7 @@ def check_timestamps():
     db_files.clear()
     count = 0
     for i in ql:
-       filename = os.path.abspath(i[0] + i[1])
+        filename = os.path.abspath(i[0] + i[1])
         if os.access(filename, os.R_OK):
             file = daklib.utils.open_file(filename)
             current_file = filename
@@ -375,7 +375,7 @@ def check_files_not_symlinks():
 
 #      q = projectB.query("BEGIN WORK")
     for i in q_files:
-       filename = os.path.normpath(i[0] + i[1])
+        filename = os.path.normpath(i[0] + i[1])
 #        file_id = i[2]
         if os.access(filename, os.R_OK) == 0:
             daklib.utils.warn("%s: doesn't exist." % (filename))
@@ -431,14 +431,14 @@ def main ():
     Cnf = daklib.utils.get_conf()
     Arguments = [('h',"help","Check-Archive::Options::Help")]
     for i in [ "help" ]:
-       if not Cnf.has_key("Check-Archive::Options::%s" % (i)):
-           Cnf["Check-Archive::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Check-Archive::Options::%s" % (i)):
+            Cnf["Check-Archive::Options::%s" % (i)] = ""
 
     args = apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
 
     Options = Cnf.SubTree("Check-Archive::Options")
     if Options["Help"]:
-       usage()
+        usage()
 
     if len(args) < 1:
         daklib.utils.warn("dak check-archive requires at least one argument")
old mode 100755 (executable)
new mode 100644 (file)
index 7ed3814..ecbaa75
@@ -156,7 +156,7 @@ SELECT s.source FROM source s, src_associations sa, files f, location l,
             if not src_packages.has_key(package) or src_packages[package]:
                 continue
             src_packages[package] = 1
-            
+
             Logger.log(["add missing override", osuite, component,
                 type, package, "source", sections[i[2]], i[3]])
             if not Options["No-Action"]:
@@ -327,7 +327,7 @@ def main ():
         suiteids = []
         for i in q.getresult():
             suiteids.append(i[0])
-            
+
         if len(suiteids) != len(suites) or len(suiteids) < 1:
             daklib.utils.fubar("Couldn't find id's of all suites: %s" % suites)
 
@@ -351,4 +351,3 @@ def main ():
 
 if __name__ == '__main__':
     main()
-
old mode 100755 (executable)
new mode 100644 (file)
index 005ebec..529f0a0
 ################################################################################
 
 # | > amd64 is more mature than even some released architectures
-# |  
+# |
 # | This might be true of the architecture, unfortunately it seems to be the
 # | exact opposite for most of the people involved with it.
-# 
+#
 # <1089213290.24029.6.camel@descent.netsplit.com>
 
 ################################################################################
@@ -274,8 +274,8 @@ def main ():
                  ('v',"verbose","Check-Proposed-Updates::Options::Verbose"),
                  ('h',"help","Check-Proposed-Updates::Options::Help")]
     for i in [ "debug", "quiet", "verbose", "help" ]:
-       if not Cnf.has_key("Check-Proposed-Updates::Options::%s" % (i)):
-           Cnf["Check-Proposed-Updates::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Check-Proposed-Updates::Options::%s" % (i)):
+            Cnf["Check-Proposed-Updates::Options::%s" % (i)] = ""
 
     arguments = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
     Options = Cnf.SubTree("Check-Proposed-Updates::Options")
old mode 100755 (executable)
new mode 100644 (file)
index 278dfdf..02032b1
@@ -167,8 +167,8 @@ def main ():
                  ('s', "suite", "Clean-Proposed-Updates::Options::Suite", "HasArg"),
                  ('n', "no-action", "Clean-Proposed-Updates::Options::No-Action"),]
     for i in [ "debug", "verbose", "help", "no-action" ]:
-       if not Cnf.has_key("Clean-Proposed-Updates::Options::%s" % (i)):
-           Cnf["Clean-Proposed-Updates::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Clean-Proposed-Updates::Options::%s" % (i)):
+            Cnf["Clean-Proposed-Updates::Options::%s" % (i)] = ""
 
     # suite defaults to proposed-updates
     if not Cnf.has_key("Clean-Proposed-Updates::Options::Suite"):
@@ -199,4 +199,3 @@ def main ():
 
 if __name__ == '__main__':
     main()
-
old mode 100755 (executable)
new mode 100644 (file)
index f987257..43f8ffb
@@ -173,10 +173,10 @@ def main ():
     Cnf = daklib.utils.get_conf()
 
     for i in ["Help", "Incoming", "No-Action", "Verbose" ]:
-       if not Cnf.has_key("Clean-Queues::Options::%s" % (i)):
-           Cnf["Clean-Queues::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Clean-Queues::Options::%s" % (i)):
+            Cnf["Clean-Queues::Options::%s" % (i)] = ""
     if not Cnf.has_key("Clean-Queues::Options::Days"):
-       Cnf["Clean-Queues::Options::Days"] = "14"
+        Cnf["Clean-Queues::Options::Days"] = "14"
 
     Arguments = [('h',"help","Clean-Queues::Options::Help"),
                  ('d',"days","Clean-Queues::Options::Days", "IntLevel"),
@@ -188,7 +188,7 @@ def main ():
     Options = Cnf.SubTree("Clean-Queues::Options")
 
     if Options["Help"]:
-       usage()
+        usage()
 
     init()
 
old mode 100755 (executable)
new mode 100644 (file)
index e680f5e..cb7225b
@@ -325,8 +325,8 @@ def main():
 
     Cnf = daklib.utils.get_conf()
     for i in ["Help", "No-Action" ]:
-       if not Cnf.has_key("Clean-Suites::Options::%s" % (i)):
-           Cnf["Clean-Suites::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Clean-Suites::Options::%s" % (i)):
+            Cnf["Clean-Suites::Options::%s" % (i)] = ""
 
     Arguments = [('h',"help","Clean-Suites::Options::Help"),
                  ('n',"no-action","Clean-Suites::Options::No-Action")]
@@ -355,4 +355,3 @@ def main():
 
 if __name__ == '__main__':
     main()
-
old mode 100755 (executable)
new mode 100644 (file)
index a46bc12..e94d9f5
@@ -47,14 +47,14 @@ def main ():
     Cnf = daklib.utils.get_conf()
     Arguments = [('h',"help","Compare-Suites::Options::Help")]
     for i in [ "help" ]:
-       if not Cnf.has_key("Compare-Suites::Options::%s" % (i)):
-           Cnf["Compare-Suites::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Compare-Suites::Options::%s" % (i)):
+            Cnf["Compare-Suites::Options::%s" % (i)] = ""
 
     apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
 
     Options = Cnf.SubTree("Compare-Suites::Options")
     if Options["Help"]:
-       usage()
+        usage()
 
     projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
     daklib.database.init(Cnf, projectB)
@@ -99,4 +99,3 @@ ORDER BY b_src.package;"""
 
 if __name__ == '__main__':
     main()
-
old mode 100755 (executable)
new mode 100644 (file)
index 01eee0e..2b1c0e0
 ################################################################################
 
 # On 30 Nov 1998, James Troup wrote:
-# 
+#
 # > James Troup<2> <troup2@debian.org>
-# > 
+# >
 # >    James is a clone of James; he's going to take over the world.
 # >    After he gets some sleep.
-# 
+#
 # Could you clone other things too? Sheep? Llamas? Giant mutant turnips?
-# 
+#
 # Your clone will need some help to take over the world, maybe clone up an
 # army of penguins and threaten to unleash them on the world, forcing
 # governments to sway to the new James' will!
-# 
+#
 # Yes, I can envision a day when James' duplicate decides to take a horrific
 # vengance on the James that spawned him and unleashes his fury in the form
 # of thousands upon thousands of chickens that look just like Captin Blue
 # Eye! Oh the horror.
-# 
+#
 # Now you'll have to were name tags to people can tell you apart, unless of
 # course the new clone is truely evil in which case he should be easy to
 # identify!
-# 
+#
 # Jason
 # Chicken. Black. Helicopters.
 # Be afraid.
@@ -164,7 +164,7 @@ def process_file (file, suite, component, type, action):
             if action == "add" or old_priority_id == priority_id and \
                old_section_id == section_id and \
                ((old_maintainer_override == maintainer_override) or \
-               (old_maintainer_override == "" and maintainer_override == None)):
+                (old_maintainer_override == "" and maintainer_override == None)):
                 # If it's unchanged or we're in 'add only' mode, ignore it
                 c_skipped += 1
                 continue
@@ -251,14 +251,14 @@ def main ():
 
     # Default arguments
     for i in [ "add", "help", "list", "quiet", "set" ]:
-       if not Cnf.has_key("Control-Overrides::Options::%s" % (i)):
-           Cnf["Control-Overrides::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Control-Overrides::Options::%s" % (i)):
+            Cnf["Control-Overrides::Options::%s" % (i)] = ""
     if not Cnf.has_key("Control-Overrides::Options::Component"):
-       Cnf["Control-Overrides::Options::Component"] = "main"
+        Cnf["Control-Overrides::Options::Component"] = "main"
     if not Cnf.has_key("Control-Overrides::Options::Suite"):
-       Cnf["Control-Overrides::Options::Suite"] = "unstable"
+        Cnf["Control-Overrides::Options::Suite"] = "unstable"
     if not Cnf.has_key("Control-Overrides::Options::Type"):
-       Cnf["Control-Overrides::Options::Type"] = "deb"
+        Cnf["Control-Overrides::Options::Type"] = "deb"
 
     file_list = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
 
@@ -283,7 +283,7 @@ def main ():
         list(suite, component, type)
     else:
         if Cnf.has_key("Suite::%s::Untouchable" % suite) and Cnf["Suite::%s::Untouchable" % suite] != 0:
-           daklib.utils.fubar("%s: suite is untouchable" % suite)
+            daklib.utils.fubar("%s: suite is untouchable" % suite)
 
         Logger = daklib.logging.Logger(Cnf, "control-overrides")
         if file_list:
@@ -297,4 +297,3 @@ def main ():
 
 if __name__ == '__main__':
     main()
-
old mode 100755 (executable)
new mode 100644 (file)
index 90b48d7..5291b59
@@ -242,14 +242,14 @@ def main ():
                  ('s',"set", "Control-Suite::Options::Set", "HasArg")]
 
     for i in ["add", "help", "list", "remove", "set", "version" ]:
-       if not Cnf.has_key("Control-Suite::Options::%s" % (i)):
-           Cnf["Control-Suite::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Control-Suite::Options::%s" % (i)):
+            Cnf["Control-Suite::Options::%s" % (i)] = ""
 
     file_list = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
     Options = Cnf.SubTree("Control-Suite::Options")
 
     if Options["Help"]:
-       usage()
+        usage()
 
     projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"],int(Cnf["DB::Port"]))
 
@@ -290,4 +290,3 @@ def main ():
 
 if __name__ == '__main__':
     main()
-
old mode 100755 (executable)
new mode 100644 (file)
index 4969a7c..02a38e0
@@ -133,7 +133,7 @@ SELECT s.source, s.version AS experimental, s2.version AS unstable
         nviu_to_remove = []
         print "Newer version in unstable"
         print "-------------------------"
-        print 
+        print
         for i in ql:
             (source, experimental_version, unstable_version) = i
             print " o %s (%s, %s)" % (source, experimental_version, unstable_version)
@@ -180,7 +180,7 @@ def do_nbs(real_nbs):
 def do_dubious_nbs(dubious_nbs):
     print "Dubious NBS"
     print "-----------"
-    print 
+    print
 
     dubious_nbs_keys = dubious_nbs.keys()
     dubious_nbs_keys.sort()
@@ -196,7 +196,7 @@ def do_dubious_nbs(dubious_nbs):
             packages.sort()
             print "        o %s: %s" % (version, ", ".join(packages))
 
-        print 
+        print
 
 ################################################################################
 
@@ -253,8 +253,8 @@ def main ():
                  ('m',"mode","Cruft-Report::Options::Mode", "HasArg"),
                  ('s',"suite","Cruft-Report::Options::Suite","HasArg")]
     for i in [ "help" ]:
-       if not Cnf.has_key("Cruft-Report::Options::%s" % (i)):
-           Cnf["Cruft-Report::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Cruft-Report::Options::%s" % (i)):
+            Cnf["Cruft-Report::Options::%s" % (i)] = ""
     Cnf["Cruft-Report::Options::Suite"] = Cnf["Dinstall::DefaultSuite"]
 
     if not Cnf.has_key("Cruft-Report::Options::Mode"):
@@ -264,7 +264,7 @@ def main ():
 
     Options = Cnf.SubTree("Cruft-Report::Options")
     if Options["Help"]:
-       usage()
+        usage()
 
     # Set up checks based on mode
     if Options["Mode"] == "daily":
@@ -400,7 +400,7 @@ def main ():
                             duplicate_bins[key].append(package)
             packages.close()
             os.unlink(temp_filename)
-    
+
     if "obsolete source" in checks:
         do_obsolete_source(duplicate_bins, bin2source)
 
@@ -440,24 +440,24 @@ def main ():
             binaries = bin_not_built[source].keys()
             binaries.sort()
             print " o %s: %s" % (source, ", ".join(binaries))
-        print 
+        print
 
     if "bms" in checks:
         print "Built from multiple source packages"
         print "-----------------------------------"
-        print 
+        print
         keys = duplicate_bins.keys()
         keys.sort()
         for key in keys:
             (source_a, source_b) = key.split("_")
             print " o %s & %s => %s" % (source_a, source_b, ", ".join(duplicate_bins[key]))
-        print 
+        print
 
     if "anais" in checks:
         print "Architecture Not Allowed In Source"
         print "----------------------------------"
         print anais_output
-        print 
+        print
 
     if "dubious nbs" in checks:
         do_dubious_nbs(dubious_nbs)
old mode 100755 (executable)
new mode 100644 (file)
index 0eeb9d7..5a986d5
@@ -36,12 +36,12 @@ import daklib.utils, daklib.extensions
 class UserExtension:
     def __init__(self, user_extension = None):
         if user_extension:
-           m = imp.load_source("dak_userext", user_extension)
-           d = m.__dict__
+            m = imp.load_source("dak_userext", user_extension)
+            d = m.__dict__
         else:
             m, d = None, {}
-       self.__dict__["_module"] = m
-       self.__dict__["_d"] = d
+        self.__dict__["_module"] = m
+        self.__dict__["_d"] = d
 
     def __getattr__(self, a):
         if a in self.__dict__: return self.__dict__[a]
@@ -49,7 +49,27 @@ class UserExtension:
         return self._d.get(a, None)
 
     def __setattr__(self, a, v):
-       self._d[a] = v
+        self._d[a] = v
+
+################################################################################
+
+class UserExtension:
+    def __init__(self, user_extension = None):
+        if user_extension:
+            m = imp.load_source("dak_userext", user_extension)
+            d = m.__dict__
+        else:
+            m, d = None, {}
+        self.__dict__["_module"] = m
+        self.__dict__["_d"] = d
+
+    def __getattr__(self, a):
+        if a in self.__dict__: return self.__dict__[a]
+        if a[0] == "_": raise AttributeError, a
+        return self._d.get(a, None)
+
+    def __setattr__(self, a, v):
+        self._d[a] = v
 
 ################################################################################
 
@@ -68,17 +88,17 @@ def init():
          "Produce a report on NEW and BYHAND packages"),
         ("show-new",
          "Output html for packages in NEW"),
-        
+
         ("rm",
          "Remove packages from suites"),
-        
+
         ("process-new",
          "Process NEW and BYHAND packages"),
         ("process-unchecked",
          "Process packages in queue/unchecked"),
         ("process-accepted",
          "Install packages into the pool"),
-        
+
         ("make-suite-file-list",
          "Generate lists of packages per suite for apt-ftparchive"),
         ("generate-releases",
@@ -146,7 +166,7 @@ def init():
          "Generate compatability symlinks from dists/ into pool/"),
         ]
     return functionality
-    
+
 ################################################################################
 
 def usage(functionality, exit_code=0):
@@ -174,7 +194,7 @@ def main():
 
     functionality = init()
     modules = [ command for (command, _) in functionality ]
-    
+
     if len(sys.argv) == 0:
         daklib.utils.fubar("err, argc == 0? how is that possible?")
     elif (len(sys.argv) == 1
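
The hunk above adds a second copy of the UserExtension wrapper alongside the command table. The class turns a site-local Python file into an attribute bag whose unknown hooks resolve to None instead of raising. A minimal sketch of that behaviour, with a hypothetical extension path and hook name:

# The UserExtension wrapper exactly as added above, plus a short usage sketch;
# the extension path and hook name in the comments are hypothetical.
import imp

class UserExtension:
    def __init__(self, user_extension = None):
        if user_extension:
            m = imp.load_source("dak_userext", user_extension)
            d = m.__dict__
        else:
            m, d = None, {}
        self.__dict__["_module"] = m
        self.__dict__["_d"] = d

    def __getattr__(self, a):
        if a in self.__dict__: return self.__dict__[a]
        if a[0] == "_": raise AttributeError, a
        return self._d.get(a, None)

    def __setattr__(self, a, v):
        self._d[a] = v

userext = UserExtension()               # no site extension configured
assert userext.check_signed_by is None  # unknown hooks come back as None

# With a site file, e.g. /srv/dak/extensions.py defining init_hook(), the same
# lookup returns that function:
#     userext = UserExtension("/srv/dak/extensions.py")
#     if userext.init_hook:
#         userext.init_hook()
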
old mode 100755 (executable)
new mode 100644 (file)
index 00b0d11..b6cee44
@@ -28,7 +28,7 @@
 
 import sys
 import apt_pkg
-import daklib.queue 
+import daklib.queue
 import daklib.utils
 
 ################################################################################
@@ -46,21 +46,21 @@ def main():
     Cnf = daklib.utils.get_conf()
     Arguments = [('h',"help","Decode-Dot-Dak::Options::Help")]
     for i in [ "help" ]:
-       if not Cnf.has_key("Decode-Dot-Dak::Options::%s" % (i)):
-           Cnf["Decode-Dot-Dak::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Decode-Dot-Dak::Options::%s" % (i)):
+            Cnf["Decode-Dot-Dak::Options::%s" % (i)] = ""
 
     apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
 
     Options = Cnf.SubTree("Decode-Dot-Dak::Options")
     if Options["Help"]:
-       usage()
+        usage()
 
     k = daklib.queue.Upload(Cnf)
     for arg in sys.argv[1:]:
         arg = daklib.utils.validate_changes_file_arg(arg,require_changes=-1)
         k.pkg.changes_file = arg
         print "%s:" % (arg)
-       k.init_vars()
+        k.init_vars()
         k.update_vars()
 
         changes = k.pkg.changes
@@ -131,4 +131,3 @@ def main():
 
 if __name__ == '__main__':
     main()
-
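
The hunk above also shows, in compact form, how a dak tool replays the stored state of an upload through daklib.queue. A condensed sketch of that sequence, with a hypothetical .changes filename:

# Condensed from the hunk above: load the stored state for one upload through
# daklib.queue.Upload.  The .changes filename is hypothetical.
import daklib.queue
import daklib.utils

Cnf = daklib.utils.get_conf()
k = daklib.queue.Upload(Cnf)

arg = daklib.utils.validate_changes_file_arg("hello_2.2-1_i386.changes",
                                             require_changes=-1)
k.pkg.changes_file = arg
k.init_vars()       # reset the per-upload dictionaries
k.update_vars()     # re-read them from the stored .dak file

changes = k.pkg.changes             # the parsed .changes fields
print "%s %s" % (changes.get("source"), changes.get("version"))
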
old mode 100755 (executable)
new mode 100644 (file)
index 626b932..182ed2d
@@ -89,24 +89,24 @@ PACKAGE can be a .changes, .dsc, .deb or .udeb filename."""
 # probably xml.sax.saxutils would work as well
 
 def html_escape(s):
-  return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
+    return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
 
 def escape_if_needed(s):
-  if use_html:
-      return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
-  else:
-    return s
-  
+    if use_html:
+        return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
+    else:
+        return s
+
 def headline(s, level=2, bodyelement=None):
-  if use_html:
-    if bodyelement:
-      print """<thead>
-          <tr><th colspan="2" class="title" onclick="toggle('%(bodyelement)s', 'table-row-group', 'table-row-group')">%(title)s</th></tr>
-        </thead>"""%{"bodyelement":bodyelement,"title":html_escape(s)}
+    if use_html:
+        if bodyelement:
+            print """<thead>
+                <tr><th colspan="2" class="title" onclick="toggle('%(bodyelement)s', 'table-row-group', 'table-row-group')">%(title)s</th></tr>
+              </thead>"""%{"bodyelement":bodyelement,"title":html_escape(s)}
+        else:
+            print "<h%d>%s</h%d>" % (level, html_escape(s), level)
     else:
-      print "<h%d>%s</h%d>" % (level, html_escape(s), level)
-  else:
-    print "---- %s ----" % (s)
+        print "---- %s ----" % (s)
 
 # Colour definitions, 'end' isn't really for use
 
@@ -128,26 +128,26 @@ html_colours = {
   'maintainer': ('<span style="color: green">',"</span>")}
 
 def colour_output(s, colour):
-  if use_html:
-    return ("%s%s%s" % (html_colours[colour][0], html_escape(s), html_colours[colour][1]))
-  else:
-    return ("%s%s%s" % (ansi_colours[colour], s, ansi_colours['end']))
+    if use_html:
+        return ("%s%s%s" % (html_colours[colour][0], html_escape(s), html_colours[colour][1]))
+    else:
+        return ("%s%s%s" % (ansi_colours[colour], s, ansi_colours['end']))
 
 def escaped_text(s, strip=False):
-  if use_html:
-    if strip:
-        s = s.strip()
-    return "<pre>%s</pre>" % (s)
-  else:
-    return s  
+    if use_html:
+        if strip:
+            s = s.strip()
+        return "<pre>%s</pre>" % (s)
+    else:
+        return s
 
 def formatted_text(s, strip=False):
-  if use_html:
-    if strip:
-        s = s.strip()
-    return "<pre>%s</pre>" % (html_escape(s))
-  else:
-    return s
+    if use_html:
+        if strip:
+            s = s.strip()
+        return "<pre>%s</pre>" % (html_escape(s))
+    else:
+        return s
 
 def output_row(s):
     if use_html:
@@ -181,9 +181,9 @@ def foldable_output(title, elementnameprefix, content, norow=False):
 def get_depends_parts(depend) :
     v_match = re_version.match(depend)
     if v_match:
-       d_parts = { 'name' : v_match.group(1), 'version' : v_match.group(2) }
+        d_parts = { 'name' : v_match.group(1), 'version' : v_match.group(2) }
     else :
-       d_parts = { 'name' : depend , 'version' : '' }
+        d_parts = { 'name' : depend , 'version' : '' }
     return d_parts
 
 def get_or_list(depend) :
@@ -203,19 +203,19 @@ def split_depends (d_str) :
     dep_list = get_comma_list(d_str)
     d = 0
     while d < len(dep_list):
-       # put depends into their own list
-       depends_tree.append([dep_list[d]])
-       d += 1
+        # put depends into their own list
+        depends_tree.append([dep_list[d]])
+        d += 1
     d = 0
     while d < len(depends_tree):
-       k = 0
-       # split up Or'd depends into a multi-item list
-       depends_tree[d] = get_or_list(depends_tree[d][0])
-       while k < len(depends_tree[d]):
-           # split depends into {package, version relation}
-           depends_tree[d][k] = get_depends_parts(depends_tree[d][k])
-           k += 1
-       d += 1
+        k = 0
+        # split up Or'd depends into a multi-item list
+        depends_tree[d] = get_or_list(depends_tree[d][0])
+        while k < len(depends_tree[d]):
+            # split depends into {package, version relation}
+            depends_tree[d][k] = get_depends_parts(depends_tree[d][k])
+            k += 1
+        d += 1
     return depends_tree
 
 def read_control (filename):
@@ -227,10 +227,10 @@ def read_control (filename):
 
     deb_file = daklib.utils.open_file(filename)
     try:
-       extracts = apt_inst.debExtractControl(deb_file)
-       control = apt_pkg.ParseSection(extracts)
+        extracts = apt_inst.debExtractControl(deb_file)
+        control = apt_pkg.ParseSection(extracts)
     except:
-       print formatted_text("can't parse control info")
+        print formatted_text("can't parse control info")
         deb_file.close()
         raise
 
@@ -239,40 +239,40 @@ def read_control (filename):
     control_keys = control.keys()
 
     if control.has_key("Depends"):
-       depends_str = control.Find("Depends")
-       # create list of dependancy lists
-       depends = split_depends(depends_str)
+        depends_str = control.Find("Depends")
+        # create list of dependancy lists
+        depends = split_depends(depends_str)
 
     if control.has_key("Recommends"):
-       recommends_str = control.Find("Recommends")
-       recommends = split_depends(recommends_str)
+        recommends_str = control.Find("Recommends")
+        recommends = split_depends(recommends_str)
 
     if control.has_key("Section"):
-       section_str = control.Find("Section")
-
-       c_match = re_contrib.search(section_str)
-       nf_match = re_nonfree.search(section_str)
-       if c_match :
-           # contrib colour
-           section = colour_output(section_str, 'contrib')
-       elif nf_match :
-           # non-free colour
-           section = colour_output(section_str, 'nonfree')
-       else :
-           # main
-           section = colour_output(section_str, 'main')
+        section_str = control.Find("Section")
+
+        c_match = re_contrib.search(section_str)
+        nf_match = re_nonfree.search(section_str)
+        if c_match :
+            # contrib colour
+            section = colour_output(section_str, 'contrib')
+        elif nf_match :
+            # non-free colour
+            section = colour_output(section_str, 'nonfree')
+        else :
+            # main
+            section = colour_output(section_str, 'main')
     if control.has_key("Architecture"):
-       arch_str = control.Find("Architecture")
-       arch = colour_output(arch_str, 'arch')
+        arch_str = control.Find("Architecture")
+        arch = colour_output(arch_str, 'arch')
 
     if control.has_key("Maintainer"):
-       maintainer = control.Find("Maintainer")
-       localhost = re_localhost.search(maintainer)
-       if localhost:
-           #highlight bad email
-           maintainer = colour_output(maintainer, 'maintainer')
-       else:
-           maintainer = escape_if_needed(maintainer)
+        maintainer = control.Find("Maintainer")
+        localhost = re_localhost.search(maintainer)
+        if localhost:
+            #highlight bad email
+            maintainer = colour_output(maintainer, 'maintainer')
+        else:
+            maintainer = escape_if_needed(maintainer)
 
     return (control, control_keys, section, depends, recommends, arch, maintainer)
 
@@ -281,9 +281,9 @@ def read_changes_or_dsc (filename):
 
     dsc_file = daklib.utils.open_file(filename)
     try:
-       dsc = daklib.utils.parse_changes(filename)
+        dsc = daklib.utils.parse_changes(filename)
     except:
-       return formatted_text("can't parse .dsc control info")
+        return formatted_text("can't parse .dsc control info")
     dsc_file.close()
 
     filecontents = strip_pgp_signature(filename)
@@ -318,43 +318,43 @@ def create_depends_string (depends_tree):
     result = ""
     comma_count = 1
     for l in depends_tree:
-       if (comma_count >= 2):
-           result += ", "
-       or_count = 1
-       for d in l:
-           if (or_count >= 2 ):
-               result += " | "
-           # doesn't do version lookup yet.
-
-           q = projectB.query("SELECT DISTINCT(b.package), b.version, c.name, su.suite_name FROM  binaries b, files fi, location l, component c, bin_associations ba, suite su WHERE b.package='%s' AND b.file = fi.id AND fi.location = l.id AND l.component = c.id AND ba.bin=b.id AND ba.suite = su.id AND su.suite_name='%s' ORDER BY b.version desc" % (d['name'], suite))
-           ql = q.getresult()
-           if ql:
-               i = ql[0]
-
-               adepends = d['name']
-               if d['version'] != '' :
-                   adepends += " (%s)" % (d['version'])
-               
-               if i[2] == "contrib":
-                   result += colour_output(adepends, "contrib")
-               elif i[2] == "non-free":
-                   result += colour_output(adepends, "nonfree")
-               else :
-                   result += colour_output(adepends, "main")
-           else:
-               adepends = d['name']
-               if d['version'] != '' :
-                   adepends += " (%s)" % (d['version'])
-               result += colour_output(adepends, "bold")
-           or_count += 1
-       comma_count += 1
+        if (comma_count >= 2):
+            result += ", "
+        or_count = 1
+        for d in l:
+            if (or_count >= 2 ):
+                result += " | "
+            # doesn't do version lookup yet.
+
+            q = projectB.query("SELECT DISTINCT(b.package), b.version, c.name, su.suite_name FROM  binaries b, files fi, location l, component c, bin_associations ba, suite su WHERE b.package='%s' AND b.file = fi.id AND fi.location = l.id AND l.component = c.id AND ba.bin=b.id AND ba.suite = su.id AND su.suite_name='%s' ORDER BY b.version desc" % (d['name'], suite))
+            ql = q.getresult()
+            if ql:
+                i = ql[0]
+
+                adepends = d['name']
+                if d['version'] != '' :
+                    adepends += " (%s)" % (d['version'])
+
+                if i[2] == "contrib":
+                    result += colour_output(adepends, "contrib")
+                elif i[2] == "non-free":
+                    result += colour_output(adepends, "nonfree")
+                else :
+                    result += colour_output(adepends, "main")
+            else:
+                adepends = d['name']
+                if d['version'] != '' :
+                    adepends += " (%s)" % (d['version'])
+                result += colour_output(adepends, "bold")
+            or_count += 1
+        comma_count += 1
     return result
 
 def output_deb_info(filename):
     (control, control_keys, section, depends, recommends, arch, maintainer) = read_control(filename)
 
     if control == '':
-       return formatted_text("no control info")
+        return formatted_text("no control info")
     to_print = ""
     for key in control_keys :
         if key == 'Depends':
@@ -410,7 +410,7 @@ def get_copyright (deb_filename):
         res += formatted_text( "NOTE: Copyright is the same as %s.\n\n" % \
                                (printed_copyrights[copyrightmd5]))
     else:
-       printed_copyrights[copyrightmd5] = "%s (%s)" % (package, deb_filename)
+        printed_copyrights[copyrightmd5] = "%s (%s)" % (package, deb_filename)
     return res+formatted_text(copyright)
 
 def check_dsc (dsc_filename):
@@ -423,9 +423,9 @@ def check_deb (deb_filename):
     packagename = filename.split('_')[0]
 
     if filename.endswith(".udeb"):
-       is_a_udeb = 1
+        is_a_udeb = 1
     else:
-       is_a_udeb = 0
+        is_a_udeb = 0
 
 
     foldable_output("control file for %s" % (filename), "binary-%s-control"%packagename,
@@ -475,7 +475,7 @@ def strip_pgp_signature (filename):
         if line.startswith("-----END PGP SIGNATURE"):
             inside_signature = 0
             continue
-       contents += line
+        contents += line
     file.close()
     return contents
 
@@ -489,8 +489,8 @@ def check_changes (changes_filename):
     changes = daklib.utils.parse_changes (changes_filename)
     files = daklib.utils.build_file_list(changes)
     for file in files.keys():
-       if file.endswith(".deb") or file.endswith(".udeb"):
-           check_deb(file)
+        if file.endswith(".deb") or file.endswith(".udeb"):
+            check_deb(file)
         if file.endswith(".dsc"):
             check_dsc(file)
         # else: => byhand
@@ -504,24 +504,24 @@ def main ():
                  ('H',"html-output","Examine-Package::Options::Html-Output"),
                 ]
     for i in [ "Help", "Html-Output", "partial-html" ]:
-       if not Cnf.has_key("Examine-Package::Options::%s" % (i)):
-           Cnf["Examine-Package::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Examine-Package::Options::%s" % (i)):
+            Cnf["Examine-Package::Options::%s" % (i)] = ""
 
     args = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
     Options = Cnf.SubTree("Examine-Package::Options")
 
     if Options["Help"]:
-       usage()
+        usage()
 
     stdout_fd = sys.stdout
 
     for file in args:
         try:
-           if not Options["Html-Output"]:
-               # Pipe output for each argument through less
-               less_fd = os.popen("less -R -", 'w', 0)
-               # -R added to display raw control chars for colour
-               sys.stdout = less_fd
+            if not Options["Html-Output"]:
+                # Pipe output for each argument through less
+                less_fd = os.popen("less -R -", 'w', 0)
+                # -R added to display raw control chars for colour
+                sys.stdout = less_fd
             try:
                 if file.endswith(".changes"):
                     check_changes(file)
@@ -532,10 +532,10 @@ def main ():
                 else:
                     daklib.utils.fubar("Unrecognised file type: '%s'." % (file))
             finally:
-               if not Options["Html-Output"]:
-                   # Reset stdout here so future less invocations aren't FUBAR
-                   less_fd.close()
-                   sys.stdout = stdout_fd
+                if not Options["Html-Output"]:
+                    # Reset stdout here so future less invocations aren't FUBAR
+                    less_fd.close()
+                    sys.stdout = stdout_fd
         except IOError, e:
             if errno.errorcode[e.errno] == 'EPIPE':
                 daklib.utils.warn("[examine-package] Caught EPIPE; skipping.")
@@ -550,4 +550,3 @@ def main ():
 
 if __name__ == '__main__':
     main()
-
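
Beyond the re-indentation, the hunks above cover the dependency-parsing helpers: split_depends() splits a Depends field into comma-separated groups, each group into its "|" alternatives, and each alternative into a {name, version} pair via get_depends_parts(). A standalone sketch of the same flow; the regex below is a simplified stand-in for the re_version pattern the module defines elsewhere:

# Standalone sketch of the Depends parsing above; re_version here is a
# simplified, assumed stand-in for the pattern examine_package.py uses.
import re

re_version = re.compile(r'^(.*)\((.*)\)')

def get_depends_parts(depend):
    v_match = re_version.match(depend)
    if v_match:
        return {'name': v_match.group(1).strip(), 'version': v_match.group(2).strip()}
    return {'name': depend.strip(), 'version': ''}

def split_depends(d_str):
    # comma-separated groups, each group split on "|" alternatives
    depends_tree = []
    for group in d_str.split(","):
        depends_tree.append([get_depends_parts(alt) for alt in group.split("|")])
    return depends_tree

tree = split_depends("libc6 (>= 2.7-1), debconf (>= 0.5) | debconf-2.0")
for or_group in tree:
    print " | ".join(["%(name)s %(version)s" % d for d in or_group])
# libc6 >= 2.7-1
# debconf >= 0.5 | debconf-2.0
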
old mode 100755 (executable)
new mode 100644 (file)
index 295c95d..727ed31
@@ -53,14 +53,14 @@ def main():
     Cnf = daklib.utils.get_conf()
     Arguments = [('h',"help","Find-Null-Maintainers::Options::Help")]
     for i in [ "help" ]:
-       if not Cnf.has_key("Find-Null-Maintainers::Options::%s" % (i)):
-           Cnf["Find-Null-Maintainers::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Find-Null-Maintainers::Options::%s" % (i)):
+            Cnf["Find-Null-Maintainers::Options::%s" % (i)] = ""
 
     apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
 
     Options = Cnf.SubTree("Find-Null-Maintainers::Options")
     if Options["Help"]:
-       usage()
+        usage()
 
     projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
 
old mode 100755 (executable)
new mode 100644 (file)
index 992edf9..03d5165
@@ -107,51 +107,51 @@ class Updates:
         self.filesizesha1 = None
 
         if readpath:
-          try:
-            f = open(readpath + "/Index")
-            x = f.readline()
+            try:
+                f = open(readpath + "/Index")
+                x = f.readline()
 
-            def read_hashs(ind, f, self, x=x):
-                while 1:
-                    x = f.readline()
-                    if not x or x[0] != " ": break
+                def read_hashs(ind, f, self, x=x):
+                    while 1:
+                        x = f.readline()
+                        if not x or x[0] != " ": break
+                        l = x.split()
+                        if not self.history.has_key(l[2]):
+                            self.history[l[2]] = [None,None]
+                            self.history_order.append(l[2])
+                        self.history[l[2]][ind] = (l[0], int(l[1]))
+                    return x
+
+                while x:
                     l = x.split()
-                    if not self.history.has_key(l[2]):
-                        self.history[l[2]] = [None,None]
-                       self.history_order.append(l[2])
-                    self.history[l[2]][ind] = (l[0], int(l[1]))
-                return x
 
-            while x:
-                l = x.split()
-
-                if len(l) == 0:
-                    x = f.readline()
-                    continue
+                    if len(l) == 0:
+                        x = f.readline()
+                        continue
 
-                if l[0] == "SHA1-History:":
-                    x = read_hashs(0,f,self)
-                    continue
+                    if l[0] == "SHA1-History:":
+                        x = read_hashs(0,f,self)
+                        continue
 
-                if l[0] == "SHA1-Patches:":
-                    x = read_hashs(1,f,self)
-                    continue
+                    if l[0] == "SHA1-Patches:":
+                        x = read_hashs(1,f,self)
+                        continue
 
-                if l[0] == "Canonical-Name:" or l[0]=="Canonical-Path:":
-                    self.can_path = l[1]
+                    if l[0] == "Canonical-Name:" or l[0]=="Canonical-Path:":
+                        self.can_path = l[1]
 
-                if l[0] == "SHA1-Current:" and len(l) == 3:
-                    self.filesizesha1 = (l[1], int(l[2]))
+                    if l[0] == "SHA1-Current:" and len(l) == 3:
+                        self.filesizesha1 = (l[1], int(l[2]))
 
-                x = f.readline()
+                    x = f.readline()
 
-          except IOError:
-            0
+            except IOError:
+                0
 
     def dump(self, out=sys.stdout):
         if self.can_path:
             out.write("Canonical-Path: %s\n" % (self.can_path))
-        
+
         if self.filesizesha1:
             out.write("SHA1-Current: %s %7d\n" % (self.filesizesha1))
 
@@ -164,7 +164,7 @@ class Updates:
                 tryunlink("%s/%s.gz" % (self.readpath, h))
                 del hs[h]
             l = l[cnt-self.max:]
-           self.history_order = l[:]
+            self.history_order = l[:]
 
         out.write("SHA1-History:\n")
         for h in l:
@@ -192,7 +192,7 @@ def sizesha1(f):
     return (sha1sum, size)
 
 def genchanges(Options, outdir, oldfile, origfile, maxdiffs = 14):
-    if Options.has_key("NoAct"): 
+    if Options.has_key("NoAct"):
         return
 
     patchname = Options["PatchName"]
@@ -258,7 +258,7 @@ def genchanges(Options, outdir, oldfile, origfile, maxdiffs = 14):
         print "%s: unchanged" % (origfile)
     else:
         if not os.path.isdir(outdir): os.mkdir(outdir)
-        w = os.popen("diff --ed - %s | gzip -c -9 > %s.gz" % 
+        w = os.popen("diff --ed - %s | gzip -c -9 > %s.gz" %
                          (newfile, difffile), "w")
         pipe_file(oldf, w)
         oldf.close()
@@ -293,7 +293,7 @@ def main():
                   ('r', "rootdir", "Generate-Index-Diffs::Options::RootDir", "hasArg"),
                   ('d', "tmpdir", "Generate-Index-Diffs::Options::TempDir", "hasArg"),
                   ('m', "maxdiffs", "Generate-Index-Diffs::Options::MaxDiffs", "hasArg"),
-                 ('n', "n-act", "Generate-Index-Diffs::Options::NoAct"),
+                  ('n', "n-act", "Generate-Index-Diffs::Options::NoAct"),
                 ]
     suites = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
     Options = Cnf.SubTree("Generate-Index-Diffs::Options")
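
The Updates class re-indented above reads an apt pdiff Index file: a SHA1-Current line for the current uncompressed file, plus SHA1-History and SHA1-Patches stanzas whose continuation lines carry hash, size and patch name. A small sketch of that format and the equivalent parse, with invented hashes and sizes:

# Sketch of the pdiff Index format parsed above; hashes and sizes are invented
# (real entries carry full 40-hex SHA1 sums).
sample_index = """\
SHA1-Current: 0123456789abcdef0123456789abcdef01234567 1234567
SHA1-History:
 aaaa1111 1230000 2008-05-01-1410.45
 bbbb2222 1234000 2008-05-02-1410.07
SHA1-Patches:
 cccc3333 4321 2008-05-01-1410.45
 dddd4444 5678 2008-05-02-1410.07
"""

history = {}     # patch name -> [(sha1, size) of the old file, (sha1, size) of the patch]
filesizesha1 = None
section = None
for line in sample_index.splitlines():
    if line.startswith(" "):
        sha, size, name = line.split()
        history.setdefault(name, [None, None])[section] = (sha, int(size))
    elif line.startswith("SHA1-History:"):
        section = 0
    elif line.startswith("SHA1-Patches:"):
        section = 1
    elif line.startswith("SHA1-Current:") and len(line.split()) == 3:
        filesizesha1 = (line.split()[1], int(line.split()[2]))

print filesizesha1
print history["2008-05-02-1410.07"]
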
old mode 100755 (executable)
new mode 100644 (file)
index 213ad49..867fbc8
@@ -62,18 +62,18 @@ def compressnames (tree,type,file):
     cl = compress.split()
     uncompress = ("." not in cl)
     for mode in compress.split():
-       if mode == ".":
-           result.append(file)
-       elif mode == "gzip":
-           if uncompress:
-               result.append("<zcat/.gz>" + file)
-               uncompress = 0
-           result.append(file + ".gz")
-       elif mode == "bzip2":
-           if uncompress:
-               result.append("<bzcat/.bz2>" + file)
-               uncompress = 0
-           result.append(file + ".bz2")
+        if mode == ".":
+            result.append(file)
+        elif mode == "gzip":
+            if uncompress:
+                result.append("<zcat/.gz>" + file)
+                uncompress = 0
+            result.append(file + ".gz")
+        elif mode == "bzip2":
+            if uncompress:
+                result.append("<bzcat/.bz2>" + file)
+                uncompress = 0
+            result.append(file + ".bz2")
     return result
 
 def create_temp_file (cmd):
@@ -83,13 +83,13 @@ def create_temp_file (cmd):
     r = r[0]
     size = 0
     while 1:
-       x = r.readline()
-       if not x:
-           r.close()
-           del x,r
-           break
-       f.write(x)
-       size += len(x)
+        x = r.readline()
+        if not x:
+            r.close()
+            del x,r
+            break
+        f.write(x)
+        size += len(x)
     f.flush()
     f.seek(0)
     return (size, f)
@@ -98,21 +98,21 @@ def print_md5sha_files (tree, files, hashop):
     path = Cnf["Dir::Root"] + tree + "/"
     for name in files:
         try:
-           if name[0] == "<":
-               j = name.index("/")
-               k = name.index(">")
-               (cat, ext, name) = (name[1:j], name[j+1:k], name[k+1:])
-               (size, file_handle) = create_temp_file("%s %s%s%s" %
-                   (cat, path, name, ext))
-           else:
-               size = os.stat(path + name)[stat.ST_SIZE]
-                       file_handle = daklib.utils.open_file(path + name)
+            if name[0] == "<":
+                j = name.index("/")
+                k = name.index(">")
+                (cat, ext, name) = (name[1:j], name[j+1:k], name[k+1:])
+                (size, file_handle) = create_temp_file("%s %s%s%s" %
+                    (cat, path, name, ext))
+            else:
+                size = os.stat(path + name)[stat.ST_SIZE]
+                file_handle = daklib.utils.open_file(path + name)
         except daklib.utils.cant_open_exc:
             print "ALERT: Couldn't open " + path + name
         else:
-           hash = hashop(file_handle)
-           file_handle.close()
-           out.write(" %s %8d %s\n" % (hash, size, name))
+            hash = hashop(file_handle)
+            file_handle.close()
+            out.write(" %s %8d %s\n" % (hash, size, name))
 
 def print_md5_files (tree, files):
     print_md5sha_files (tree, files, apt_pkg.md5sum)
@@ -132,18 +132,18 @@ def main ():
     Cnf = daklib.utils.get_conf()
 
     Arguments = [('h',"help","Generate-Releases::Options::Help"),
-                ('a',"apt-conf","Generate-Releases::Options::Apt-Conf", "HasArg"),
-                ('f',"force-touch","Generate-Releases::Options::Force-Touch"),
-               ]
+                 ('a',"apt-conf","Generate-Releases::Options::Apt-Conf", "HasArg"),
+                 ('f',"force-touch","Generate-Releases::Options::Force-Touch"),
+                ]
     for i in [ "help", "apt-conf", "force-touch" ]:
-       if not Cnf.has_key("Generate-Releases::Options::%s" % (i)):
-           Cnf["Generate-Releases::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Generate-Releases::Options::%s" % (i)):
+            Cnf["Generate-Releases::Options::%s" % (i)] = ""
 
     suites = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
     Options = Cnf.SubTree("Generate-Releases::Options")
 
     if Options["Help"]:
-       usage()
+        usage()
 
     if not Options["Apt-Conf"]:
         Options["Apt-Conf"] = daklib.utils.which_apt_conf_file()
@@ -158,36 +158,36 @@ def main ():
 
     for suite in suites:
         print "Processing: " + suite
-       SuiteBlock = Cnf.SubTree("Suite::" + suite)
+        SuiteBlock = Cnf.SubTree("Suite::" + suite)
 
-       if SuiteBlock.has_key("Untouchable") and not Options["Force-Touch"]:
+        if SuiteBlock.has_key("Untouchable") and not Options["Force-Touch"]:
             print "Skipping: " + suite + " (untouchable)"
             continue
 
-       suite = suite.lower()
+        suite = suite.lower()
 
-       origin = SuiteBlock["Origin"]
-       label = SuiteBlock.get("Label", origin)
-       codename = SuiteBlock.get("CodeName", "")
+        origin = SuiteBlock["Origin"]
+        label = SuiteBlock.get("Label", origin)
+        codename = SuiteBlock.get("CodeName", "")
 
-       version = ""
-       description = ""
+        version = ""
+        description = ""
 
-       q = projectB.query("SELECT version, description FROM suite WHERE suite_name = '%s'" % (suite))
-       qs = q.getresult()
-       if len(qs) == 1:
-           if qs[0][0] != "-": version = qs[0][0]
-           if qs[0][1]: description = qs[0][1]
+        q = projectB.query("SELECT version, description FROM suite WHERE suite_name = '%s'" % (suite))
+        qs = q.getresult()
+        if len(qs) == 1:
+            if qs[0][0] != "-": version = qs[0][0]
+            if qs[0][1]: description = qs[0][1]
 
-       if SuiteBlock.has_key("NotAutomatic"):
-           notautomatic = "yes"
-       else:
-           notautomatic = ""
+        if SuiteBlock.has_key("NotAutomatic"):
+            notautomatic = "yes"
+        else:
+            notautomatic = ""
 
-       if SuiteBlock.has_key("Components"):
-           components = SuiteBlock.ValueList("Components")
-       else:
-           components = []
+        if SuiteBlock.has_key("Components"):
+            components = SuiteBlock.ValueList("Components")
+        else:
+            components = []
 
         suite_suffix = Cnf.Find("Dinstall::SuiteSuffix")
         if components and suite_suffix:
@@ -195,70 +195,70 @@ def main ():
         else:
             longsuite = suite
 
-       tree = SuiteBlock.get("Tree", "dists/%s" % (longsuite))
+        tree = SuiteBlock.get("Tree", "dists/%s" % (longsuite))
 
-       if AptCnf.has_key("tree::%s" % (tree)):
-           pass
-       elif AptCnf.has_key("bindirectory::%s" % (tree)):
-           pass
-       else:
+        if AptCnf.has_key("tree::%s" % (tree)):
+            pass
+        elif AptCnf.has_key("bindirectory::%s" % (tree)):
+            pass
+        else:
             aptcnf_filename = os.path.basename(daklib.utils.which_apt_conf_file())
-           print "ALERT: suite %s not in %s, nor untouchable!" % (suite, aptcnf_filename)
-           continue
-
-       print Cnf["Dir::Root"] + tree + "/Release"
-       out = open(Cnf["Dir::Root"] + tree + "/Release", "w")
-
-       out.write("Origin: %s\n" % (origin))
-       out.write("Label: %s\n" % (label))
-       out.write("Suite: %s\n" % (suite))
-       if version != "":
-           out.write("Version: %s\n" % (version))
-       if codename != "":
-           out.write("Codename: %s\n" % (codename))
-       out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))
-       if notautomatic != "":
-           out.write("NotAutomatic: %s\n" % (notautomatic))
-       out.write("Architectures: %s\n" % (" ".join(filter(daklib.utils.real_arch, SuiteBlock.ValueList("Architectures")))))
-       if components:
+            print "ALERT: suite %s not in %s, nor untouchable!" % (suite, aptcnf_filename)
+            continue
+
+        print Cnf["Dir::Root"] + tree + "/Release"
+        out = open(Cnf["Dir::Root"] + tree + "/Release", "w")
+
+        out.write("Origin: %s\n" % (origin))
+        out.write("Label: %s\n" % (label))
+        out.write("Suite: %s\n" % (suite))
+        if version != "":
+            out.write("Version: %s\n" % (version))
+        if codename != "":
+            out.write("Codename: %s\n" % (codename))
+        out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))
+        if notautomatic != "":
+            out.write("NotAutomatic: %s\n" % (notautomatic))
+        out.write("Architectures: %s\n" % (" ".join(filter(daklib.utils.real_arch, SuiteBlock.ValueList("Architectures")))))
+        if components:
             out.write("Components: %s\n" % (" ".join(components)))
 
-       if description:
-           out.write("Description: %s\n" % (description))
-
-       files = []
-
-       if AptCnf.has_key("tree::%s" % (tree)):
-           for sec in AptCnf["tree::%s::Sections" % (tree)].split():
-               for arch in AptCnf["tree::%s::Architectures" % (tree)].split():
-                   if arch == "source":
-                       filepath = "%s/%s/Sources" % (sec, arch)
-                       for file in compressnames("tree::%s" % (tree), "Sources", filepath):
-                           files.append(file)
-                       add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
-                   else:
-                       disks = "%s/disks-%s" % (sec, arch)
-                       diskspath = Cnf["Dir::Root"]+tree+"/"+disks
-                       if os.path.exists(diskspath):
-                           for dir in os.listdir(diskspath):
-                               if os.path.exists("%s/%s/md5sum.txt" % (diskspath, dir)):
-                                   files.append("%s/%s/md5sum.txt" % (disks, dir))
-
-                       filepath = "%s/binary-%s/Packages" % (sec, arch)
-                       for file in compressnames("tree::%s" % (tree), "Packages", filepath):
-                           files.append(file)
-                       add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
-
-                   if arch == "source":
-                       rel = "%s/%s/Release" % (sec, arch)
-                   else:
-                       rel = "%s/binary-%s/Release" % (sec, arch)
-                   relpath = Cnf["Dir::Root"]+tree+"/"+rel
+        if description:
+            out.write("Description: %s\n" % (description))
+
+        files = []
+
+        if AptCnf.has_key("tree::%s" % (tree)):
+            for sec in AptCnf["tree::%s::Sections" % (tree)].split():
+                for arch in AptCnf["tree::%s::Architectures" % (tree)].split():
+                    if arch == "source":
+                        filepath = "%s/%s/Sources" % (sec, arch)
+                        for file in compressnames("tree::%s" % (tree), "Sources", filepath):
+                            files.append(file)
+                        add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
+                    else:
+                        disks = "%s/disks-%s" % (sec, arch)
+                        diskspath = Cnf["Dir::Root"]+tree+"/"+disks
+                        if os.path.exists(diskspath):
+                            for dir in os.listdir(diskspath):
+                                if os.path.exists("%s/%s/md5sum.txt" % (diskspath, dir)):
+                                    files.append("%s/%s/md5sum.txt" % (disks, dir))
+
+                        filepath = "%s/binary-%s/Packages" % (sec, arch)
+                        for file in compressnames("tree::%s" % (tree), "Packages", filepath):
+                            files.append(file)
+                        add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
+
+                    if arch == "source":
+                        rel = "%s/%s/Release" % (sec, arch)
+                    else:
+                        rel = "%s/binary-%s/Release" % (sec, arch)
+                    relpath = Cnf["Dir::Root"]+tree+"/"+rel
 
                     try:
-                       if os.access(relpath, os.F_OK):
-                           if os.stat(relpath).st_nlink > 1:
-                               os.unlink(relpath)
+                        if os.access(relpath, os.F_OK):
+                            if os.stat(relpath).st_nlink > 1:
+                                os.unlink(relpath)
                         release = open(relpath, "w")
                         #release = open(longsuite.replace("/","_") + "_" + arch + "_" + sec + "_Release", "w")
                     except IOError:
@@ -279,70 +279,69 @@ def main ():
                     release.close()
                     files.append(rel)
 
-           if AptCnf.has_key("tree::%s/main" % (tree)):
-               for dis in ["main", "contrib", "non-free"]:
-                   if not AptCnf.has_key("tree::%s/%s" % (tree, dis)): continue
-                   sec = AptCnf["tree::%s/%s::Sections" % (tree,dis)].split()[0]
-                   if sec != "debian-installer":
-                       print "ALERT: weird non debian-installer section in %s" % (tree)
-
-                   for arch in AptCnf["tree::%s/%s::Architectures" % (tree,dis)].split():
-                       if arch != "source":  # always true
-                           for file in compressnames("tree::%s/%s" % (tree,dis),
-                               "Packages", 
-                               "%s/%s/binary-%s/Packages" % (dis, sec, arch)):
-                               files.append(file)
-           elif AptCnf.has_key("tree::%s::FakeDI" % (tree)):
-               usetree = AptCnf["tree::%s::FakeDI" % (tree)]
-               sec = AptCnf["tree::%s/main::Sections" % (usetree)].split()[0]
-               if sec != "debian-installer":
-                   print "ALERT: weird non debian-installer section in %s" % (usetree)
-               for arch in AptCnf["tree::%s/main::Architectures" % (usetree)].split():
-                   if arch != "source":  # always true
-                       for file in compressnames("tree::%s/main" % (usetree), "Packages", "main/%s/binary-%s/Packages" % (sec, arch)):
-                           files.append(file)
-
-       elif AptCnf.has_key("bindirectory::%s" % (tree)):
-           for file in compressnames("bindirectory::%s" % (tree), "Packages", AptCnf["bindirectory::%s::Packages" % (tree)]):
-               files.append(file.replace(tree+"/","",1))
-           for file in compressnames("bindirectory::%s" % (tree), "Sources", AptCnf["bindirectory::%s::Sources" % (tree)]):
-               files.append(file.replace(tree+"/","",1))
-       else:
-           print "ALERT: no tree/bindirectory for %s" % (tree)
-
-       out.write("MD5Sum:\n")
-       print_md5_files(tree, files)
-       out.write("SHA1:\n")
-       print_sha1_files(tree, files)
-       out.write("SHA256:\n")
-       print_sha256_files(tree, files)
-
-       out.close()
-       if Cnf.has_key("Dinstall::SigningKeyring"):
-           keyring = "--secret-keyring \"%s\"" % Cnf["Dinstall::SigningKeyring"]
-           if Cnf.has_key("Dinstall::SigningPubKeyring"):
-               keyring += " --keyring \"%s\"" % Cnf["Dinstall::SigningPubKeyring"]
-
-           arguments = "--no-options --batch --no-tty --armour"
-           if Cnf.has_key("Dinstall::SigningKeyIds"):
-               signkeyids = Cnf["Dinstall::SigningKeyIds"].split()
-           else:
-               signkeyids = [""]
-
-           dest = Cnf["Dir::Root"] + tree + "/Release.gpg"
-           if os.path.exists(dest):
-               os.unlink(dest)
-
-           for keyid in signkeyids:
-               if keyid != "": defkeyid = "--default-key %s" % keyid
-               else: defkeyid = ""
-               os.system("gpg %s %s %s --detach-sign <%s >>%s" %
-                       (keyring, defkeyid, arguments,
-                       Cnf["Dir::Root"] + tree + "/Release", dest))
+            if AptCnf.has_key("tree::%s/main" % (tree)):
+                for dis in ["main", "contrib", "non-free"]:
+                    if not AptCnf.has_key("tree::%s/%s" % (tree, dis)): continue
+                    sec = AptCnf["tree::%s/%s::Sections" % (tree,dis)].split()[0]
+                    if sec != "debian-installer":
+                        print "ALERT: weird non debian-installer section in %s" % (tree)
+
+                    for arch in AptCnf["tree::%s/%s::Architectures" % (tree,dis)].split():
+                        if arch != "source":  # always true
+                            for file in compressnames("tree::%s/%s" % (tree,dis),
+                                "Packages",
+                                "%s/%s/binary-%s/Packages" % (dis, sec, arch)):
+                                files.append(file)
+            elif AptCnf.has_key("tree::%s::FakeDI" % (tree)):
+                usetree = AptCnf["tree::%s::FakeDI" % (tree)]
+                sec = AptCnf["tree::%s/main::Sections" % (usetree)].split()[0]
+                if sec != "debian-installer":
+                    print "ALERT: weird non debian-installer section in %s" % (usetree)
+
+                for arch in AptCnf["tree::%s/main::Architectures" % (usetree)].split():
+                    if arch != "source":  # always true
+                        for file in compressnames("tree::%s/main" % (usetree), "Packages", "main/%s/binary-%s/Packages" % (sec, arch)):
+                            files.append(file)
+
+        elif AptCnf.has_key("bindirectory::%s" % (tree)):
+            for file in compressnames("bindirectory::%s" % (tree), "Packages", AptCnf["bindirectory::%s::Packages" % (tree)]):
+                files.append(file.replace(tree+"/","",1))
+            for file in compressnames("bindirectory::%s" % (tree), "Sources", AptCnf["bindirectory::%s::Sources" % (tree)]):
+                files.append(file.replace(tree+"/","",1))
+        else:
+            print "ALERT: no tree/bindirectory for %s" % (tree)
+
+        out.write("MD5Sum:\n")
+        print_md5_files(tree, files)
+        out.write("SHA1:\n")
+        print_sha1_files(tree, files)
+        out.write("SHA256:\n")
+        print_sha256_files(tree, files)
+
+        out.close()
+        if Cnf.has_key("Dinstall::SigningKeyring"):
+            keyring = "--secret-keyring \"%s\"" % Cnf["Dinstall::SigningKeyring"]
+            if Cnf.has_key("Dinstall::SigningPubKeyring"):
+                keyring += " --keyring \"%s\"" % Cnf["Dinstall::SigningPubKeyring"]
+
+            arguments = "--no-options --batch --no-tty --armour"
+            if Cnf.has_key("Dinstall::SigningKeyIds"):
+                signkeyids = Cnf["Dinstall::SigningKeyIds"].split()
+            else:
+                signkeyids = [""]
+
+            dest = Cnf["Dir::Root"] + tree + "/Release.gpg"
+            if os.path.exists(dest):
+                os.unlink(dest)
+
+            for keyid in signkeyids:
+                if keyid != "": defkeyid = "--default-key %s" % keyid
+                else: defkeyid = ""
+                os.system("gpg %s %s %s --detach-sign <%s >>%s" %
+                        (keyring, defkeyid, arguments,
+                        Cnf["Dir::Root"] + tree + "/Release", dest))
 
 #######################################################################################
 
 if __name__ == '__main__':
     main()
-
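
The long re-indented block above builds a suite Release file field by field, appends MD5Sum/SHA1/SHA256 sections, then produces an armoured detached signature per configured key. A reduced sketch of the output and of the gpg invocation pattern, with invented directory, key id and checksum line:

# Reduced sketch of what the block above emits; the directory, keyring path,
# key id and checksum entry are invented for illustration.
import os, time, tempfile

tree_root = tempfile.mkdtemp()          # stands in for Cnf["Dir::Root"] + tree

out = open(tree_root + "/Release", "w")
out.write("Origin: Debian\n")
out.write("Label: Debian\n")
out.write("Suite: unstable\n")
out.write("Codename: sid\n")
out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))
out.write("Architectures: alpha amd64 arm i386\n")
out.write("Components: main contrib non-free\n")
out.write("MD5Sum:\n")
out.write(" %s %8d %s\n" % ("d41d8cd98f00b204e9800998ecf8427e", 0, "main/binary-i386/Packages.gz"))
out.close()

# One armoured detached signature per configured key id, appended to
# Release.gpg next to the Release file (command printed, not executed):
keyring = "--secret-keyring \"/srv/keyrings/secring.gpg\""   # hypothetical path
defkeyid = "--default-key ABCD1234"                          # hypothetical key id
print "gpg %s %s --no-options --batch --no-tty --armour --detach-sign <%s >>%s" % \
      (keyring, defkeyid, tree_root + "/Release", tree_root + "/Release.gpg")
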
old mode 100755 (executable)
new mode 100644 (file)
index 7f62fa3..82def1e
@@ -490,14 +490,14 @@ def do_da_do_da ():
     Arguments = [('a', "action", "Import-Archive::Options::Action"),
                  ('h', "help", "Import-Archive::Options::Help")]
     for i in [ "action", "help" ]:
-       if not Cnf.has_key("Import-Archive::Options::%s" % (i)):
-           Cnf["Import-Archive::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Import-Archive::Options::%s" % (i)):
+            Cnf["Import-Archive::Options::%s" % (i)] = ""
 
     apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
 
     Options = Cnf.SubTree("Import-Archive::Options")
     if Options["Help"]:
-       usage()
+        usage()
 
     if not Options["Action"]:
         daklib.utils.warn("""no -a/--action given; not doing anything.
old mode 100755 (executable)
new mode 100644 (file)
index be35a5c..63c0724
@@ -37,7 +37,7 @@ def get_uid_info():
     q = projectB.query("SELECT id, uid, name FROM uid")
     for (id, uid, name) in q.getresult():
         byname[uid] = (id, name)
-       byid[id] = (uid, name)
+        byid[id] = (uid, name)
     return (byname, byid)
 
 def get_fingerprint_info():
@@ -50,121 +50,121 @@ def get_fingerprint_info():
 ################################################################################
 
 def get_ldap_name(entry):
-       name = []
-       for k in ["cn", "mn", "sn"]:
-               ret = entry.get(k)
-               if ret and ret[0] != "" and ret[0] != "-":
-                       name.append(ret[0])
-       return " ".join(name)
+    name = []
+    for k in ["cn", "mn", "sn"]:
+        ret = entry.get(k)
+        if ret and ret[0] != "" and ret[0] != "-":
+            name.append(ret[0])
+    return " ".join(name)
 
 ################################################################################
 
 class Keyring:
-       gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
-                        " --with-colons --fingerprint --fingerprint"
-       keys = {}
-       fpr_lookup = {}
-
-       def de_escape_gpg_str(self, str):
-               esclist = re.split(r'(\\x..)', str)
-               for x in range(1,len(esclist),2):
-                       esclist[x] = "%c" % (int(esclist[x][2:],16))
-               return "".join(esclist)
-
-       def __init__(self, keyring):
-               k = os.popen(self.gpg_invocation % keyring, "r")
-               keys = self.keys
-               key = None
-               fpr_lookup = self.fpr_lookup
-               signingkey = False
-               for line in k.xreadlines():
-                       field = line.split(":")
-                       if field[0] == "pub":
-                               key = field[4]
-                               (name, addr) = email.Utils.parseaddr(field[9])
-                               name = re.sub(r"\s*[(].*[)]", "", name)
-                               if name == "" or addr == "" or "@" not in addr:
-                                       name = field[9]
-                                       addr = "invalid-uid"
-                               name = self.de_escape_gpg_str(name)
-                               keys[key] = {"email": addr}
-                               if name != "": keys[key]["name"] = name
-                               keys[key]["aliases"] = [name]
-                               keys[key]["fingerprints"] = []
-                               signingkey = True
-                       elif key and field[0] == "sub" and len(field) >= 12:
-                               signingkey = ("s" in field[11])
-                       elif key and field[0] == "uid":
-                               (name, addr) = email.Utils.parseaddr(field[9])
-                               if name and name not in keys[key]["aliases"]:
-                                       keys[key]["aliases"].append(name)
-                       elif signingkey and field[0] == "fpr":
-                               keys[key]["fingerprints"].append(field[9])
-                               fpr_lookup[field[9]] = key
-
-       def generate_desired_users(self):
-               if Options["Generate-Users"]:
-                       format = Options["Generate-Users"]
-                       return self.generate_users_from_keyring(format)
-               if Options["Import-Ldap-Users"]:
-                       return self.import_users_from_ldap()
-               return ({}, {})
-
-       def import_users_from_ldap(self):
-               LDAPDn = Cnf["Import-LDAP-Fingerprints::LDAPDn"]
-               LDAPServer = Cnf["Import-LDAP-Fingerprints::LDAPServer"]
-               l = ldap.open(LDAPServer)
-               l.simple_bind_s("","")
-               Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
-                       "(&(keyfingerprint=*)(gidnumber=%s))" % (Cnf["Import-Users-From-Passwd::ValidGID"]),
-                       ["uid", "keyfingerprint", "cn", "mn", "sn"])
-
-               ldap_fin_uid_id = {}
-
-               byuid = {}
-               byname = {}
-               keys = self.keys
-               fpr_lookup = self.fpr_lookup
-
-               for i in Attrs:
-                       entry = i[1]
-                       uid = entry["uid"][0]
-                       name = get_ldap_name(entry)
-                       fingerprints = entry["keyFingerPrint"]
-                       id = None
-                       for f in fingerprints:
-                               key = fpr_lookup.get(f, None)
-                               if key not in keys: continue
-                               keys[key]["uid"] = uid
-
-                               if id != None: continue
-                               id = daklib.database.get_or_set_uid_id(uid)
-                               byuid[id] = (uid, name)
-                               byname[uid] = (id, name)
-                       
-               return (byname, byuid)
-
-       def generate_users_from_keyring(self, format):
-               byuid = {}
-               byname = {}
-               keys = self.keys
-               any_invalid = False
-               for x in keys.keys():
-                       if keys[x]["email"] == "invalid-uid":
-                               any_invalid = True
-                               keys[x]["uid"] = format % "invalid-uid"
-                       else:
-                               uid = format % keys[x]["email"]
-                               id = daklib.database.get_or_set_uid_id(uid)
-                               byuid[id] = (uid, keys[x]["name"])
-                               byname[uid] = (id, keys[x]["name"])
-                               keys[x]["uid"] = uid
-               if any_invalid:
-                       uid = format % "invalid-uid"
-                       id = daklib.database.get_or_set_uid_id(uid)
-                       byuid[id] = (uid, "ungeneratable user id")
-                       byname[uid] = (id, "ungeneratable user id")
-               return (byname, byuid)
+    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
+                     " --with-colons --fingerprint --fingerprint"
+    keys = {}
+    fpr_lookup = {}
+
+    def de_escape_gpg_str(self, str):
+        esclist = re.split(r'(\\x..)', str)
+        for x in range(1,len(esclist),2):
+            esclist[x] = "%c" % (int(esclist[x][2:],16))
+        return "".join(esclist)
+
+    def __init__(self, keyring):
+        k = os.popen(self.gpg_invocation % keyring, "r")
+        keys = self.keys
+        key = None
+        fpr_lookup = self.fpr_lookup
+        signingkey = False
+        for line in k.xreadlines():
+            field = line.split(":")
+            if field[0] == "pub":
+                key = field[4]
+                (name, addr) = email.Utils.parseaddr(field[9])
+                name = re.sub(r"\s*[(].*[)]", "", name)
+                if name == "" or addr == "" or "@" not in addr:
+                    name = field[9]
+                    addr = "invalid-uid"
+                name = self.de_escape_gpg_str(name)
+                keys[key] = {"email": addr}
+                if name != "": keys[key]["name"] = name
+                keys[key]["aliases"] = [name]
+                keys[key]["fingerprints"] = []
+                signingkey = True
+            elif key and field[0] == "sub" and len(field) >= 12:
+                signingkey = ("s" in field[11])
+            elif key and field[0] == "uid":
+                (name, addr) = email.Utils.parseaddr(field[9])
+                if name and name not in keys[key]["aliases"]:
+                    keys[key]["aliases"].append(name)
+            elif signingkey and field[0] == "fpr":
+                keys[key]["fingerprints"].append(field[9])
+                fpr_lookup[field[9]] = key
+
+    def generate_desired_users(self):
+        if Options["Generate-Users"]:
+            format = Options["Generate-Users"]
+            return self.generate_users_from_keyring(format)
+        if Options["Import-Ldap-Users"]:
+            return self.import_users_from_ldap()
+        return ({}, {})
+
+    def import_users_from_ldap(self):
+        LDAPDn = Cnf["Import-LDAP-Fingerprints::LDAPDn"]
+        LDAPServer = Cnf["Import-LDAP-Fingerprints::LDAPServer"]
+        l = ldap.open(LDAPServer)
+        l.simple_bind_s("","")
+        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
+               "(&(keyfingerprint=*)(gidnumber=%s))" % (Cnf["Import-Users-From-Passwd::ValidGID"]),
+               ["uid", "keyfingerprint", "cn", "mn", "sn"])
+
+        ldap_fin_uid_id = {}
+
+        byuid = {}
+        byname = {}
+        keys = self.keys
+        fpr_lookup = self.fpr_lookup
+
+        for i in Attrs:
+            entry = i[1]
+            uid = entry["uid"][0]
+            name = get_ldap_name(entry)
+            fingerprints = entry["keyFingerPrint"]
+            id = None
+            for f in fingerprints:
+                key = fpr_lookup.get(f, None)
+                if key not in keys: continue
+                keys[key]["uid"] = uid
+
+                if id != None: continue
+                id = daklib.database.get_or_set_uid_id(uid)
+                byuid[id] = (uid, name)
+                byname[uid] = (id, name)
+
+        return (byname, byuid)
+
+    def generate_users_from_keyring(self, format):
+        byuid = {}
+        byname = {}
+        keys = self.keys
+        any_invalid = False
+        for x in keys.keys():
+            if keys[x]["email"] == "invalid-uid":
+                any_invalid = True
+                keys[x]["uid"] = format % "invalid-uid"
+            else:
+                uid = format % keys[x]["email"]
+                id = daklib.database.get_or_set_uid_id(uid)
+                byuid[id] = (uid, keys[x]["name"])
+                byname[uid] = (id, keys[x]["name"])
+                keys[x]["uid"] = uid
+        if any_invalid:
+            uid = format % "invalid-uid"
+            id = daklib.database.get_or_set_uid_id(uid)
+            byuid[id] = (uid, "ungeneratable user id")
+            byname[uid] = (id, "ungeneratable user id")
+        return (byname, byuid)
 
 ################################################################################
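
Keyring.__init__ above drives "gpg --no-default-keyring --keyring ... --with-colons --fingerprint --fingerprint" and walks the colon records: field 5 of a pub record is the key id, field 10 of uid records adds aliases, and fpr records map fingerprints back to the key. A standalone sketch over an invented sample of that output:

# Standalone sketch of the colon-record parsing done above; the sample records
# are invented.  (The real class additionally checks "sub" records so that only
# signing keys' fingerprints are kept.)
import email.Utils

sample = """\
pub:u:1024:17:DEADBEEF01234567:2001-01-01:::u:Some Maintainer <sm@example.org>::scaESCA:
fpr:::::::::0123456789ABCDEF0123456789ABCDEFDEADBEEF:
uid:u::::2004-01-01::ABCDEF00::S. Maintainer <sm@example.com>:
"""

keys = {}
fpr_lookup = {}
key = None
for line in sample.splitlines():
    field = line.split(":")
    if field[0] == "pub":
        key = field[4]                                 # long key id
        (name, addr) = email.Utils.parseaddr(field[9])
        keys[key] = {"email": addr, "name": name, "aliases": [name], "fingerprints": []}
    elif key and field[0] == "uid":
        (name, addr) = email.Utils.parseaddr(field[9])
        if name and name not in keys[key]["aliases"]:
            keys[key]["aliases"].append(name)
    elif key and field[0] == "fpr":
        keys[key]["fingerprints"].append(field[9])
        fpr_lookup[field[9]] = key

print keys["DEADBEEF01234567"]["email"]                # sm@example.org
print fpr_lookup["0123456789ABCDEF0123456789ABCDEFDEADBEEF"]
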
 
@@ -183,9 +183,9 @@ def main():
 
     Cnf = daklib.utils.get_conf()
     Arguments = [('h',"help","Import-Keyring::Options::Help"),
-                ('L',"import-ldap-users","Import-Keyring::Options::Import-Ldap-Users"),
-                ('U',"generate-users","Import-Keyring::Options::Generate-Users", "HasArg"),
-               ]
+                 ('L',"import-ldap-users","Import-Keyring::Options::Import-Ldap-Users"),
+                 ('U',"generate-users","Import-Keyring::Options::Generate-Users", "HasArg"),
+                ]
 
     for i in [ "help", "report-changes", "generate-users", "import-ldap-users" ]:
         if not Cnf.has_key("Import-Keyring::Options::%s" % (i)):
@@ -200,7 +200,7 @@ def main():
         usage()
 
     if len(keyring_names) != 1:
-       usage(1)
+        usage(1)
 
     ### Keep track of changes made
 
@@ -223,7 +223,7 @@ def main():
     keyring = Keyring(keyringname)
 
     keyring_id = daklib.database.get_or_set_keyring_id(
-                       keyringname.split("/")[-1])
+                        keyringname.split("/")[-1])
 
     ### Generate new uid entries if they're needed (from LDAP or the keyring)
     (desuid_byname, desuid_byid) = keyring.generate_desired_users()
@@ -235,11 +235,11 @@ def main():
     for id in desuid_byid.keys():
         uid = (id, desuid_byid[id][0])
         name = desuid_byid[id][1]
-       oname = db_uid_byid[id][1]
-       if name and oname != name:
-           changes.append((uid[1], "Full name: %s" % (name)))
+        oname = db_uid_byid[id][1]
+        if name and oname != name:
+            changes.append((uid[1], "Full name: %s" % (name)))
             projectB.query("UPDATE uid SET name = '%s' WHERE id = %s" %
-               (pg.escape_string(name), id))
+                (pg.escape_string(name), id))
 
     # The fingerprint table (fpr) points to a uid and a keyring.
     #   If the uid is being decided here (ldap/generate) we set it to it.
@@ -251,9 +251,9 @@ def main():
     for z in keyring.keys.keys():
         id = db_uid_byname.get(keyring.keys[z].get("uid", None), [None])[0]
         if id == None:
-           id = db_fin_info.get(keyring.keys[z]["fingerprints"][0], [None])[0]
-       for y in keyring.keys[z]["fingerprints"]:
-           fpr[y] = (id,keyring_id)
+            id = db_fin_info.get(keyring.keys[z]["fingerprints"][0], [None])[0]
+        for y in keyring.keys[z]["fingerprints"]:
+            fpr[y] = (id,keyring_id)
 
     # For any keys that used to be in this keyring, disassociate them.
     # We don't change the uid, leaving that for historical info; if
@@ -261,37 +261,37 @@ def main():
 
     for f,(u,fid,kr) in db_fin_info.iteritems():
         if kr != keyring_id: continue
-       if f in fpr: continue
-       changes.append((db_uid_byid.get(u, [None])[0], "Removed key: %s" % (f)))
-       projectB.query("UPDATE fingerprint SET keyring = NULL WHERE id = %d" % (fid))
+        if f in fpr: continue
+        changes.append((db_uid_byid.get(u, [None])[0], "Removed key: %s" % (f)))
+        projectB.query("UPDATE fingerprint SET keyring = NULL WHERE id = %d" % (fid))
 
     # For the keys in this keyring, add/update any fingerprints that've
     # changed.
 
     for f in fpr:
         newuid = fpr[f][0]
-       newuiduid = db_uid_byid.get(newuid, [None])[0] 
-       (olduid, oldfid, oldkid) = db_fin_info.get(f, [-1,-1,-1])
-       if olduid == None: olduid = -1
-       if oldkid == None: oldkid = -1
-       if oldfid == -1:
-           changes.append((newuiduid, "Added key: %s" % (f)))
+        newuiduid = db_uid_byid.get(newuid, [None])[0]
+        (olduid, oldfid, oldkid) = db_fin_info.get(f, [-1,-1,-1])
+        if olduid == None: olduid = -1
+        if oldkid == None: oldkid = -1
+        if oldfid == -1:
+            changes.append((newuiduid, "Added key: %s" % (f)))
             if newuid:
-               projectB.query("INSERT INTO fingerprint (fingerprint, uid, keyring) VALUES ('%s', %d, %d)" % (f, newuid, keyring_id))
-           else:
-               projectB.query("INSERT INTO fingerprint (fingerprint, keyring) VALUES ('%s', %d)" % (f, keyring_id))
-       else:
-           if newuid and olduid != newuid:
-               if olduid != -1:
-                   changes.append((newuiduid, "Linked key: %s" % f))
-                   changes.append((newuiduid, "  (formerly belonging to %s)" % (db_uid_byid[olduid][0])))
-               else:
-                   changes.append((newuiduid, "Linked key: %s" % f))
-                   changes.append((newuiduid, "  (formerly unowned)"))
-               projectB.query("UPDATE fingerprint SET uid = %d WHERE id = %d" % (newuid, oldfid))
-
-           if oldkid != keyring_id:
-               projectB.query("UPDATE fingerprint SET keyring = %d WHERE id = %d" % (keyring_id, oldfid))
+                projectB.query("INSERT INTO fingerprint (fingerprint, uid, keyring) VALUES ('%s', %d, %d)" % (f, newuid, keyring_id))
+            else:
+                projectB.query("INSERT INTO fingerprint (fingerprint, keyring) VALUES ('%s', %d)" % (f, keyring_id))
+        else:
+            if newuid and olduid != newuid:
+                if olduid != -1:
+                    changes.append((newuiduid, "Linked key: %s" % f))
+                    changes.append((newuiduid, "  (formerly belonging to %s)" % (db_uid_byid[olduid][0])))
+                else:
+                    changes.append((newuiduid, "Linked key: %s" % f))
+                    changes.append((newuiduid, "  (formerly unowned)"))
+                projectB.query("UPDATE fingerprint SET uid = %d WHERE id = %d" % (newuid, oldfid))
+
+            if oldkid != keyring_id:
+                projectB.query("UPDATE fingerprint SET keyring = %d WHERE id = %d" % (keyring_id, oldfid))
 
     # All done!
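
Aside from the re-indentation hunks, this file carries the Keyring handling shown above: it walks GnuPG's colon-delimited key listing (the "pub"/"sub"/"uid"/"fpr" records), remembers which fingerprints belong to a signing key, builds desired uid entries either from LDAP or from the keyring itself, and then reconciles those with the uid and fingerprint tables. As a rough, standalone illustration of the listing format only (not dak's code), the sketch below collects fingerprints per primary key; it assumes GnuPG is installed and skips the subkey-capability and uid handling done above.

    # Minimal sketch (not dak's implementation): map primary key ids to their
    # fingerprints by parsing colon-delimited output from GnuPG.
    import subprocess

    def fingerprints_by_key(keyring_path):
        out = subprocess.check_output(
            ["gpg", "--no-default-keyring", "--keyring", keyring_path,
             "--list-keys", "--with-colons", "--fingerprint"])
        keys = {}
        current = None
        for line in out.decode("utf-8", "replace").splitlines():
            field = line.split(":")
            if field[0] == "pub":
                current = field[4]                  # long key id
                keys[current] = []
            elif field[0] == "fpr" and current is not None:
                keys[current].append(field[9])      # full fingerprint
        return keys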
 
old mode 100755 (executable)
new mode 100644 (file)
index eda3710..f204a9f
--- a/dak/import_ldap_fingerprints.py
+++ b/dak/import_ldap_fingerprints.py
@@ -91,14 +91,14 @@ def main():
     Cnf = daklib.utils.get_conf()
     Arguments = [('h',"help","Import-LDAP-Fingerprints::Options::Help")]
     for i in [ "help" ]:
-       if not Cnf.has_key("Import-LDAP-Fingerprints::Options::%s" % (i)):
-           Cnf["Import-LDAP-Fingerprints::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Import-LDAP-Fingerprints::Options::%s" % (i)):
+            Cnf["Import-LDAP-Fingerprints::Options::%s" % (i)] = ""
 
     apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
 
     Options = Cnf.SubTree("Import-LDAP-Fingerprints::Options")
     if Options["Help"]:
-       usage()
+        usage()
 
     projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
     daklib.database.init(Cnf, projectB)
@@ -139,8 +139,8 @@ SELECT f.fingerprint, f.id, u.uid FROM fingerprint f, uid u WHERE f.uid = u.id
         uid_id = daklib.database.get_or_set_uid_id(uid)
 
         if not db_uid_name.has_key(uid_id) or db_uid_name[uid_id] != name:
-           q = projectB.query("UPDATE uid SET name = '%s' WHERE id = %d" % (escape_string(name), uid_id))
-           print "Assigning name of %s as %s" % (uid, name)
+            q = projectB.query("UPDATE uid SET name = '%s' WHERE id = %d" % (escape_string(name), uid_id))
+            print "Assigning name of %s as %s" % (uid, name)
 
         for fingerprint in fingerprints:
             ldap_fin_uid_id[fingerprint] = (uid, uid_id)
@@ -149,9 +149,9 @@ SELECT f.fingerprint, f.id, u.uid FROM fingerprint f, uid u WHERE f.uid = u.id
                 if not existing_uid:
                     q = projectB.query("UPDATE fingerprint SET uid = %s WHERE id = %s" % (uid_id, fingerprint_id))
                     print "Assigning %s to 0x%s." % (uid, fingerprint)
-               elif existing_uid == uid:
-                   pass
-               elif existing_uid[:3] == "dm:":
+                elif existing_uid == uid:
+                    pass
+                elif existing_uid[:3] == "dm:":
                     q = projectB.query("UPDATE fingerprint SET uid = %s WHERE id = %s" % (uid_id, fingerprint_id))
                     print "Promoting DM %s to DD %s with keyid 0x%s." % (existing_uid, uid, fingerprint)
                 else:
@@ -173,10 +173,10 @@ SELECT f.fingerprint, f.id, u.uid FROM fingerprint f, uid u WHERE f.uid = u.id
             primary_key = primary_key.replace(" ","")
             if not ldap_fin_uid_id.has_key(primary_key):
                 daklib.utils.warn("0x%s (from 0x%s): no UID found in LDAP" % (primary_key, fingerprint))
-           else:
-               (uid, uid_id) = ldap_fin_uid_id[primary_key]
-               q = projectB.query("UPDATE fingerprint SET uid = %s WHERE id = %s" % (uid_id, fingerprint_id))
-               print "Assigning %s to 0x%s." % (uid, fingerprint)
+            else:
+                (uid, uid_id) = ldap_fin_uid_id[primary_key]
+                q = projectB.query("UPDATE fingerprint SET uid = %s WHERE id = %s" % (uid_id, fingerprint_id))
+                print "Assigning %s to 0x%s." % (uid, fingerprint)
         else:
             extra_keyrings = ""
             for keyring in Cnf.ValueList("Import-LDAP-Fingerprints::ExtraKeyrings"):
old mode 100755 (executable)
new mode 100644 (file)
index 994a558..fa34772
--- a/dak/import_users_from_passwd.py
+++ b/dak/import_users_from_passwd.py
@@ -61,8 +61,8 @@ def main ():
                  ('v', "verbose", "Import-Users-From-Passwd::Options::Verbose"),
                  ('h', "help", "Import-Users-From-Passwd::Options::Help")]
     for i in [ "no-action", "quiet", "verbose", "help" ]:
-       if not Cnf.has_key("Import-Users-From-Passwd::Options::%s" % (i)):
-           Cnf["Import-Users-From-Passwd::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Import-Users-From-Passwd::Options::%s" % (i)):
+            Cnf["Import-Users-From-Passwd::Options::%s" % (i)] = ""
 
     arguments = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
     Options = Cnf.SubTree("Import-Users-From-Passwd::Options")
@@ -117,4 +117,3 @@ def main ():
 
 if __name__ == '__main__':
     main()
-
old mode 100755 (executable)
new mode 100644 (file)
index 89c4ed2..3185b6e
--- a/dak/init_db.py
+++ b/dak/init_db.py
@@ -190,7 +190,7 @@ def do_section():
     projectB.query("COMMIT WORK")
 
 ################################################################################
-    
+
 def main ():
     """Sync dak.conf configuartion file and the SQL database"""
 
old mode 100755 (executable)
new mode 100644 (file)
index 0d9eff2..f9d3e80
--- a/dak/init_dirs.py
+++ b/dak/init_dirs.py
@@ -55,14 +55,14 @@ it."""
 
 def process_file(config, config_name):
     """Create directories for a config entry that's a filename."""
-    
+
     if config.has_key(config_name):
         target = os.path.dirname(config[config_name])
         do_dir(target, config_name)
 
 def process_tree(config, tree):
     """Create directories for a config tree."""
-    
+
     for entry in config.SubTree(tree).List():
         entry = entry.lower()
         if tree == "Dir":
@@ -74,7 +74,7 @@ def process_tree(config, tree):
 
 def process_morguesubdir(subdir):
     """Create directories for morgue sub directories."""
-    
+
     config_name = "%s::MorgueSubDir" % (subdir)
     if Cnf.has_key(config_name):
         target = os.path.join(Cnf["Dir::Morgue"], Cnf[config_name])
@@ -142,4 +142,3 @@ def main ():
 
 if __name__ == '__main__':
     main()
-
old mode 100755 (executable)
new mode 100644 (file)
index 21b12f6..6ed845f
--- a/dak/ls.py
+++ b/dak/ls.py
@@ -76,8 +76,8 @@ def main ():
     for i in [ "architecture", "binarytype", "component", "format",
                "greaterorequal", "greaterthan", "regex", "suite",
                "source-and-binary", "help" ]:
-       if not Cnf.has_key("Ls::Options::%s" % (i)):
-           Cnf["Ls::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Ls::Options::%s" % (i)):
+            Cnf["Ls::Options::%s" % (i)] = ""
 
     packages = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
     Options = Cnf.SubTree("Ls::Options")
@@ -198,4 +198,3 @@ SELECT s.source, s.version, 'source', su.suite_name, c.name, m.name
 
 if __name__ == '__main__':
     main()
-
old mode 100755 (executable)
new mode 100644 (file)
index 86abde2..077386a
--- a/dak/make_maintainers.py
+++ b/dak/make_maintainers.py
@@ -80,7 +80,7 @@ def main():
 
     Arguments = [('h',"help","Make-Maintainers::Options::Help")]
     if not Cnf.has_key("Make-Maintainers::Options::Help"):
-       Cnf["Make-Maintainers::Options::Help"] = ""
+        Cnf["Make-Maintainers::Options::Help"] = ""
 
     extra_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
     Options = Cnf.SubTree("Make-Maintainers::Options")
@@ -159,4 +159,3 @@ def main():
 
 if __name__ == '__main__':
     main()
-
old mode 100755 (executable)
new mode 100644 (file)
index 53bf2df..77074ed
--- a/dak/make_overrides.py
+++ b/dak/make_overrides.py
@@ -86,12 +86,12 @@ def main ():
     Cnf = daklib.utils.get_conf()
     Arguments = [('h',"help","Make-Overrides::Options::Help")]
     for i in [ "help" ]:
-       if not Cnf.has_key("Make-Overrides::Options::%s" % (i)):
-           Cnf["Make-Overrides::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Make-Overrides::Options::%s" % (i)):
+            Cnf["Make-Overrides::Options::%s" % (i)] = ""
     apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
     Options = Cnf.SubTree("Make-Overrides::Options")
     if Options["Help"]:
-       usage()
+        usage()
 
     projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
     daklib.database.init(Cnf, projectB)
old mode 100755 (executable)
new mode 100644 (file)
index 9836560..a63a617
--- a/dak/make_suite_file_list.py
+++ b/dak/make_suite_file_list.py
@@ -85,7 +85,7 @@ def delete_packages(delete_versions, pkg, dominant_arch, suite,
         delete_version = version[0]
         delete_id = packages[delete_unique_id]["id"]
         delete_arch = packages[delete_unique_id]["arch"]
-       if not Cnf.Find("Suite::%s::Untouchable" % (suite)) or Options["Force"]:
+        if not Cnf.Find("Suite::%s::Untouchable" % (suite)) or Options["Force"]:
             if Options["No-Delete"]:
                 print "Would delete %s_%s_%s in %s in favour of %s_%s" % (pkg, delete_arch, delete_version, suite, dominant_version, dominant_arch)
             else:
@@ -363,7 +363,7 @@ def do_da_do_da():
                 daklib.utils.warn("Adding %s as %s maps Arch: all from it." % (archall_suite, suite))
                 suites.append(archall_suite)
         Options["Suite"] = ",".join(suites)
-    
+
     (con_suites, con_architectures, con_components, check_source) = \
                  daklib.utils.parse_args(Options)
 
@@ -414,11 +414,11 @@ def main():
                  ('c', "component", "Make-Suite-File-List::Options::Component", "HasArg"),
                  ('h', "help", "Make-Suite-File-List::Options::Help"),
                  ('n', "no-delete", "Make-Suite-File-List::Options::No-Delete"),
-                ('f', "force", "Make-Suite-File-List::Options::Force"),
+                 ('f', "force", "Make-Suite-File-List::Options::Force"),
                  ('s', "suite", "Make-Suite-File-List::Options::Suite", "HasArg")]
     for i in ["architecture", "component", "help", "no-delete", "suite", "force-touch" ]:
-       if not Cnf.has_key("Make-Suite-File-List::Options::%s" % (i)):
-           Cnf["Make-Suite-File-List::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Make-Suite-File-List::Options::%s" % (i)):
+            Cnf["Make-Suite-File-List::Options::%s" % (i)] = ""
     apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
     Options = Cnf.SubTree("Make-Suite-File-List::Options")
     if Options["Help"]:
old mode 100755 (executable)
new mode 100644 (file)
index d4d96c2..3f79020
--- a/dak/mirror_split.py
+++ b/dak/mirror_split.py
@@ -97,7 +97,7 @@ class MirrorSplitTarget:
         if path.find("/installer-") != -1:
             return 0
         return 1
-    
+
 ##############################################################################
 # The applicable function is basically a predicate. Given a path and a
 # target object its job is to decide if the path conforms for the
@@ -190,7 +190,7 @@ class MirrorSplitDB:
         cPickle.dump( self.root, f, 1 )
         f.close()
 
-        
+
 ##############################################################################
 # Helper functions for the tree syncing...
 ##################
@@ -238,7 +238,7 @@ def _internal_reconcile( path, srcdir, targdir, targ ):
         #print "-L-", _pth(path,k)
         do_unlink(targ, _pth(path,k))
         del targdir.links[k]
-    
+
     # Remove any files in targdir which aren't in srcdir
     # Or which aren't applicable
     rm = []
@@ -281,7 +281,7 @@ def _internal_reconcile( path, srcdir, targdir, targ ):
     for k in srcdir.links.keys():
         if applicable( _pth(path,k), targ ):
             if not targdir.links.has_key(k):
-                targdir.links[k] = srcdir.links[k]; 
+                targdir.links[k] = srcdir.links[k];
                 #print "+L+",_pth(path,k), "->", srcdir.links[k]
                 do_symlink( targ, _pth(path,k), targdir.links[k] )
             else:
@@ -314,7 +314,7 @@ def load_config():
     MASTER_PATH = Cnf["Mirror-Split::FTPPath"]
     TREE_ROOT = Cnf["Mirror-Split::TreeRootPath"]
     TREE_DB_ROOT = Cnf["Mirror-Split::TreeDatabasePath"]
-    
+
     for a in Cnf.ValueList("Mirror-Split::BasicTrees"):
         trees.append( MirrorSplitTarget( a, "%s,all" % a, 1 ) )
 
@@ -338,7 +338,7 @@ def do_list ():
             print " [source]"
         else:
             print ""
-        
+
 def do_help ():
     print """Usage: dak mirror-split [OPTIONS]
 Generate hardlink trees of certain architectures
@@ -371,7 +371,7 @@ def main ():
     if Options.has_key("List"):
         do_list()
         return
-    
+
 
     src = MirrorSplitDB()
     print "Scanning", MASTER_PATH
@@ -384,7 +384,7 @@ def main ():
         print "Saving updated DB...",
         tree.save_db()
         print "Done"
-    
+
 ##############################################################################
 
 if __name__ == '__main__':
old mode 100755 (executable)
new mode 100644 (file)
index 99378e7..f2aea52
--- a/dak/new_security_install.py
+++ b/dak/new_security_install.py
@@ -134,7 +134,7 @@ def load_adv_changes():
             continue
 
         if c not in changes: changes.append(c)
-        srcver = "%s %s" % (Upload.pkg.changes["source"], 
+        srcver = "%s %s" % (Upload.pkg.changes["source"],
                             Upload.pkg.changes["version"])
         srcverarches.setdefault(srcver, {})
         for arch in Upload.pkg.changes["architecture"].keys():
@@ -173,7 +173,7 @@ def prompt(opts, default):
     while a not in v:
         a = daklib.utils.our_raw_input(p) + default
         a = a[:1].upper()
-        
+
     return v[a]
 
 def add_changes(extras):
@@ -204,10 +204,10 @@ def do_upload():
         actually_upload(changes)
     else:
         child = os.fork()
-       if child == 0:
-           actually_upload(changes)
-           os._exit(0)
-       print "Uploading in the background"
+        if child == 0:
+            actually_upload(changes)
+            os._exit(0)
+        print "Uploading in the background"
 
 def actually_upload(changes_files):
     file_list = ""
@@ -430,7 +430,7 @@ def sudo(arg, fn, exit):
     if Options["Sudo"]:
         if advisory == None:
             daklib.utils.fubar("Must set advisory name")
-        os.spawnl(os.P_WAIT, "/usr/bin/sudo", "/usr/bin/sudo", "-u", "dak", "-H", 
+        os.spawnl(os.P_WAIT, "/usr/bin/sudo", "/usr/bin/sudo", "-u", "dak", "-H",
                   "/usr/local/bin/dak", "new-security-install", "-"+arg, "--", advisory)
     else:
         fn()
@@ -632,7 +632,7 @@ def main():
             if os.getcwd() == Cnf["Dir::Queue::Embargoed"].rstrip("/"):
                 opts.append("Disembargo")
             opts += ["Show advisory", "Reject", "Quit"]
-        
+
             advisory_info()
             what = prompt(opts, default)
 
old mode 100755 (executable)
new mode 100644 (file)
index b981a23..2e10bca
--- a/dak/override.py
+++ b/dak/override.py
@@ -68,16 +68,16 @@ def main ():
                  ('s',"suite","Override::Options::Suite", "HasArg"),
                  ]
     for i in ["help", "no-action"]:
-       if not Cnf.has_key("Override::Options::%s" % (i)):
-           Cnf["Override::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Override::Options::%s" % (i)):
+            Cnf["Override::Options::%s" % (i)] = ""
     if not Cnf.has_key("Override::Options::Suite"):
-       Cnf["Override::Options::Suite"] = "unstable"
+        Cnf["Override::Options::Suite"] = "unstable"
 
     arguments = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
     Options = Cnf.SubTree("Override::Options")
 
     if Options["Help"]:
-       usage()
+        usage()
 
     projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
     daklib.database.init(Cnf, projectB)
old mode 100755 (executable)
new mode 100644 (file)
index 1a445ea..5b7ffbf
--- a/dak/poolize.py
+++ b/dak/poolize.py
@@ -141,8 +141,8 @@ def main ():
     Cnf = daklib.utils.get_conf()
 
     for i in ["help", "limit", "no-action", "verbose" ]:
-       if not Cnf.has_key("Poolize::Options::%s" % (i)):
-           Cnf["Poolize::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Poolize::Options::%s" % (i)):
+            Cnf["Poolize::Options::%s" % (i)] = ""
 
 
     Arguments = [('h',"help","Poolize::Options::Help"),
@@ -190,4 +190,3 @@ UNION SELECT l.path, f.filename, f.id as files_id, null as component
 
 if __name__ == '__main__':
     main()
-
old mode 100755 (executable)
new mode 100644 (file)
index 8639683..bb75d5e
--- a/dak/process_accepted.py
+++ b/dak/process_accepted.py
@@ -33,7 +33,7 @@ import errno, fcntl, os, sys, time, re
 import apt_pkg
 import daklib.database
 import daklib.logging
-import daklib.queue 
+import daklib.queue
 import daklib.utils
 
 ###############################################################################
@@ -71,9 +71,9 @@ class Urgency_Log:
         self.timestamp = time.strftime("%Y%m%d%H%M%S")
         # Create the log directory if it doesn't exist
         self.log_dir = Cnf["Dir::UrgencyLog"]
-        if not os.path.exists(self.log_dir):
-            umask = os.umask(00000)
-            os.makedirs(self.log_dir, 02775)
+        if not os.path.exists(self.log_dir) or not os.access(self.log_dir, os.W_OK):
+            daklib.utils.warn("UrgencyLog directory %s does not exist or is not writeable, using /srv/ftp.debian.org/tmp/ instead" % (self.log_dir))
+            self.log_dir = '/srv/ftp.debian.org/tmp/'
         # Open the logfile
         self.log_filename = "%s/.install-urgencies-%s.new" % (self.log_dir, self.timestamp)
         self.log_file = daklib.utils.open_file(self.log_filename, 'w')
@@ -133,15 +133,15 @@ def check():
         # propogate in the case it is in the override tables:
         if changes.has_key("propdistribution"):
             for suite in changes["propdistribution"].keys():
-               if Upload.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file):
-                   propogate[suite] = 1
-               else:
-                   nopropogate[suite] = 1
+                if Upload.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file):
+                    propogate[suite] = 1
+                else:
+                    nopropogate[suite] = 1
 
     for suite in propogate.keys():
-       if suite in nopropogate:
-           continue
-       changes["distribution"][suite] = 1
+        if suite in nopropogate:
+            continue
+        changes["distribution"][suite] = 1
 
     for file in files.keys():
         # Check the package is still in the override tables
@@ -163,8 +163,8 @@ def init():
                  ('s',"no-mail", "Dinstall::Options::No-Mail")]
 
     for i in ["automatic", "help", "no-action", "no-lock", "no-mail", "version"]:
-       if not Cnf.has_key("Dinstall::Options::%s" % (i)):
-           Cnf["Dinstall::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Dinstall::Options::%s" % (i)):
+            Cnf["Dinstall::Options::%s" % (i)] = ""
 
     changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
     Options = Cnf.SubTree("Dinstall::Options")
@@ -211,8 +211,8 @@ def action ():
         if Options["Automatic"]:
             answer = 'R'
     else:
-        print "INSTALL to " + ", ".join(changes["distribution"].keys()) 
-       print reject_message + summary,
+        print "INSTALL to " + ", ".join(changes["distribution"].keys())
+        print reject_message + summary,
         prompt = "[I]nstall, Skip, Quit ?"
         if Options["Automatic"]:
             answer = 'I'
@@ -345,8 +345,8 @@ def install ():
             source = files[file]["source package"]
             source_version = files[file]["source version"]
             filename = files[file]["pool name"] + file
-           if not files[file].has_key("location id") or not files[file]["location id"]:
-               files[file]["location id"] = daklib.database.get_location_id(Cnf["Dir::Pool"],files[file]["component"],daklib.utils.where_am_i())
+            if not files[file].has_key("location id") or not files[file]["location id"]:
+                files[file]["location id"] = daklib.database.get_location_id(Cnf["Dir::Pool"],files[file]["component"],daklib.utils.where_am_i())
             if not files[file].has_key("files id") or not files[file]["files id"]:
                 files[file]["files id"] = daklib.database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["location id"])
             source_id = daklib.database.get_source_id (source, source_version)
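
Most hunks in this file are whitespace-only, but the Urgency_Log change above is functional: rather than creating a missing urgency-log directory, the constructor now warns and falls back to a fixed temporary path when the configured directory is absent or unwritable. A minimal sketch of that kind of check follows; the directory names and the helper are illustrative, not dak configuration.

    # Sketch of the "fall back if the preferred directory is unusable" check
    # used above; directory names here are examples only.
    import os

    def usable_log_dir(preferred, fallback="/tmp"):
        if os.path.isdir(preferred) and os.access(preferred, os.W_OK):
            return preferred
        # Matches the behaviour above: warn and use the fallback rather than
        # trying to create the missing directory.
        print("warning: %s missing or not writable, using %s" % (preferred, fallback))
        return fallback
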
old mode 100755 (executable)
new mode 100644 (file)
index 54c2b48..37a902a
--- a/dak/process_new.py
+++ b/dak/process_new.py
@@ -41,7 +41,7 @@ import apt_pkg, apt_inst
 import examine_package
 import daklib.database
 import daklib.logging
-import daklib.queue 
+import daklib.queue
 import daklib.utils
 
 # Globals
@@ -83,7 +83,12 @@ def recheck():
                and not Upload.source_exists(source_package, source_version, Upload.pkg.changes["distribution"].keys()):
                 source_epochless_version = daklib.utils.re_no_epoch.sub('', source_version)
                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
-                if not os.path.exists(Cnf["Dir::Queue::Accepted"] + '/' + dsc_filename):
+                found = 0
+                for q in ["Accepted", "Embargoed", "Unembargoed"]:
+                    if Cnf.has_key("Dir::Queue::%s" % (q)):
+                        if os.path.exists(Cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
+                            found = 1
+                if not found:
                     reject("no source found for %s %s (%s)." % (source_package, source_version, file))
 
         # Version and file overwrite checks
@@ -604,14 +609,14 @@ def do_new():
     for suite in changes["suite"].keys():
         override = Cnf.Find("Suite::%s::OverrideSuite" % (suite))
         if override:
-           (olderr, newerr) = (daklib.database.get_suite_id(suite) == -1,
-             daklib.database.get_suite_id(override) == -1)
-           if olderr or newerr:
-               (oinv, newinv) = ("", "")
-               if olderr: oinv = "invalid "
-               if newerr: ninv = "invalid "
-               print "warning: overriding %ssuite %s to %ssuite %s" % (
-                       oinv, suite, ninv, override)
+            (olderr, newerr) = (daklib.database.get_suite_id(suite) == -1,
+              daklib.database.get_suite_id(override) == -1)
+            if olderr or newerr:
+                (oinv, ninv) = ("", "")
+                if olderr: oinv = "invalid "
+                if newerr: ninv = "invalid "
+                print "warning: overriding %ssuite %s to %ssuite %s" % (
+                        oinv, suite, ninv, override)
             del changes["suite"][suite]
             changes["suite"][override] = 1
     # Validate suites
@@ -686,6 +691,7 @@ def usage (exit_code=0):
     print """Usage: dak process-new [OPTION]... [CHANGES]...
   -a, --automatic           automatic run
   -h, --help                show this help and exit.
+  -C, --comments-dir=DIR    use DIR as comments-dir, for [o-]p-u-new
   -m, --manual-reject=MSG   manual reject with `msg'
   -n, --no-action           don't do anything
   -V, --version             display the version number and exit"""
@@ -700,7 +706,7 @@ def init():
 
     Arguments = [('a',"automatic","Process-New::Options::Automatic"),
                  ('h',"help","Process-New::Options::Help"),
-                ('C',"comments-dir","Process-New::Options::Comments-Dir", "HasArg"),
+                 ('C',"comments-dir","Process-New::Options::Comments-Dir", "HasArg"),
                  ('m',"manual-reject","Process-New::Options::Manual-Reject", "HasArg"),
                  ('n',"no-action","Process-New::Options::No-Action")]
 
@@ -777,28 +783,44 @@ def do_byhand():
 
 ################################################################################
 
+def get_accept_lock():
+    retry = 0
+    while retry < 10:
+        try:
+            lock_fd = os.open(Cnf["Process-New::AcceptedLockFile"], os.O_RDONLY | os.O_CREAT | os.O_EXCL)
+            retry = 10
+        except OSError, e:
+            if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EEXIST':
+                retry += 1
+                if (retry >= 10):
+                    daklib.utils.fubar("Couldn't obtain lock; assuming 'dak process-unchecked' is already running.")
+                else:
+                    print("Unable to get accepted lock (try %d of 10)" % retry)
+                time.sleep(60)
+            else:
+                raise
+
+def move_to_dir (dest, perms=0660, changesperms=0664):
+    daklib.utils.move (Upload.pkg.changes_file, dest, perms=changesperms)
+    file_keys = Upload.pkg.files.keys()
+    for file in file_keys:
+        daklib.utils.move (file, dest, perms=perms)
+
 def do_accept():
     print "ACCEPT"
     if not Options["No-Action"]:
-        retry = 0
-       while retry < 10:
-           try:
-               lock_fd = os.open(Cnf["Process-New::AcceptedLockFile"], os.O_RDONLY | os.O_CREAT | os.O_EXCL)
-                retry = 10
-           except OSError, e:
-               if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EEXIST':
-                   retry += 1
-                   if (retry >= 10):
-                       daklib.utils.fubar("Couldn't obtain lock; assuming 'dak process-unchecked' is already running.")
-                   else:
-                       print("Unable to get accepted lock (try %d of 10)" % retry)
-                   time.sleep(60)
-               else:
-                   raise
+        get_accept_lock()
         (summary, short_summary) = Upload.build_summaries()
+    if Cnf.FindB("Dinstall::SecurityQueueHandling"):
+        Upload.dump_vars(Cnf["Dir::Queue::Embargoed"])
+        move_to_dir(Cnf["Dir::Queue::Embargoed"])
+        Upload.queue_build("embargoed", Cnf["Dir::Queue::Embargoed"])
+        # Check for override disparities
+        Upload.Subst["__SUMMARY__"] = summary
+    else:
         Upload.accept(summary, short_summary)
         os.unlink(Upload.pkg.changes_file[:-8]+".dak")
-       os.unlink(Cnf["Process-New::AcceptedLockFile"])
+    os.unlink(Cnf["Process-New::AcceptedLockFile"])
 
 def check_status(files):
     new = byhand = 0
@@ -854,16 +876,16 @@ def do_comments(dir, opref, npref, line, fn):
         if len(lines) == 0 or lines[0] != line + "\n": continue
         changes_files = [ x for x in os.listdir(".") if x.startswith(comm[7:]+"_")
                                 and x.endswith(".changes") ]
-       changes_files = sort_changes(changes_files)
+        changes_files = sort_changes(changes_files)
         for f in changes_files:
-                f = daklib.utils.validate_changes_file_arg(f, 0)
-                if not f: continue
-                print "\n" + f
-                fn(f, "".join(lines[1:]))
+            f = daklib.utils.validate_changes_file_arg(f, 0)
+            if not f: continue
+            print "\n" + f
+            fn(f, "".join(lines[1:]))
 
         if opref != npref and not Options["No-Action"]:
-                newcomm = npref + comm[len(opref):]
-                os.rename("%s/%s" % (dir, comm), "%s/%s" % (dir, newcomm))
+            newcomm = npref + comm[len(opref):]
+            os.rename("%s/%s" % (dir, comm), "%s/%s" % (dir, newcomm))
 
 ################################################################################
 
@@ -917,11 +939,11 @@ def main():
 
     commentsdir = Cnf.get("Process-New::Options::Comments-Dir","")
     if commentsdir:
-       if changes_files != []:
-               sys.stderr.write("Can't specify any changes files if working with comments-dir")
-               sys.exit(1)
-       do_comments(commentsdir, "ACCEPT.", "ACCEPTED.", "OK", comment_accept)
-       do_comments(commentsdir, "REJECT.", "REJECTED.", "NOTOK", comment_reject)
+        if changes_files != []:
+            sys.stderr.write("Can't specify any changes files if working with comments-dir")
+            sys.exit(1)
+        do_comments(commentsdir, "ACCEPT.", "ACCEPTED.", "OK", comment_accept)
+        do_comments(commentsdir, "REJECT.", "REJECTED.", "NOTOK", comment_reject)
     else:
         for changes_file in changes_files:
             changes_file = daklib.utils.validate_changes_file_arg(changes_file, 0)
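
Beyond re-indentation, the hunks above make several functional changes: recheck() now looks for a source .dsc in any of the Accepted, Embargoed and Unembargoed queues; a -C/--comments-dir option drives batch ACCEPT/REJECT handling from comment files; and do_accept() is split up, with the lock-file retry loop moved into get_accept_lock(), a move_to_dir() helper added, and accepts routed into the Embargoed queue when Dinstall::SecurityQueueHandling is set. The sketch below shows only the generic lock pattern (O_CREAT|O_EXCL with bounded retries); the path, retry count and delay are examples, not dak's configuration.

    # Illustrative retry loop for an exclusive lock file, mirroring the
    # get_accept_lock() logic above; limits are examples only.
    import errno, os, time

    def acquire_lock(path, tries=10, delay=60):
        for attempt in range(1, tries + 1):
            try:
                return os.open(path, os.O_RDONLY | os.O_CREAT | os.O_EXCL)
            except OSError as e:
                if e.errno not in (errno.EACCES, errno.EEXIST):
                    raise
                if attempt == tries:
                    raise RuntimeError("could not obtain %s; another run in progress?" % path)
                time.sleep(delay)
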
old mode 100755 (executable)
new mode 100644 (file)
index 04038a2..abf53fd
--- a/dak/process_unchecked.py
+++ b/dak/process_unchecked.py
@@ -32,7 +32,7 @@ import commands, errno, fcntl, os, re, shutil, stat, sys, time, tempfile, traceb
 import apt_inst, apt_pkg
 import daklib.database
 import daklib.logging
-import daklib.queue 
+import daklib.queue
 import daklib.utils
 
 from types import *
@@ -206,7 +206,7 @@ def check_changes():
 
     # Strip a source version in brackets from the source field
     if re_strip_srcver.search(changes["source"]):
-       changes["source"] = re_strip_srcver.sub('', changes["source"])
+        changes["source"] = re_strip_srcver.sub('', changes["source"])
 
     # Ensure the source field is a valid package name.
     if not re_valid_pkg_name.match(changes["source"]):
@@ -238,7 +238,7 @@ def check_changes():
     except daklib.utils.ParseMaintError, msg:
         (changes["changedby822"], changes["changedby2047"],
          changes["changedbyname"], changes["changedbyemail"]) = \
-        ("", "", "", "")
+         ("", "", "", "")
         reject("%s: Changed-By field ('%s') failed to parse: %s" \
                % (filename, changes["changed-by"], msg))
 
@@ -389,7 +389,7 @@ def check_files():
     for file in file_keys:
         # Ensure the file does not already exist in one of the accepted directories
         for dir in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
-           if not Cnf.has_key("Dir::Queue::%s" % (dir)): continue
+            if not Cnf.has_key("Dir::Queue::%s" % (dir)): continue
             if os.path.exists(Cnf["Dir::Queue::%s" % (dir) ]+'/'+file):
                 reject("%s file already exists in the %s directory." % (file, dir))
         if not daklib.utils.re_taint_free.match(file):
@@ -536,13 +536,13 @@ def check_files():
                     elif os.path.exists(Cnf["Dir::Queue::New"] + '/' + dsc_filename):
                         files[file]["new"] = 1
                     else:
-                       dsc_file_exists = 0
+                        dsc_file_exists = 0
                         for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
-                           if Cnf.has_key("Dir::Queue::%s" % (myq)):
-                               if os.path.exists(Cnf["Dir::Queue::"+myq] + '/' + dsc_filename):
-                                   dsc_file_exists = 1
-                                   break
-                       if not dsc_file_exists:
+                            if Cnf.has_key("Dir::Queue::%s" % (myq)):
+                                if os.path.exists(Cnf["Dir::Queue::"+myq] + '/' + dsc_filename):
+                                    dsc_file_exists = 1
+                                    break
+                        if not dsc_file_exists:
                             reject("no source found for %s %s (%s)." % (source_package, source_version, file))
             # Check the version and for file overwrites
             reject(Upload.check_binary_against_db(file),"")
@@ -754,7 +754,7 @@ def check_dsc():
         m = daklib.utils.re_issource.match(f)
         if not m:
             reject("%s: %s in Files field not recognised as source." % (dsc_filename, f))
-           continue
+            continue
         type = m.group(3)
         if type == "orig.tar.gz" or type == "tar.gz":
             has_tar = 1
@@ -856,7 +856,7 @@ def get_changelog_versions(source_dir):
 
 def check_source():
     # Bail out if:
-    #    a) there's no source 
+    #    a) there's no source
     # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
     # or c) the orig.tar.gz is MIA
     if not changes["architecture"].has_key("source") or reprocess == 2 \
@@ -931,30 +931,34 @@ def check_hashes ():
 
     for x in changes:
         if x.startswith("checksum-"):
-           h = x.split("-",1)[1] 
-           if h not in dict(hashes):
-               reject("Unsupported checksum field in .changes" % (h))
+            h = x.split("-",1)[1]
+            if h not in dict(hashes):
+                reject("Unsupported checksum field %s in .changes" % (h))
 
     for x in dsc:
         if x.startswith("checksum-"):
-           h = x.split("-",1)[1] 
-           if h not in dict(hashes):
-               reject("Unsupported checksum field in .dsc" % (h))
+            h = x.split("-",1)[1]
+            if h not in dict(hashes):
+                reject("Unsupported checksum field %s in .dsc" % (h))
 
     for h,f in hashes:
         try:
             fs = daklib.utils.build_file_list(changes, 0, "checksums-%s" % h, h)
             check_hash(".changes %s" % (h), fs, h, f, files)
-       except daklib.utils.no_files_exc:
-           reject("No Checksums-%s: field in .changes file" % (h))
+        except daklib.utils.no_files_exc:
+            reject("No Checksums-%s: field in .changes" % (h))
+        except daklib.utils.changes_parse_error_exc, line:
+            reject("parse error for Checksums-%s in .changes, can't grok: %s." % (h, line))
 
         if "source" not in changes["architecture"]: continue
 
         try:
             fs = daklib.utils.build_file_list(dsc, 1, "checksums-%s" % h, h)
             check_hash(".dsc %s" % (h), fs, h, f, dsc_files)
-       except daklib.utils.no_files_exc:
-           reject("No Checksums-%s: field in .changes file" % (h))
+        except daklib.utils.no_files_exc:
+            reject("No Checksums-%s: field in .dsc" % (h))
+        except daklib.utils.changes_parse_error_exc, line:
+            reject("parse error for Checksums-%s in .dsc, can't grok: %s." % (h, line))
 
 ################################################################################
 
@@ -1069,8 +1073,8 @@ def check_signed_by_key():
     if uid == None:
         uid, uid_email = changes["fingerprint"], uid
         may_nmu, may_sponsor = 1, 1
-       # XXX by default new dds don't have a fingerprint/uid in the db atm,
-       #     and can't get one in there if we don't allow nmu/sponsorship
+        # XXX by default new dds don't have a fingerprint/uid in the db atm,
+        #     and can't get one in there if we don't allow nmu/sponsorship
     elif uid[:3] == "dm:":
         uid_email = uid[3:]
         may_nmu, may_sponsor = 0, 0
@@ -1092,13 +1096,13 @@ def check_signed_by_key():
                 changes["changedbyemail"] not in sponsor_addresses):
                 changes["sponsoremail"] = uid_email
 
-    if sponsored and not may_sponsor: 
+    if sponsored and not may_sponsor:
         reject("%s is not authorised to sponsor uploads" % (uid))
 
     if not sponsored and not may_nmu:
         source_ids = []
-       check_suites = changes["distribution"].keys()
-       if "unstable" not in check_suites: check_suites.append("unstable")
+        check_suites = changes["distribution"].keys()
+        if "unstable" not in check_suites: check_suites.append("unstable")
         for suite in check_suites:
             suite_id = daklib.database.get_suite_id(suite)
             q = Upload.projectB.query("SELECT s.id FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND sa.suite = %d" % (changes["source"], suite_id))
@@ -1122,13 +1126,13 @@ def check_signed_by_key():
         for b in changes["binary"].keys():
             for suite in changes["distribution"].keys():
                 suite_id = daklib.database.get_suite_id(suite)
-               q = Upload.projectB.query("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba On (b.id = ba.bin) WHERE b.package = '%s' AND ba.suite = %s" % (b, suite_id))
-               for s in q.getresult():
+                q = Upload.projectB.query("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba On (b.id = ba.bin) WHERE b.package = '%s' AND ba.suite = %s" % (b, suite_id))
+                for s in q.getresult():
                     if s[0] != changes["source"]:
                         reject("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
 
         for file in files.keys():
-            if files[file].has_key("byhand"): 
+            if files[file].has_key("byhand"):
                 reject("%s may not upload BYHAND file %s" % (uid, file))
             if files[file].has_key("new"):
                 reject("%s may not upload NEW file %s" % (uid, file))
@@ -1208,10 +1212,10 @@ def action ():
     # q-unapproved hax0ring
     queue_info = {
          "New": { "is": is_new, "process": acknowledge_new },
-        "Autobyhand" : { "is" : is_autobyhand, "process": do_autobyhand },
+         "Autobyhand" : { "is" : is_autobyhand, "process": do_autobyhand },
          "Byhand" : { "is": is_byhand, "process": do_byhand },
-         "OldStableUpdate" : { "is": is_oldstableupdate, 
-                               "process": do_oldstableupdate },
+         "OldStableUpdate" : { "is": is_oldstableupdate,
+                                "process": do_oldstableupdate },
          "StableUpdate" : { "is": is_stableupdate, "process": do_stableupdate },
          "Unembargo" : { "is": is_unembargo, "process": queue_unembargo },
          "Embargo" : { "is": is_embargo, "process": queue_embargo },
@@ -1245,7 +1249,7 @@ def action ():
                 break
         if queue:
             print "%s for %s\n%s%s" % (
-                queue.upper(), ", ".join(changes["distribution"].keys()), 
+                queue.upper(), ", ".join(changes["distribution"].keys()),
                 reject_message, summary),
             queuekey = queue[0].upper()
             if queuekey in "RQSA":
@@ -1304,7 +1308,7 @@ def move_to_dir (dest, perms=0660, changesperms=0664):
 
 def is_unembargo ():
     q = Upload.projectB.query(
-      "SELECT package FROM disembargo WHERE package = '%s' AND version = '%s'" % 
+      "SELECT package FROM disembargo WHERE package = '%s' AND version = '%s'" %
       (changes["source"], changes["version"]))
     ql = q.getresult()
     if ql:
@@ -1320,7 +1324,7 @@ def is_unembargo ():
             if Options["No-Action"]: return 1
 
             Upload.projectB.query(
-              "INSERT INTO disembargo (package, version) VALUES ('%s', '%s')" % 
+              "INSERT INTO disembargo (package, version) VALUES ('%s', '%s')" %
               (changes["source"], changes["version"]))
             return 1
 
@@ -1360,12 +1364,12 @@ def queue_embargo (summary, short_summary):
 
 def is_stableupdate ():
     if not changes["distribution"].has_key("proposed-updates"):
-       return 0
+        return 0
 
     if not changes["architecture"].has_key("source"):
         pusuite = daklib.database.get_suite_id("proposed-updates")
         q = Upload.projectB.query(
-          "SELECT S.source FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND s.version = '%s' AND sa.suite = %d" % 
+          "SELECT S.source FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND s.version = '%s' AND sa.suite = %d" %
           (changes["source"], changes["version"], pusuite))
         ql = q.getresult()
         if ql:
@@ -1389,12 +1393,12 @@ def do_stableupdate (summary, short_summary):
 
 def is_oldstableupdate ():
     if not changes["distribution"].has_key("oldstable-proposed-updates"):
-       return 0
+        return 0
 
     if not changes["architecture"].has_key("source"):
         pusuite = daklib.database.get_suite_id("oldstable-proposed-updates")
         q = Upload.projectB.query(
-          "SELECT S.source FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND s.version = '%s' AND sa.suite = %d" % 
+          "SELECT S.source FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND s.version = '%s' AND sa.suite = %d" %
           (changes["source"], changes["version"], pusuite))
         ql = q.getresult()
         if ql:
@@ -1421,35 +1425,35 @@ def is_autobyhand ():
     any_auto = 0
     for file in files.keys():
         if files[file].has_key("byhand"):
-           any_auto = 1
-
-           # filename is of form "PKG_VER_ARCH.EXT" where PKG, VER and ARCH
-           # don't contain underscores, and ARCH doesn't contain dots.
-           # further VER matches the .changes Version:, and ARCH should be in
-           # the .changes Architecture: list.
-           if file.count("_") < 2:
-               all_auto = 0
-               continue
-       
-           (pkg, ver, archext) = file.split("_", 2)
-           if archext.count(".") < 1 or changes["version"] != ver:
-               all_auto = 0
-               continue
-
-           ABH = Cnf.SubTree("AutomaticByHandPackages")
-           if not ABH.has_key(pkg) or \
-             ABH["%s::Source" % (pkg)] != changes["source"]:
-               print "not match %s %s" % (pkg, changes["source"])
-               all_auto = 0
-               continue
-
-           (arch, ext) = archext.split(".", 1)
-           if arch not in changes["architecture"]:
-               all_auto = 0
-               continue
-
-           files[file]["byhand-arch"] = arch
-           files[file]["byhand-script"] = ABH["%s::Script" % (pkg)]
+            any_auto = 1
+
+            # filename is of form "PKG_VER_ARCH.EXT" where PKG, VER and ARCH
+            # don't contain underscores, and ARCH doesn't contain dots.
+            # further VER matches the .changes Version:, and ARCH should be in
+            # the .changes Architecture: list.
+            if file.count("_") < 2:
+                all_auto = 0
+                continue
+
+            (pkg, ver, archext) = file.split("_", 2)
+            if archext.count(".") < 1 or changes["version"] != ver:
+                all_auto = 0
+                continue
+
+            ABH = Cnf.SubTree("AutomaticByHandPackages")
+            if not ABH.has_key(pkg) or \
+              ABH["%s::Source" % (pkg)] != changes["source"]:
+                print "not match %s %s" % (pkg, changes["source"])
+                all_auto = 0
+                continue
+
+            (arch, ext) = archext.split(".", 1)
+            if arch not in changes["architecture"]:
+                all_auto = 0
+                continue
+
+            files[file]["byhand-arch"] = arch
+            files[file]["byhand-script"] = ABH["%s::Script" % (pkg)]
 
     return any_auto and all_auto
 
@@ -1466,7 +1470,7 @@ def do_autobyhand (summary, short_summary):
 
         os.system("ls -l %s" % byhandfile)
         result = os.system("%s %s %s %s %s" % (
-                files[file]["byhand-script"], byhandfile, 
+                files[file]["byhand-script"], byhandfile,
                 changes["version"], files[file]["byhand-arch"],
                 os.path.abspath(pkg.changes_file)))
         if result == 0:
@@ -1584,7 +1588,7 @@ def process_it (changes_file):
         raise
     except:
         print "ERROR"
-       traceback.print_exc(file=sys.stderr)
+        traceback.print_exc(file=sys.stderr)
         pass
 
     # Restore previous WD
@@ -1664,4 +1668,3 @@ def main():
 
 if __name__ == '__main__':
     main()
-
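
The functional part of this file's diff is in check_hashes(): parse failures while reading a Checksums-* field (daklib.utils.changes_parse_error_exc) now turn into reject messages instead of escaping, and the .dsc case no longer reports itself as the .changes file. A self-contained sketch of that reject-on-missing-or-malformed pattern follows; the simplified "checksum size filename" line format and the function name are illustrative, not dak's parser.

    # Self-contained sketch of rejecting both missing and malformed
    # Checksums-* style fields; this is a simplified stand-in, not dak's
    # build_file_list().
    def collect_checksums(paragraph, field, rejections):
        if field not in paragraph:
            rejections.append("No %s: field in control data" % field)
            return {}
        result = {}
        for line in paragraph[field].strip().splitlines():
            parts = line.split()
            if len(parts) != 3:
                rejections.append("parse error for %s, can't grok: %s" % (field, line))
                continue
            checksum, size, name = parts
            result[name] = (checksum, size)
        return result
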
old mode 100755 (executable)
new mode 100644 (file)
index ff3e082..af3bc58
--- a/dak/queue_report.py
+++ b/dak/queue_report.py
@@ -36,7 +36,7 @@
 
 import copy, glob, os, stat, sys, time
 import apt_pkg
-import daklib.queue 
+import daklib.queue
 import daklib.utils
 
 Cnf = None
@@ -61,7 +61,7 @@ Prints a report of packages in queue directories (usually new and byhand).
                    nf=notes, first           nl=notes, last
 
      Age Keys: m=minutes, h=hours, d=days, w=weeks, o=months, y=years
-     
+
 """
     sys.exit(exit_code)
 
@@ -119,77 +119,77 @@ def sg_compare (a, b):
 ############################################################
 
 def sortfunc(a,b):
-     for sorting in direction:
-         (sortkey, way, time) = sorting
-         ret = 0
-         if time == "m":
-             x=int(a[sortkey]/60)
-             y=int(b[sortkey]/60)
-         elif time == "h":
-             x=int(a[sortkey]/3600)
-             y=int(b[sortkey]/3600)
-         elif time == "d":
-             x=int(a[sortkey]/86400)
-             y=int(b[sortkey]/86400)
-         elif time == "w":
-             x=int(a[sortkey]/604800)
-             y=int(b[sortkey]/604800)
-         elif time == "o":
-             x=int(a[sortkey]/2419200)
-             y=int(b[sortkey]/2419200)
-         elif time == "y":
-             x=int(a[sortkey]/29030400)
-             y=int(b[sortkey]/29030400)
-         else:
-             x=a[sortkey]
-             y=b[sortkey]
-         if x < y:
-             ret = -1
-         elif x > y:
-             ret = 1
-         if ret != 0:
-             if way < 0:
-                 ret = ret*-1
-             return ret
-     return 0
+    for sorting in direction:
+        (sortkey, way, time) = sorting
+        ret = 0
+        if time == "m":
+            x=int(a[sortkey]/60)
+            y=int(b[sortkey]/60)
+        elif time == "h":
+            x=int(a[sortkey]/3600)
+            y=int(b[sortkey]/3600)
+        elif time == "d":
+            x=int(a[sortkey]/86400)
+            y=int(b[sortkey]/86400)
+        elif time == "w":
+            x=int(a[sortkey]/604800)
+            y=int(b[sortkey]/604800)
+        elif time == "o":
+            x=int(a[sortkey]/2419200)
+            y=int(b[sortkey]/2419200)
+        elif time == "y":
+            x=int(a[sortkey]/29030400)
+            y=int(b[sortkey]/29030400)
+        else:
+            x=a[sortkey]
+            y=b[sortkey]
+        if x < y:
+            ret = -1
+        elif x > y:
+            ret = 1
+        if ret != 0:
+            if way < 0:
+                ret = ret*-1
+            return ret
+    return 0
 
 ############################################################
 
 def header():
     print """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
-       <html><head><meta http-equiv="Content-Type" content="text/html; charset=utf-8">
-       <title>Debian NEW and BYHAND Packages</title>
-       <link type="text/css" rel="stylesheet" href="style.css">
-       <link rel="shortcut icon" href="http://www.debian.org/favicon.ico">
-       </head>
-       <body>
-       <div align="center">
-       <a href="http://www.debian.org/">
+        <html><head><meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+        <title>Debian NEW and BYHAND Packages</title>
+        <link type="text/css" rel="stylesheet" href="style.css">
+        <link rel="shortcut icon" href="http://www.debian.org/favicon.ico">
+        </head>
+        <body>
+        <div align="center">
+        <a href="http://www.debian.org/">
      <img src="http://www.debian.org/logos/openlogo-nd-50.png" border="0" hspace="0" vspace="0" alt=""></a>
-       <a href="http://www.debian.org/">
+        <a href="http://www.debian.org/">
      <img src="http://www.debian.org/Pics/debian.png" border="0" hspace="0" vspace="0" alt="Debian Project"></a>
-       </div>
-       <br />
-       <table class="reddy" width="100%">
-       <tr>
-       <td class="reddy">
+        </div>
+        <br />
+        <table class="reddy" width="100%">
+        <tr>
+        <td class="reddy">
     <img src="http://www.debian.org/Pics/red-upperleft.png" align="left" border="0" hspace="0" vspace="0"
      alt="" width="15" height="16"></td>
-       <td rowspan="2" class="reddy">Debian NEW and BYHAND Packages</td>
-       <td class="reddy">
+        <td rowspan="2" class="reddy">Debian NEW and BYHAND Packages</td>
+        <td class="reddy">
     <img src="http://www.debian.org/Pics/red-upperright.png" align="right" border="0" hspace="0" vspace="0"
      alt="" width="16" height="16"></td>
-       </tr>
-       <tr>
-       <td class="reddy">
+        </tr>
+        <tr>
+        <td class="reddy">
     <img src="http://www.debian.org/Pics/red-lowerleft.png" align="left" border="0" hspace="0" vspace="0"
      alt="" width="16" height="16"></td>
-       <td class="reddy">
+        <td class="reddy">
     <img src="http://www.debian.org/Pics/red-lowerright.png" align="right" border="0" hspace="0" vspace="0"
      alt="" width="15" height="16"></td>
-       </tr>
-       </table>
-       """
+        </tr>
+        </table>
+        """
 
 def footer():
     print "<p class=\"validate\">Timestamp: %s (UTC)</p>" % (time.strftime("%d.%m.%Y / %H:%M:%S", time.gmtime()))
@@ -197,7 +197,7 @@ def footer():
     print "<p>You may want to look at <a href=\"http://ftp-master.debian.org/REJECT-FAQ.html\">the REJECT-FAQ</a> for possible reasons why one of the above packages may get rejected.</p>"
     print """<a href="http://validator.w3.org/check?uri=referer">
     <img border="0" src="http://www.w3.org/Icons/valid-html401" alt="Valid HTML 4.01!" height="31" width="88"></a>
-       <a href="http://jigsaw.w3.org/css-validator/check/referer">
+        <a href="http://jigsaw.w3.org/css-validator/check/referer">
     <img border="0" src="http://jigsaw.w3.org/css-validator/images/vcss" alt="Valid CSS!"
      height="31" width="88"></a>
     """
@@ -206,16 +206,16 @@ def footer():
 def table_header(type):
     print "<h1>Summary for: %s</h1>" % (type)
     print """<center><table border="0">
-       <tr>
-         <th align="center">Package</th>
-         <th align="center">Version</th>
-         <th align="center">Arch</th>
-         <th align="center">Distribution</th>
-         <th align="center">Age</th>
-         <th align="center">Maintainer</th>
-         <th align="center">Closes</th>
-       </tr>
-       """
+        <tr>
+          <th align="center">Package</th>
+          <th align="center">Version</th>
+          <th align="center">Arch</th>
+          <th align="center">Distribution</th>
+          <th align="center">Age</th>
+          <th align="center">Maintainer</th>
+          <th align="center">Closes</th>
+        </tr>
+        """
 
 def table_footer(type, source_count, total_count):
     print "</table></center><br>\n"
@@ -252,7 +252,7 @@ def table_row(source, version, arch, last_mod, maint, distribution, closes):
         print "<a href=\"http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=%s\">#%s</a><br>" % (close, close)
     print "</td></tr>"
     row_number+=1
-    
+
 ############################################################
 
 def process_changes_files(changes_files, type):
@@ -359,26 +359,26 @@ def process_changes_files(changes_files, type):
     # If we produce html we always have oldest first.
         direction.append([4,-1,"ao"])
     else:
-               if Cnf.has_key("Queue-Report::Options::Sort"):
-                       for i in Cnf["Queue-Report::Options::Sort"].split(","):
-                         if i == "ao":
-                                 # Age, oldest first.
-                                 direction.append([4,-1,age])
-                         elif i == "an":
-                                 # Age, newest first.
-                                 direction.append([4,1,age])
-                         elif i == "na":
-                                 # Name, Ascending.
-                                 direction.append([0,1,0])
-                         elif i == "nd":
-                                 # Name, Descending.
-                                 direction.append([0,-1,0])
-                         elif i == "nl":
-                                 # Notes last.
-                                 direction.append([3,1,0])
-                         elif i == "nf":
-                                 # Notes first.
-                                 direction.append([3,-1,0])
+        if Cnf.has_key("Queue-Report::Options::Sort"):
+            for i in Cnf["Queue-Report::Options::Sort"].split(","):
+                if i == "ao":
+                    # Age, oldest first.
+                    direction.append([4,-1,age])
+                elif i == "an":
+                    # Age, newest first.
+                    direction.append([4,1,age])
+                elif i == "na":
+                    # Name, Ascending.
+                    direction.append([0,1,0])
+                elif i == "nd":
+                    # Name, Descending.
+                    direction.append([0,-1,0])
+                elif i == "nl":
+                    # Notes last.
+                    direction.append([3,1,0])
+                elif i == "nf":
+                    # Notes first.
+                    direction.append([3,-1,0])
     entries.sort(lambda x, y: sortfunc(x, y))
     # Yes, in theory you can add several sort options at the commandline with. But my mind is to small
     # at the moment to come up with a real good sorting function that considers all the sidesteps you
@@ -429,14 +429,14 @@ def main():
                  ('s',"sort","Queue-Report::Options::Sort", "HasArg"),
                  ('a',"age","Queue-Report::Options::Age", "HasArg")]
     for i in [ "help" ]:
-       if not Cnf.has_key("Queue-Report::Options::%s" % (i)):
-           Cnf["Queue-Report::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Queue-Report::Options::%s" % (i)):
+            Cnf["Queue-Report::Options::%s" % (i)] = ""
 
     apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
 
     Options = Cnf.SubTree("Queue-Report::Options")
     if Options["Help"]:
-       usage()
+        usage()
 
     Upload = daklib.queue.Upload(Cnf)
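
The re-indented sort handling above is whitespace-only: it still builds a list of [field-index, direction, age-unit] triples from the --sort option and leaves the actual ordering to sortfunc(), which is outside this hunk. Purely as an illustration (sortfunc itself and the age handling are assumptions, not code from this commit), a cascading comparator driven by such triples could look like:

    # illustrative only -- not the sortfunc from this commit
    direction = [[4, -1, "ao"], [0, 1, 0]]       # as appended above: [field, factor, age-unit]
    entries = [["bar", 0, 0, "", 20], ["foo", 0, 0, "", 10]]

    def sortfunc(a, b):
        # use the first requested field that differs; 'factor' flips the order
        for field, factor, _age_unit in direction:
            ret = cmp(a[field], b[field])
            if ret != 0:
                return ret * factor
        return 0

    entries.sort(lambda x, y: sortfunc(x, y))
    print [e[0] for e in entries]                # -> ['bar', 'foo']
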
 
old mode 100755 (executable)
new mode 100644 (file)
index d78fe59..d04dbeb
@@ -23,7 +23,7 @@ import os, pg, sys
 import apt_pkg
 import daklib.database
 import daklib.logging
-import daklib.queue 
+import daklib.queue
 import daklib.utils
 
 ################################################################################
@@ -56,14 +56,14 @@ def main():
                  ('m',"manual-reject","Reject-Proposed-Updates::Options::Manual-Reject", "HasArg"),
                  ('s',"no-mail", "Reject-Proposed-Updates::Options::No-Mail")]
     for i in [ "help", "manual-reject", "no-mail" ]:
-       if not Cnf.has_key("Reject-Proposed-Updates::Options::%s" % (i)):
-           Cnf["Reject-Proposed-Updates::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Reject-Proposed-Updates::Options::%s" % (i)):
+            Cnf["Reject-Proposed-Updates::Options::%s" % (i)] = ""
 
     arguments = apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
 
     Options = Cnf.SubTree("Reject-Proposed-Updates::Options")
     if Options["Help"]:
-       usage()
+        usage()
     if not arguments:
         daklib.utils.fubar("need at least one .changes filename as an argument.")
 
@@ -82,7 +82,7 @@ def main():
     for arg in arguments:
         arg = daklib.utils.validate_changes_file_arg(arg)
         Upload.pkg.changes_file = arg
-       Upload.init_vars()
+        Upload.init_vars()
         cwd = os.getcwd()
         os.chdir(Cnf["Suite::Proposed-Updates::CopyDotDak"])
         Upload.update_vars()
@@ -159,7 +159,7 @@ def reject (reject_message = ""):
     # If we fail here someone is probably trying to exploit the race
     # so let's just raise an exception ...
     if os.path.exists(reject_filename):
-         os.unlink(reject_filename)
+        os.unlink(reject_filename)
     reject_fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
 
     # Build up the rejection email
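
The hunk above only fixes indentation, but it is worth noting what the surrounding code does: os.O_CREAT|os.O_EXCL makes the open fail if the reject file reappears between the unlink and the open, so a symlink race cannot redirect the write. The same idiom in isolation (the path below is made up for illustration):

    import os

    reject_filename = "/tmp/example.reason"   # hypothetical path, not from the commit
    if os.path.exists(reject_filename):
        os.unlink(reject_filename)
    # O_EXCL: raise OSError instead of following anything slipped in after the unlink
    reject_fd = os.open(reject_filename, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
    os.write(reject_fd, "rejection text\n")
    os.close(reject_fd)
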
old mode 100755 (executable)
new mode 100644 (file)
index 37cdad2..262039b
--- a/dak/rm.py
+++ b/dak/rm.py
@@ -158,7 +158,7 @@ def reverse_depends_check(removals, suites):
                 unsat = 0
                 for dep_package, _, _ in dep:
                     if dep_package in removals:
-                            unsat += 1
+                        unsat += 1
                 if unsat == len(dep):
                     component = p2c[package]
                     if component != "main":
@@ -196,7 +196,7 @@ def reverse_depends_check(removals, suites):
                 unsat = 0
                 for dep_package, _, _ in dep:
                     if dep_package in removals:
-                            unsat += 1
+                        unsat += 1
                 if unsat == len(dep):
                     if component != "main":
                         source = "%s/%s" % (source, component)
@@ -214,7 +214,7 @@ def reverse_depends_check(removals, suites):
     else:
         print "No dependency problem found."
     print
-    
+
 ################################################################################
 
 def main ():
@@ -239,16 +239,16 @@ def main ():
     for i in [ "architecture", "binary-only", "carbon-copy", "component",
                "done", "help", "no-action", "partial", "rdep-check", "reason",
                "source-only" ]:
-       if not Cnf.has_key("Rm::Options::%s" % (i)):
-           Cnf["Rm::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Rm::Options::%s" % (i)):
+            Cnf["Rm::Options::%s" % (i)] = ""
     if not Cnf.has_key("Rm::Options::Suite"):
-       Cnf["Rm::Options::Suite"] = "unstable"
+        Cnf["Rm::Options::Suite"] = "unstable"
 
     arguments = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
     Options = Cnf.SubTree("Rm::Options")
 
     if Options["Help"]:
-       usage()
+        usage()
 
     projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
     daklib.database.init(Cnf, projectB)
@@ -534,4 +534,3 @@ def main ():
 
 if __name__ == '__main__':
     main()
-
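
Both re-indented loops in reverse_depends_check() encode the same rule: a dependency, given as a list of OR'd alternatives, is only reported as broken when every alternative is in the removal set. A stripped-down sketch of that rule with invented data:

    removals = ["libfoo1", "libfoo-dev"]                  # hypothetical removal list
    # one dependency as a list of (package, version, operator) alternatives
    dep = [("libfoo1", "", ""), ("libbar1", "", "")]

    unsat = 0
    for dep_package, _, _ in dep:
        if dep_package in removals:
            unsat += 1

    if unsat == len(dep):
        print "dependency becomes unsatisfiable"
    else:
        print "still satisfiable through an alternative"  # taken with this data
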
old mode 100755 (executable)
new mode 100644 (file)
index f686404..bf54915
@@ -32,7 +32,7 @@
 
 import commands, os, pwd, re, sys, time
 import apt_pkg
-import daklib.queue 
+import daklib.queue
 import daklib.utils
 
 ################################################################################
@@ -73,7 +73,7 @@ def do_upload(changes_files):
         components = {}
         upload_uris = {}
         file_list = []
-       Upload.init_vars()
+        Upload.init_vars()
         # Parse the .dak file for the .changes file
         Upload.pkg.changes_file = changes_file
         Upload.update_vars()
@@ -154,49 +154,49 @@ def make_advisory(advisory_nr, changes_files):
 
     for arg in changes_files:
         arg = daklib.utils.validate_changes_file_arg(arg)
-       Upload.pkg.changes_file = arg
-       Upload.init_vars()
-       Upload.update_vars()
+        Upload.pkg.changes_file = arg
+        Upload.init_vars()
+        Upload.update_vars()
 
-       src = Upload.pkg.changes["source"]
-       if src not in adv_packages:
-           adv_packages += [src]
+        src = Upload.pkg.changes["source"]
+        if src not in adv_packages:
+            adv_packages += [src]
 
-       suites = Upload.pkg.changes["distribution"].keys()
-       for suite in suites:
-           if not updated_pkgs.has_key(suite):
+        suites = Upload.pkg.changes["distribution"].keys()
+        for suite in suites:
+            if not updated_pkgs.has_key(suite):
                 updated_pkgs[suite] = {}
 
-       files = Upload.pkg.files
-       for file in files.keys():
-           arch = files[file]["architecture"]
-           md5 = files[file]["md5sum"]
-           size = files[file]["size"]
-           poolname = Cnf["Dir::PoolRoot"] + \
-               daklib.utils.poolify(src, files[file]["component"])
-           if arch == "source" and file.endswith(".dsc"):
-               dscpoolname = poolname
-           for suite in suites:
-               if not updated_pkgs[suite].has_key(arch):
-                   updated_pkgs[suite][arch] = {}
-               updated_pkgs[suite][arch][file] = {
+        files = Upload.pkg.files
+        for file in files.keys():
+            arch = files[file]["architecture"]
+            md5 = files[file]["md5sum"]
+            size = files[file]["size"]
+            poolname = Cnf["Dir::PoolRoot"] + \
+                daklib.utils.poolify(src, files[file]["component"])
+            if arch == "source" and file.endswith(".dsc"):
+                dscpoolname = poolname
+            for suite in suites:
+                if not updated_pkgs[suite].has_key(arch):
+                    updated_pkgs[suite][arch] = {}
+                updated_pkgs[suite][arch][file] = {
                     "md5": md5, "size": size,
                     "poolname": poolname }
 
-       dsc_files = Upload.pkg.dsc_files
-       for file in dsc_files.keys():
-           arch = "source"
-           if not dsc_files[file].has_key("files id"):
+        dsc_files = Upload.pkg.dsc_files
+        for file in dsc_files.keys():
+            arch = "source"
+            if not dsc_files[file].has_key("files id"):
                 continue
 
-           # otherwise, it's already in the pool and needs to be
-           # listed specially
-           md5 = dsc_files[file]["md5sum"]
-           size = dsc_files[file]["size"]
-           for suite in suites:
-               if not updated_pkgs[suite].has_key(arch):
-                   updated_pkgs[suite][arch] = {}
-               updated_pkgs[suite][arch][file] = {
+            # otherwise, it's already in the pool and needs to be
+            # listed specially
+            md5 = dsc_files[file]["md5sum"]
+            size = dsc_files[file]["size"]
+            for suite in suites:
+                if not updated_pkgs[suite].has_key(arch):
+                    updated_pkgs[suite][arch] = {}
+                updated_pkgs[suite][arch][file] = {
                     "md5": md5, "size": size,
                     "poolname": dscpoolname }
 
@@ -208,10 +208,10 @@ def make_advisory(advisory_nr, changes_files):
     username = whoamifull[4].split(",")[0]
 
     Subst = {
-       "__ADVISORY__": advisory_nr,
-       "__WHOAMI__": username,
-       "__DATE__": time.strftime("%B %d, %Y", time.gmtime(time.time())),
-       "__PACKAGE__": ", ".join(adv_packages),
+        "__ADVISORY__": advisory_nr,
+        "__WHOAMI__": username,
+        "__DATE__": time.strftime("%B %d, %Y", time.gmtime(time.time())),
+        "__PACKAGE__": ", ".join(adv_packages),
         "__DAK_ADDRESS__": Cnf["Dinstall::MyEmailAddress"]
         }
 
@@ -225,35 +225,35 @@ def make_advisory(advisory_nr, changes_files):
                                        Cnf["Suite::%s::Version" % suite], suite)
         adv += "%s\n%s\n\n" % (suite_header, "-"*len(suite_header))
 
-       arches = Cnf.ValueList("Suite::%s::Architectures" % suite)
-       if "source" in arches:
+        arches = Cnf.ValueList("Suite::%s::Architectures" % suite)
+        if "source" in arches:
             arches.remove("source")
-       if "all" in arches:
+        if "all" in arches:
             arches.remove("all")
-       arches.sort()
+        arches.sort()
 
-       adv += "  %s was released for %s.\n\n" % (
-               suite.capitalize(), daklib.utils.join_with_commas_and(arches))
+        adv += "  %s was released for %s.\n\n" % (
+                suite.capitalize(), daklib.utils.join_with_commas_and(arches))
 
-       for a in ["source", "all"] + arches:
-           if not updated_pkgs[suite].has_key(a):
+        for a in ["source", "all"] + arches:
+            if not updated_pkgs[suite].has_key(a):
                 continue
 
-           if a == "source":
-               adv += "  Source archives:\n\n"
-           elif a == "all":
-               adv += "  Architecture independent packages:\n\n"
-           else:
-               adv += "  %s architecture (%s)\n\n" % (a,
-                       Cnf["Architectures::%s" % a])
-
-           for file in updated_pkgs[suite][a].keys():
-               adv += "    http://%s/%s%s\n" % (
-                               archive, updated_pkgs[suite][a][file]["poolname"], file)
-               adv += "      Size/MD5 checksum: %8s %s\n" % (
-                       updated_pkgs[suite][a][file]["size"],
-                       updated_pkgs[suite][a][file]["md5"])
-           adv += "\n"
+            if a == "source":
+                adv += "  Source archives:\n\n"
+            elif a == "all":
+                adv += "  Architecture independent packages:\n\n"
+            else:
+                adv += "  %s architecture (%s)\n\n" % (a,
+                        Cnf["Architectures::%s" % a])
+
+            for file in updated_pkgs[suite][a].keys():
+                adv += "    http://%s/%s%s\n" % (
+                                archive, updated_pkgs[suite][a][file]["poolname"], file)
+                adv += "      Size/MD5 checksum: %8s %s\n" % (
+                        updated_pkgs[suite][a][file]["size"],
+                        updated_pkgs[suite][a][file]["md5"])
+            adv += "\n"
     adv = adv.rstrip()
 
     Subst["__ADVISORY_TEXT__"] = adv
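
The Subst dictionary built above maps __PLACEHOLDER__ keys (advisory number, packages, the assembled advisory text, ...) onto an advisory template; the expansion helper itself is not part of this hunk. As a rough illustration only of how such a mapping can be applied to a template string (apply_subst and the values are made up):

    def apply_subst(subst, template):
        # naive placeholder replacement, for illustration only
        for key, value in subst.items():
            template = template.replace(key, str(value))
        return template

    Subst = {"__ADVISORY__": "1234-1", "__PACKAGE__": "openssl"}   # invented values
    print apply_subst(Subst, "DSA __ADVISORY__ : __PACKAGE__ security update")
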
old mode 100755 (executable)
new mode 100644 (file)
index a35f4d6..34baba9
@@ -29,7 +29,7 @@ import copy, os, sys, time
 import apt_pkg
 import examine_package
 import daklib.database
-import daklib.queue 
+import daklib.queue
 import daklib.utils
 
 # Globals
@@ -113,17 +113,17 @@ def html_header(name, filestoexamine):
       <p><a href="#source-lintian" onclick="show('source-lintian-body')">source lintian</a></p>
       """
     for fn in filter(lambda x: x.endswith('.deb') or x.endswith('.udeb'),filestoexamine):
-      packagename = fn.split('_')[0]
-      print """
-      <p class="subtitle">%(pkg)s</p>
-      <p><a href="#binary-%(pkg)s-control" onclick="show('binary-%(pkg)s-control-body')">control file</a></p>
-      <p><a href="#binary-%(pkg)s-lintian" onclick="show('binary-%(pkg)s-lintian-body')">binary lintian</a></p>
-      <p><a href="#binary-%(pkg)s-contents" onclick="show('binary-%(pkg)s-contents-body')">.deb contents</a></p>
-      <p><a href="#binary-%(pkg)s-copyright" onclick="show('binary-%(pkg)s-copyright-body')">copyright</a></p>
-      <p><a href="#binary-%(pkg)s-file-listing" onclick="show('binary-%(pkg)s-file-listing-body')">file listing</a></p>
-      """%{"pkg":packagename}
+        packagename = fn.split('_')[0]
+        print """
+        <p class="subtitle">%(pkg)s</p>
+        <p><a href="#binary-%(pkg)s-control" onclick="show('binary-%(pkg)s-control-body')">control file</a></p>
+        <p><a href="#binary-%(pkg)s-lintian" onclick="show('binary-%(pkg)s-lintian-body')">binary lintian</a></p>
+        <p><a href="#binary-%(pkg)s-contents" onclick="show('binary-%(pkg)s-contents-body')">.deb contents</a></p>
+        <p><a href="#binary-%(pkg)s-copyright" onclick="show('binary-%(pkg)s-copyright-body')">copyright</a></p>
+        <p><a href="#binary-%(pkg)s-file-listing" onclick="show('binary-%(pkg)s-file-listing-body')">file listing</a></p>
+        """%{"pkg":packagename}
     print "    </div>"
-   
+
 def html_footer():
     print """    <p class="validate">Timestamp: %s (UTC)</p>"""% (time.strftime("%d.%m.%Y / %H:%M:%S", time.gmtime()))
     print """    <p><a href="http://validator.w3.org/check?uri=referer">
old mode 100755 (executable)
new mode 100644 (file)
index 0579f04..2fc6c90
@@ -42,7 +42,7 @@ def main():
             dest = dirname + '/' + os.path.basename(filename)
             if os.path.exists(dest):
                 daklib.utils.fubar("%s already exists." % (dest))
-            print "Move: %s -> %s" % (filename, dest) 
+            print "Move: %s -> %s" % (filename, dest)
             os.rename(filename, dest)
             count = count + 1
     print "Moved %d files." % (count)
old mode 100755 (executable)
new mode 100644 (file)
index f7414ed..bf84efe
@@ -216,14 +216,14 @@ def main ():
     Cnf = daklib.utils.get_conf()
     Arguments = [('h',"help","Stats::Options::Help")]
     for i in [ "help" ]:
-       if not Cnf.has_key("Stats::Options::%s" % (i)):
-           Cnf["Stats::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Stats::Options::%s" % (i)):
+            Cnf["Stats::Options::%s" % (i)] = ""
 
     args = apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
 
     Options = Cnf.SubTree("Stats::Options")
     if Options["Help"]:
-       usage()
+        usage()
 
     if len(args) < 1:
         daklib.utils.warn("dak stats requires a MODE argument")
@@ -249,4 +249,3 @@ def main ():
 
 if __name__ == '__main__':
     main()
-
old mode 100755 (executable)
new mode 100644 (file)
index 8eb5e3d..6656ab6
@@ -99,8 +99,8 @@ SELECT DISTINCT ON (f.id) c.name, sec.section, l.path, f.filename, f.id
             section=""
         dest = "%sdists/%s/%s/source/%s%s" % (Cnf["Dir::Root"], codename, component, section, os.path.basename(i[3]))
         if not os.path.exists(dest):
-           src = i[2]+i[3]
-           src = daklib.utils.clean_symlink(src, dest, Cnf["Dir::Root"])
+            src = i[2]+i[3]
+            src = daklib.utils.clean_symlink(src, dest, Cnf["Dir::Root"])
             if Cnf.Find("Symlink-Dists::Options::Verbose"):
                 print src+' -> '+dest
             os.symlink(src, dest)
@@ -165,14 +165,14 @@ def main ():
     Arguments = [('h',"help","Symlink-Dists::Options::Help"),
                  ('v',"verbose","Symlink-Dists::Options::Verbose")]
     for i in ["help", "verbose" ]:
-       if not Cnf.has_key("Symlink-Dists::Options::%s" % (i)):
-           Cnf["Symlink-Dists::Options::%s" % (i)] = ""
+        if not Cnf.has_key("Symlink-Dists::Options::%s" % (i)):
+            Cnf["Symlink-Dists::Options::%s" % (i)] = ""
 
     apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
     Options = Cnf.SubTree("Symlink-Dists::Options")
 
     if Options["Help"]:
-       usage()
+        usage()
 
     projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
 
@@ -184,4 +184,3 @@ def main ():
 
 if __name__ == '__main__':
     main()
-
old mode 100755 (executable)
new mode 100644 (file)
index 05560c3..8238c20
@@ -30,7 +30,7 @@ import utils
 def fail(message):
     sys.stderr.write("%s\n" % (message))
     sys.exit(1)
-    
+
 ################################################################################
 
 def main ():
old mode 100755 (executable)
new mode 100644 (file)
index e31f7d8..919a70a
@@ -30,7 +30,7 @@ import utils
 def fail(message):
     sys.stderr.write("%s\n" % (message))
     sys.exit(1)
-    
+
 ################################################################################
 
 def main ():
old mode 100755 (executable)
new mode 100644 (file)
index e7cb99e..5636624
@@ -75,7 +75,7 @@ def init():
 
     projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
     daklib.database.init(Cnf, projectB)
-    
+
 ################################################################################
 
 def usage (exit_code=0):
@@ -125,7 +125,7 @@ def load_transitions(trans_file):
     try:
         for test in trans:
             t = trans[test]
-        
+
             # First check if we know all the keys for the transition and if they have
             # the right type (and for the packages also if the list has the right types
             # included, ie. not a list in list, but only str in the list)
@@ -133,7 +133,7 @@ def load_transitions(trans_file):
                 if key not in checkkeys:
                     print "ERROR: Unknown key %s in transition %s" % (key, test)
                     failure = True
-        
+
                 if key == "packages":
                     if type(t[key]) != list:
                         print "ERROR: Unknown type %s for packages in transition %s." % (type(t[key]), test)
@@ -153,7 +153,7 @@ def load_transitions(trans_file):
                         print "ERROR: No packages defined in transition %s" % (test)
                         failure = True
                         continue
-        
+
                 elif type(t[key]) != str:
                     if key == "new" and type(t[key]) == int:
                         # Ok, debian native version
@@ -161,7 +161,7 @@ def load_transitions(trans_file):
                     else:
                         print "ERROR: Unknown type %s for key %s in transition %s" % (type(t[key]), key, test)
                         failure = True
-        
+
             # And now the other way round - are all our keys defined?
             for key in checkkeys:
                 if key not in t:
@@ -213,7 +213,7 @@ def write_transitions(from_trans):
 
     trans_file = Cnf["Dinstall::Reject::ReleaseTransitions"]
     trans_temp = trans_file + ".tmp"
-  
+
     trans_lock = lock_file(trans_file)
     temp_lock  = lock_file(trans_temp)
 
@@ -243,7 +243,7 @@ def write_transitions_from_file(from_file):
         sys.exit(3)
 
     if Options["sudo"]:
-        os.spawnl(os.P_WAIT, "/usr/bin/sudo", "/usr/bin/sudo", "-u", "dak", "-H", 
+        os.spawnl(os.P_WAIT, "/usr/bin/sudo", "/usr/bin/sudo", "-u", "dak", "-H",
               "/usr/local/bin/dak", "transitions", "--import", from_file)
     else:
         trans = load_transitions(from_file)
@@ -257,7 +257,7 @@ def temp_transitions_file(transitions):
     # NB: file is unlinked by caller, but fd is never actually closed.
     # We need the chmod, as the file is (most possibly) copied from a
     # sudo-ed script and would be unreadable if it has default mkstemp mode
-    
+
     (fd, path) = tempfile.mkstemp("", "transitions", Cnf["Transitions::TempPath"])
     os.chmod(path, 0644)
     f = open(path, "w")
@@ -277,7 +277,7 @@ def edit_transitions():
         if result != 0:
             os.unlink(edit_file)
             daklib.utils.fubar("%s invocation failed for %s, not removing tempfile." % (editor, edit_file))
-    
+
         # Now try to load the new file
         test = load_transitions(edit_file)
 
@@ -292,8 +292,8 @@ def edit_transitions():
             print "------------------------------------------------------------------------"
             transition_info(test)
 
-           prompt = "[S]ave, Edit again, Drop changes?"
-           default = "S"
+            prompt = "[S]ave, Edit again, Drop changes?"
+            default = "S"
 
         answer = "XXX"
         while prompt.find(answer) == -1:
@@ -377,7 +377,7 @@ def check_transitions(transitions):
             print "Committing"
             for remove in to_remove:
                 del transitions[remove]
-    
+
             edit_file = temp_transitions_file(transitions)
             write_transitions_from_file(edit_file)
 
@@ -389,14 +389,14 @@ def check_transitions(transitions):
 ################################################################################
 
 def print_info(trans, source, expected, rm, reason, packages):
-        print """Looking at transition: %s
- Source:      %s
- New Version: %s
- Responsible: %s
- Description: %s
- Blocked Packages (total: %d): %s
+    print """Looking at transition: %s
+Source:      %s
+New Version: %s
+Responsible: %s
+Description: %s
+Blocked Packages (total: %d): %s
 """ % (trans, source, expected, rm, reason, len(packages), ", ".join(packages))
-        return
+    return
 
 ################################################################################
 
@@ -435,7 +435,7 @@ def main():
     #### This can run within sudo !! ####
     #####################################
     init()
-    
+
     # Check if there is a file defined (and existant)
     transpath = Cnf.get("Dinstall::Reject::ReleaseTransitions", "")
     if transpath == "":
@@ -454,7 +454,7 @@ def main():
         daklib.utils.warn("Temporary path %s not found." %
                           (Cnf["Transitions::TempPath"]))
         sys.exit(1)
-   
+
     if Options["import"]:
         try:
             write_transitions_from_file(Options["import"])
@@ -485,7 +485,7 @@ def main():
         transition_info(transitions)
 
     sys.exit(0)
-    
+
 ################################################################################
 
 if __name__ == '__main__':
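
temp_transitions_file() above pairs tempfile.mkstemp() with an explicit chmod because mkstemp's default 0600 mode would leave the file unreadable once it is handed across the sudo boundary mentioned in the comment. The same pattern in isolation (the target directory is a placeholder, not Cnf["Transitions::TempPath"]):

    import os, tempfile

    # mkstemp(suffix, prefix, dir) creates the file with mode 0600; widen it so
    # another user can still read it after a sudo hand-over
    (fd, path) = tempfile.mkstemp("", "transitions", "/tmp")
    os.chmod(path, 0644)
    os.close(fd)
    f = open(path, "w")
    f.write("...\n")
    f.close()
    print "wrote", path
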
index d5da89d83eb9c3d755d6ec9935b67c8ef284339c..9befa7bf6c427984968b9ad42c336c1d9ef9ffee 100644 (file)
@@ -37,10 +37,10 @@ def replace_dak_function(module,name):
         myfunc.__doc__ = f.__doc__
         myfunc.__dict__.update(f.__dict__)
 
-       fnname = "%s:%s" % (module, name)
-       if fnname in dak_functions_to_replace:
-           raise Exception, \
-               "%s in %s already marked to be replaced" % (name, module)
+        fnname = "%s:%s" % (module, name)
+        if fnname in dak_functions_to_replace:
+            raise Exception, \
+                "%s in %s already marked to be replaced" % (name, module)
         dak_functions_to_replace["%s:%s" % (module,name)] = myfunc
         return f
     return x
@@ -57,5 +57,3 @@ def init(name, module, userext):
         if len(f) > 0 and m == name:
             dak_replaced_functions[f] = module.__dict__[f]
             module.__dict__[f] = newfunc
-
-
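
replace_dak_function() is a decorator factory: a site extension decorates its replacement with the dak module and function name it wants to override, the wrapper is recorded in dak_functions_to_replace (raising if the same module:name is registered twice), and init() later rebinds the target module's attribute. A hedged usage sketch; the module and function names below are invented, and the exact call signature of a replacement is not visible in this diff:

    from daklib.extensions import replace_dak_function

    @replace_dak_function("some_dak_module", "some_function")
    def my_replacement(*args, **kwargs):
        # registered under "some_dak_module:some_function"; init() would later
        # install it in place of the original attribute
        return None
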
index d74555026c00d874d9a33a170fd67dc2dc7b40a3..f0dcd9c94201435288f3bd37e9e8896b4edc8631 100644 (file)
@@ -39,16 +39,16 @@ class Logger:
         if not os.path.exists(logdir):
             umask = os.umask(00000)
             os.makedirs(logdir, 02775)
-           os.umask(umask)
+            os.umask(umask)
         # Open the logfile
         logfilename = "%s/%s" % (logdir, time.strftime("%Y-%m"))
-       logfile = None
-       if debug:
-           logfile = sys.stderr
-       else:
-           umask = os.umask(00002)
-           logfile = utils.open_file(logfilename, 'a')
-           os.umask(umask)
+        logfile = None
+        if debug:
+            logfile = sys.stderr
+        else:
+            umask = os.umask(00002)
+            logfile = utils.open_file(logfilename, 'a')
+            os.umask(umask)
         self.logfile = logfile
         # Log the start of the program
         user = pwd.getpwuid(os.getuid())[0]
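
The Logger changes above are whitespace-only, but the code they touch shows the usual save/override/restore dance around os.umask(), without which the modes passed to makedirs() and open_file() would be silently masked down. The pattern on its own (the directory is a placeholder):

    import os

    logdir = "/tmp/example-logs"        # placeholder, not dak's configured log dir
    if not os.path.exists(logdir):
        umask = os.umask(00000)         # clear the umask so 02775 survives intact
        os.makedirs(logdir, 02775)
        os.umask(umask)                 # restore the caller's umask
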
old mode 100755 (executable)
new mode 100644 (file)
index e2c7396..9f8223a
@@ -334,8 +334,8 @@ class Upload:
                 files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
                 destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
                 summary += file + "\n  to " + destination + "\n"
-               if not files[file].has_key("type"):
-                   files[file]["type"] = "unknown"
+                if not files[file].has_key("type"):
+                    files[file]["type"] = "unknown"
                 if files[file]["type"] in ["deb", "udeb", "dsc"]:
                     # (queue/unchecked), there we have override entries already, use them
                     # (process-new), there we dont have override entries, use the newly generated ones.
@@ -718,26 +718,26 @@ distribution."""
     # (2) Bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1
 
     def source_exists (self, package, source_version, suites = ["any"]):
-       okay = 1
-       for suite in suites:
-           if suite == "any":
-               que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
-                   (package)
-           else:
-               # source must exist in suite X, or in some other suite that's
-               # mapped to X, recursively... silent-maps are counted too,
-               # unreleased-maps aren't.
-               maps = self.Cnf.ValueList("SuiteMappings")[:]
-               maps.reverse()
-               maps = [ m.split() for m in maps ]
-               maps = [ (x[1], x[2]) for x in maps
-                               if x[0] == "map" or x[0] == "silent-map" ]
-               s = [suite]
-               for x in maps:
-                       if x[1] in s and x[0] not in s:
-                               s.append(x[0])
-
-               que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
+        okay = 1
+        for suite in suites:
+            if suite == "any":
+                que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
+                    (package)
+            else:
+                # source must exist in suite X, or in some other suite that's
+                # mapped to X, recursively... silent-maps are counted too,
+                # unreleased-maps aren't.
+                maps = self.Cnf.ValueList("SuiteMappings")[:]
+                maps.reverse()
+                maps = [ m.split() for m in maps ]
+                maps = [ (x[1], x[2]) for x in maps
+                                if x[0] == "map" or x[0] == "silent-map" ]
+                s = [suite]
+                for x in maps:
+                    if x[1] in s and x[0] not in s:
+                        s.append(x[0])
+
+                que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
             q = self.projectB.query(que)
 
             # Reduce the query results to a list of version numbers
@@ -754,11 +754,11 @@ distribution."""
 
             # No source found...
             okay = 0
-           break
-       return okay
+            break
+        return okay
 
     ################################################################################
-    
+
     def in_override_p (self, package, component, suite, binary_type, file):
         files = self.pkg.files
 
@@ -842,12 +842,12 @@ distribution."""
                     ch = self.pkg.changes
                     cansave = 0
                     if ch.get('distribution-version', {}).has_key(suite):
-                        # we really use the other suite, ignoring the conflicting one ...
+                    # we really use the other suite, ignoring the conflicting one ...
                         addsuite = ch["distribution-version"][suite]
-                    
+
                         add_version = self.get_anyversion(query_result, addsuite)
                         target_version = self.get_anyversion(query_result, target_suite)
-                    
+
                         if not add_version:
                             # not add_version can only happen if we map to a suite
                             # that doesn't enhance the suite we're propup'ing from.
@@ -878,7 +878,7 @@ distribution."""
                             self.pkg.changes.setdefault("propdistribution", {})
                             self.pkg.changes["propdistribution"][addsuite] = 1
                             cansave = 1
-                
+
                     if not cansave:
                         self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
 
@@ -1033,8 +1033,8 @@ SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 
                     in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
                     # See process_it() in 'dak process-unchecked' for explanation of this
-                   # in_unchecked check dropped by ajt 2007-08-28, how did that
-                   # ever make sense?
+                    # in_unchecked check dropped by ajt 2007-08-28, how did that
+                    # ever make sense?
                     if os.path.exists(in_unchecked) and False:
                         return (self.reject_message, in_unchecked)
                     else:
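
Beyond the re-indentation, source_exists() shows how SuiteMappings is interpreted: the entries are walked in reverse, and for every "map A B" or "silent-map A B" whose target B is already accepted, the originating suite A is accepted as well, so a source that only lives in a mapped-from suite still satisfies the check. The expansion step in isolation, with invented mapping entries:

    # hypothetical SuiteMappings values, as ValueList() would return them
    value_list = ["map etch stable", "map lenny testing"]

    maps = value_list[:]
    maps.reverse()
    maps = [m.split() for m in maps]
    maps = [(x[1], x[2]) for x in maps if x[0] == "map" or x[0] == "silent-map"]

    s = ["testing"]                        # the suite being checked
    for x in maps:
        if x[1] in s and x[0] not in s:    # target already accepted -> accept origin
            s.append(x[0])
    print s                                # -> ['testing', 'lenny']
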
old mode 100755 (executable)
new mode 100644 (file)
index 0d22bd1..a094788
@@ -46,6 +46,8 @@ re_gpg_uid = re.compile('^uid.*<([^>]*)>')
 re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
 re_verwithext = re.compile(r"^(\d+)(?:\.(\d+))(?:\s+\((\S+)\))?$")
 
+re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
+
 changes_parse_error_exc = "Can't parse line in .changes file"
 invalid_dsc_format_exc = "Invalid .dsc file"
 nk_format_exc = "Unknown Format: in .changes file"
@@ -84,7 +86,7 @@ class ParseMaintError(Error):
 
 def open_file(filename, mode='r'):
     try:
-       f = open(filename, mode)
+        f = open(filename, mode)
     except IOError:
         raise cant_open_exc, filename
     return f
@@ -149,7 +151,7 @@ The rules for (signing_rules == 1)-mode are:
     lines = changes_in.readlines()
 
     if not lines:
-       raise changes_parse_error_exc, "[Empty changes file]"
+        raise changes_parse_error_exc, "[Empty changes file]"
 
     # Reindex by line number so we can easily verify the format of
     # .dsc files...
@@ -195,7 +197,7 @@ The rules for (signing_rules == 1)-mode are:
         if slf:
             field = slf.groups()[0].lower()
             changes[field] = slf.groups()[1]
-           first = 1
+            first = 1
             continue
         if line == " .":
             changes[field] += '\n'
@@ -207,9 +209,9 @@ The rules for (signing_rules == 1)-mode are:
             if first == 1 and changes[field] != "":
                 changes[field] += '\n'
             first = 0
-           changes[field] += mlf.groups()[0] + '\n'
+            changes[field] += mlf.groups()[0] + '\n'
             continue
-       error += line
+        error += line
 
     if signing_rules == 1 and inside_signature:
         raise invalid_dsc_format_exc, index
@@ -219,14 +221,14 @@ The rules for (signing_rules == 1)-mode are:
 
     if changes.has_key("source"):
         # Strip the source version in brackets from the source field,
-       # put it in the "source-version" field instead.
+        # put it in the "source-version" field instead.
         srcver = re_srchasver.search(changes["source"])
-       if srcver:
+        if srcver:
             changes["source"] = srcver.group(1)
-           changes["source-version"] = srcver.group(2)
+            changes["source-version"] = srcver.group(2)
 
     if error:
-       raise changes_parse_error_exc, error
+        raise changes_parse_error_exc, error
 
     return changes
 
@@ -260,7 +262,7 @@ def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
     else:
         if (format < (1,5) or format > (1,8)):
             raise nk_format_exc, "%s" % (changes.get("format","0.0"))
-       if field != "files" and format < (1,8):
+        if field != "files" and format < (1,8):
             raise nk_format_exc, "%s" % (changes.get("format","0.0"))
 
     includes_section = (not is_a_dsc) and field == "files"
@@ -375,46 +377,46 @@ switched to 'email (name)' format."""
 
 # sendmail wrapper, takes _either_ a message string or a file as arguments
 def send_mail (message, filename=""):
-       # If we've been passed a string dump it into a temporary file
-       if message:
-            filename = tempfile.mktemp()
-            fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
-            os.write (fd, message)
-            os.close (fd)
-
-       # Invoke sendmail
-       (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
-       if (result != 0):
-            raise sendmail_failed_exc, output
-
-       # Clean up any temporary files
-       if message:
-            os.unlink (filename)
+        # If we've been passed a string dump it into a temporary file
+    if message:
+        filename = tempfile.mktemp()
+        fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
+        os.write (fd, message)
+        os.close (fd)
+
+    # Invoke sendmail
+    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
+    if (result != 0):
+        raise sendmail_failed_exc, output
+
+    # Clean up any temporary files
+    if message:
+        os.unlink (filename)
 
 ################################################################################
 
 def poolify (source, component):
     if component:
-       component += '/'
+        component += '/'
     if source[:3] == "lib":
-       return component + source[:4] + '/' + source + '/'
+        return component + source[:4] + '/' + source + '/'
     else:
-       return component + source[:1] + '/' + source + '/'
+        return component + source[:1] + '/' + source + '/'
 
 ################################################################################
 
 def move (src, dest, overwrite = 0, perms = 0664):
     if os.path.exists(dest) and os.path.isdir(dest):
-       dest_dir = dest
+        dest_dir = dest
     else:
-       dest_dir = os.path.dirname(dest)
+        dest_dir = os.path.dirname(dest)
     if not os.path.exists(dest_dir):
-       umask = os.umask(00000)
-       os.makedirs(dest_dir, 02775)
-       os.umask(umask)
+        umask = os.umask(00000)
+        os.makedirs(dest_dir, 02775)
+        os.umask(umask)
     #print "Moving %s to %s..." % (src, dest)
     if os.path.exists(dest) and os.path.isdir(dest):
-       dest += '/' + os.path.basename(src)
+        dest += '/' + os.path.basename(src)
     # Don't overwrite unless forced to
     if os.path.exists(dest):
         if not overwrite:
@@ -428,16 +430,16 @@ def move (src, dest, overwrite = 0, perms = 0664):
 
 def copy (src, dest, overwrite = 0, perms = 0664):
     if os.path.exists(dest) and os.path.isdir(dest):
-       dest_dir = dest
+        dest_dir = dest
     else:
-       dest_dir = os.path.dirname(dest)
+        dest_dir = os.path.dirname(dest)
     if not os.path.exists(dest_dir):
-       umask = os.umask(00000)
-       os.makedirs(dest_dir, 02775)
-       os.umask(umask)
+        umask = os.umask(00000)
+        os.makedirs(dest_dir, 02775)
+        os.umask(umask)
     #print "Copying %s to %s..." % (src, dest)
     if os.path.exists(dest) and os.path.isdir(dest):
-       dest += '/' + os.path.basename(src)
+        dest += '/' + os.path.basename(src)
     # Don't overwrite unless forced to
     if os.path.exists(dest):
         if not overwrite:
@@ -454,23 +456,23 @@ def where_am_i ():
     res = socket.gethostbyaddr(socket.gethostname())
     database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
     if database_hostname:
-       return database_hostname
+        return database_hostname
     else:
         return res[0]
 
 def which_conf_file ():
     res = socket.gethostbyaddr(socket.gethostname())
     if Cnf.get("Config::" + res[0] + "::DakConfig"):
-       return Cnf["Config::" + res[0] + "::DakConfig"]
+        return Cnf["Config::" + res[0] + "::DakConfig"]
     else:
-       return default_config
+        return default_config
 
 def which_apt_conf_file ():
     res = socket.gethostbyaddr(socket.gethostname())
     if Cnf.get("Config::" + res[0] + "::AptConfig"):
-       return Cnf["Config::" + res[0] + "::AptConfig"]
+        return Cnf["Config::" + res[0] + "::AptConfig"]
     else:
-       return default_apt_config
+        return default_apt_config
 
 def which_alias_file():
     hostname = socket.gethostbyaddr(socket.gethostname())[0]
@@ -664,9 +666,9 @@ def real_arch(arch):
 ################################################################################
 
 def join_with_commas_and(list):
-       if len(list) == 0: return "nothing"
-       if len(list) == 1: return list[0]
-       return ", ".join(list[:-1]) + " and " + list[-1]
+    if len(list) == 0: return "nothing"
+    if len(list) == 1: return list[0]
+    return ", ".join(list[:-1]) + " and " + list[-1]
 
 ################################################################################
 
@@ -684,7 +686,7 @@ def pp_deps (deps):
 ################################################################################
 
 def get_conf():
-       return Cnf
+    return Cnf
 
 ################################################################################
 
@@ -927,7 +929,7 @@ on error."""
         return "%s: tainted filename" % (filename)
 
     # Invoke gpgv on the file
-    status_read, status_write = os.pipe(); 
+    status_read, status_write = os.pipe();
     cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
     (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
 
@@ -999,7 +1001,7 @@ used."""
             return None
 
     # Build the command line
-    status_read, status_write = os.pipe(); 
+    status_read, status_write = os.pipe();
     cmd = "gpgv --status-fd %s %s %s %s" % (
         status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
 
@@ -1094,21 +1096,21 @@ used."""
 ################################################################################
 
 def gpg_get_key_addresses(fingerprint):
-  """retreive email addresses from gpg key uids for a given fingerprint"""
-  addresses = key_uid_email_cache.get(fingerprint)
-  if addresses != None:
-      return addresses
-  addresses = set()
-  cmd = "gpg --no-default-keyring %s --fingerprint %s" \
-              % (gpg_keyring_args(), fingerprint)
-  (result, output) = commands.getstatusoutput(cmd)
-  if result == 0:
-    for l in output.split('\n'):
-      m = re_gpg_uid.match(l)
-      if m:
-        addresses.add(m.group(1))
-  key_uid_email_cache[fingerprint] = addresses
-  return addresses
+    """retreive email addresses from gpg key uids for a given fingerprint"""
+    addresses = key_uid_email_cache.get(fingerprint)
+    if addresses != None:
+        return addresses
+    addresses = set()
+    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
+                % (gpg_keyring_args(), fingerprint)
+    (result, output) = commands.getstatusoutput(cmd)
+    if result == 0:
+        for l in output.split('\n'):
+            m = re_gpg_uid.match(l)
+            if m:
+                addresses.add(m.group(1))
+    key_uid_email_cache[fingerprint] = addresses
+    return addresses
 
 ################################################################################
 
@@ -1200,6 +1202,6 @@ Cnf = apt_pkg.newConfiguration()
 apt_pkg.ReadConfigFileISC(Cnf,default_config)
 
 if which_conf_file() != default_config:
-       apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
+    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
 
 ################################################################################
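
Most of the daklib/utils.py hunks only change indentation; poolify() is compact enough that its behaviour is worth spelling out. It prepends the component (if any) and then a one-letter prefix directory, or a "lib" plus one-letter prefix for library source packages. Expected results, derived directly from the code above (importing daklib.utils needs a readable dak configuration):

    from daklib import utils

    print utils.poolify("dak", "main")      # -> main/d/dak/
    print utils.poolify("libfoo", "main")   # -> main/libf/libfoo/
    print utils.poolify("bash", "")         # -> b/bash/  (empty component, no prefix)
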
diff --git a/scripts/debian/import_testing.sh b/scripts/debian/import_testing.sh
new file mode 100755 (executable)
index 0000000..6b5fa6c
--- /dev/null
@@ -0,0 +1,44 @@
+#!/bin/bash
+
+# Copyright (C) 2008 Joerg Jaspert <joerg@debian.org>
+
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+set -e
+
+# Load up some standard variables
+export SCRIPTVARS=/srv/ftp.debian.org/dak/config/debian/vars
+. $SCRIPTVARS
+
+# What file we look at.
+TESTINGINPUT="/srv/release.debian.org/britney/Heidi/set/current"
+
+# Change to a known safe location
+cd $masterdir
+
+echo "Importing new data for testing into projectb"
+
+# Now load the data
+cat $TESTINGINPUT | dak control-suite --set testing
+
+echo "Done"
+
+exit 0
old mode 100755 (executable)
new mode 100644 (file)
index 7b817b5..570b547
@@ -1,4 +1,5 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
 
 # Adds yet unknown changedby fields when this column is added to an existing
 # database. If everything goes well, it needs to be run only once. Data is
@@ -6,6 +7,8 @@
 # merkel:/home/filippo/upload-history/*.db.
 
 # Copyright (C) 2008  Christoph Berg <myon@debian.org>
+# Copyright (C) 2008  Bernd Zeimetz <bzed@debian.org>
+
 
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -35,51 +38,104 @@ import daklib.database
 import daklib.queue
 import daklib.utils
 from pysqlite2 import dbapi2 as sqlite
+import pysqlite2.dbapi2
+import psycopg2
 
 projectB = None
-DBNAME = "uploads-ddc.db"
+projectBdb = None
+DBNAME = "uploads-queue.db"
 sqliteConn = None
+maintainer_id_cache={}
 
 ###############################################################################
 
+def get_or_set_maintainer_id (maintainer):
+    global maintainer_id_cache
+
+    if maintainer_id_cache.has_key(maintainer):
+        return maintainer_id_cache[maintainer]
+
+    if isinstance(maintainer, basestring):
+        if not isinstance(maintainer, unicode):
+            try:
+                maintainer = unicode(maintainer, 'utf-8')
+            except:
+                maintainer = unicode(maintainer, 'iso8859-15')
+    maintainer = maintainer.encode('utf-8')
+
+    print "%s" % maintainer
+    cursor = projectBdb.cursor()
+    cursor.execute("SELECT id FROM maintainer WHERE name=%s", (maintainer, ))
+    row = cursor.fetchone()
+    if not row:
+        cursor.execute("INSERT INTO maintainer (name) VALUES (%s)" , (maintainer, ))
+        cursor.execute("SELECT id FROM maintainer WHERE name=%s", (maintainer, ))
+        row = cursor.fetchone()
+    maintainer_id = row[0]
+    maintainer_id_cache[maintainer] = maintainer_id
+    cursor.close()
+
+    return maintainer_id
+
+
+def __get_changedby__(package, version):
+    cur = sqliteConn.cursor()
+    cur.execute("SELECT changedby FROM uploads WHERE package=? AND version=? LIMIT 1", (package, version))
+    res = cur.fetchone()
+    cur.close()
+    return res
+
 def insert ():
     print "Adding missing changedby fields."
 
-    projectB.query("BEGIN WORK")
-
-    q = projectB.query("SELECT id, source, version FROM source WHERE changedby IS NULL")
-
-    for i in q.getresult():
-        print i[1] + "/" + i[2] + ":",
-
-        cur = sqliteConn.cursor()
-        cur.execute("SELECT changedby FROM uploads WHERE package = '%s' AND version = '%s' LIMIT 1" % (i[1], i[2]))
-        res = cur.fetchall()
-        if len(res) != 1:
+    listcursor = projectBdb.cursor()
+    listcursor.execute("SELECT id, source, version FROM source WHERE changedby IS NULL")
+    row = listcursor.fetchone()
+
+    while row:
+        print repr(row)
+        try:
+            res = __get_changedby__(row[1], row[2])
+        except:
+            sqliteConn.text_factory = str
+            try:
+                res = __get_changedby__(row[1], row[2])
+            except:
+                print 'FAILED SQLITE'
+                res=None
+            sqliteConn.text_factory = unicode
+        if res:
+            changedby_id = get_or_set_maintainer_id(res[0])
+
+            cur = projectBdb.cursor()
+            cur.execute("UPDATE source SET changedby=%s WHERE id=%s" % (changedby_id, row[0]))
+            cur.close()
+            print changedby_id, "(%d)" % row[0]
+
+        else:
             print "nothing found"
-            continue
 
-        changedby = res[0][0].replace("'", "\\'")
-        changedby_id = daklib.database.get_or_set_maintainer_id(changedby)
-
-        projectB.query("UPDATE source SET changedby = %d WHERE id = %d" % (changedby_id, i[0]))
-        print changedby, "(%d)" % changedby_id
-
-    projectB.query("COMMIT WORK")
+        row = listcursor.fetchone()
+    listcursor.close()
 
 ###############################################################################
 
+
 def main():
-    global projectB, sqliteConn
+    global projectB, sqliteConn, projectBdb
 
     Cnf = daklib.utils.get_conf()
     Upload = daklib.queue.Upload(Cnf)
     projectB = Upload.projectB
+    projectBdb = psycopg2.connect("dbname=%s" % Cnf["DB::Name"])
 
     sqliteConn = sqlite.connect(DBNAME)
 
     insert()
 
+    projectBdb.commit()
+    projectBdb.close()
+
 ###############################################################################
 
 if __name__ == '__main__':
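
The rewrite above moves this one-off script from the pg module, with hand-rolled quoting such as the old replace("'", "\\'") call, to psycopg2 plus an sqlite cursor: psycopg2 takes %s placeholders with a separate parameter tuple and the sqlite side uses ? markers, so the drivers handle the escaping. A minimal sketch of the parameterized form (the DSN and value are placeholders):

    import psycopg2

    conn = psycopg2.connect("dbname=projectb")      # placeholder DSN
    cur = conn.cursor()
    # the value travels separately; it is never interpolated into the SQL string
    cur.execute("SELECT id FROM maintainer WHERE name=%s",
                ("Jane Doe <jane@example.org>",))
    row = cur.fetchone()
    cur.close()
    conn.commit()
    conn.close()
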
index 9925148cfcd9638d66e025a10d03178c72f20c87..7a6e2a490b82f46b809f556e6b08414be02b844b 100644 (file)
@@ -28,6 +28,12 @@ CREATE TABLE maintainer (
        name TEXT UNIQUE NOT NULL
 );
 
+CREATE TABLE src_uploaders (
+       id SERIAL PRIMARY KEY,
+       source INT4 NOT NULL REFERENCES source,
+       maintainer INT4 NOT NULL REFERENCES maintainer
+);
+
 CREATE TABLE uid (
        id SERIAL PRIMARY KEY,
        uid TEXT UNIQUE NOT NULL,