FileList "/srv/ftp.debian.org/database/dists/unstable_$(SECTION)_binary-$(ARCH).list";
SourceFileList "/srv/ftp.debian.org/database/dists/unstable_$(SECTION)_source.list";
Sections "main contrib non-free";
- Architectures "alpha amd64 armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc source";
+ Architectures "alpha amd64 armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc kfreebsd-i386 kfreebsd-amd64 source";
BinOverride "override.sid.$(SECTION)";
ExtraOverride "override.sid.extra.$(SECTION)";
SrcOverride "override.sid.$(SECTION).src";
FileList "/srv/ftp.debian.org/database/dists/experimental_$(SECTION)_binary-$(ARCH).list";
SourceFileList "/srv/ftp.debian.org/database/dists/experimental_$(SECTION)_source.list";
Sections "main contrib non-free";
- Architectures "alpha amd64 armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc source";
+ Architectures "alpha amd64 armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc kfreebsd-i386 kfreebsd-amd64 source";
BinOverride "override.sid.$(SECTION)";
SrcOverride "override.sid.$(SECTION).src";
};
log "$*"
fi
}
+
+# used by cron.dinstall *and* cron.unchecked.
+function make_buildd_dir () {
+ cd $configdir
+ apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate apt.conf.buildd
+
+ cd ${incoming}
+ rm -f buildd/Release*
+ apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="Debian" -o APT::FTPArchive::Release::Label="Debian" -o APT::FTPArchive::Release::Description="buildd incoming" -o APT::FTPArchive::Release::Architectures="${archs}" release buildd > Release
+ gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 6070D3A1 --detach-sign -o Release.gpg Release
+ mv Release* buildd/.
+
+ cd ${incoming}
+ mkdir -p tree/${STAMP}
+ cp -al ${incoming}/buildd/. tree/${STAMP}/
+ ln -sfT tree/${STAMP} ${incoming}/builddweb
+ find ./tree -mindepth 1 -maxdepth 1 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
+}
# Create the postgres dump files
function pgdump_pre() {
log "Creating pre-daily-cron-job backup of projectb database..."
- pg_dump projectb > $base/backup/dump_$(date +%Y.%m.%d-%H:%M:%S)
+ pg_dump projectb > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
}
function pgdump_post() {
apt-ftparchive generate apt.conf.buildd
}
+function buildd_dir() {
+	# Rebuild the buildd dir to avoid long times of 403
+ log "Regenerating the buildd incoming dir"
+ STAMP=$(date "+%Y%m%d%H%M")
+ make_buildd_dir
+}
+
function scripts() {
log "Running various scripts from $scriptsdir"
cd $scriptsdir
ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
}
+function merkel3() {
+ # Push dak@merkel to tell it to sync the dd accessible parts. Returns immediately, the sync runs detached
+ log "Trigger merkels dd accessible parts sync"
+ ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
+}
+
function runparts() {
log "Using run-parts to run scripts in $base/scripts/distmnt"
run-parts --report $base/scripts/distmnt
cd $configdir
$scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
R --slave --vanilla < $base/misc/ftpstats.R
+ dak stats arch-space > $webdir/arch-space
+ dak stats pkg-nums > $webdir/pkg-nums
}
function aptftpcleanup() {
function compress() {
log "Compress old psql backups"
cd $base/backup/
- find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mtime +1 |
+ find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
+
+	find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
+ while read dumpname; do
+ echo "Compressing $dumpname"
+ bzip2 -9v "$dumpname"
+ done
+	find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin +720 |
while read dumpname; do
echo "Compressing $dumpname"
bzip2 -9v "$dumpname"
done
+ finddup -l -d $base/backup
}
function logstats() {
)
stage $GO
+GO=(
+ FUNC="buildd_dir"
+ TIME="buildd_dir"
+ ARGS=""
+ ERR="false"
+)
+stage $GO
+
GO=(
FUNC="cruft"
TIME="cruft"
)
stage $GO
-GO=(
- FUNC="pgdakdev"
- TIME="dak-dev db"
- ARGS=""
- ERR="false"
-)
-stage $GO
-
GO=(
FUNC="expire"
TIME="expire_dumps"
ARGS=""
ERR="false"
)
+stage $GO
rm -f ${LOCK_BRITNEY}
+GO=(
+ FUNC="pgdakdev"
+ TIME="dak-dev db"
+ ARGS=""
+ ERR="false"
+)
+stage $GO
+
GO=(
FUNC="aptftpcleanup"
TIME="apt-ftparchive cleanup"
)
stage $GO
+GO=(
+ FUNC="merkel3"
+ TIME="merkel ddaccessible sync"
+ ARGS=""
+ ERR="false"
+)
+stage $GO
+
GO=(
FUNC="compress"
TIME="compress"
export SCRIPTVARS=/srv/ftp.debian.org/dak/config/debian/vars
. $SCRIPTVARS
+# common functions are "outsourced"
+. "${configdir}/common"
+
LOCKDAILY=""
LOCKFILE="$lockdir/unchecked.lock"
NOTICE="$lockdir/daily.lock"
cat override.sid.$i.src >> override.sid.all3.src
fi
done
- cd $configdir
- apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate apt.conf.buildd
-
- cd ${incoming}
- rm -f buildd/Release*
- apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="Debian" -o APT::FTPArchive::Release::Label="Debian" -o APT::FTPArchive::Release::Description="buildd incoming" -o APT::FTPArchive::Release::Architectures="${archs}" release buildd > Release
- gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 6070D3A1 --detach-sign -o Release.gpg Release
- mv Release* buildd/.
-
- cd ${incoming}
- mkdir -p tree/${STAMP}
- cp -al ${incoming}/buildd/. tree/${STAMP}/
- ln -sfT tree/${STAMP} ${incoming}/builddweb
- find ./tree -mindepth 1 -maxdepth 1 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
+ make_buildd_dir
. $configdir/cron.buildd
fi
s390 "IBM S/390";
sh "Hitatchi SuperH";
sparc "Sun SPARC/UltraSPARC";
+ kfreebsd-i386 "GNU/kFreeBSD i386";
+ kfreebsd-amd64 "GNU/kFreeBSD amd64";
};
Archive
ftpdir=$base/ftp
webdir=$base/web
indices=$ftpdir/indices
-archs="alpha amd64 arm armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc"
+archs="alpha amd64 arm armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc kfreebsd-i386 kfreebsd-amd64 "
scriptdir=$base/scripts
masterdir=$base/dak/
f.close()
else:
- utils.warn("No wanna-build dump file for architecture %s", architecture)
+ utils.warn("No wanna-build dump file for architecture %s" % architecture)
return ret
################################################################################
for component in check_components:
architectures = filter(utils.real_arch, database.get_suite_architectures(suite))
for architecture in architectures:
+ if component == 'main/debian-installer' and re.match("kfreebsd", architecture):
+ continue
filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (Cnf["Dir::Root"], suite, component, architecture)
# apt_pkg.ParseTagFile needs a real file handle
(fd, temp_filename) = utils.temp_filename()
#!/usr/bin/env python
-""" Script to automate some parts of checking NEW packages """
-# Copyright (C) 2000, 2001, 2002, 2003, 2006 James Troup <james@nocrew.org>
+"""
+Script to automate some parts of checking NEW packages
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2000, 2001, 2002, 2003, 2006 James Troup <james@nocrew.org>
+@copyright: 2009 Joerg Jaspert <joerg@debian.org>
+@license: GNU General Public License version 2 or later
+"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
################################################################################
-import errno, os, pg, re, sys, md5
+import errno
+import os
+import pg
+import re
+import sys
+import md5
import apt_pkg, apt_inst
from daklib import database
from daklib import utils
database.init(Cnf, projectB)
printed_copyrights = {}
+package_relations = {} #: Store relations of packages for later output
# default is to not output html.
use_html = 0
comma_count += 1
return result
-def output_deb_info(suite, filename):
+def output_package_relations ():
+ """
+ Output the package relations, if there is more than one package checked in this run.
+ """
+
+ if len(package_relations) < 2:
+ # Only list something if we have more than one binary to compare
+ return
+
+ to_print = ""
+ for package in package_relations:
+ for relation in package_relations[package]:
+ to_print += "%-15s: (%s) %s\n" % (package, relation, package_relations[package][relation])
+
+ package_relations.clear()
+ foldable_output("Package relations", "relations", to_print)
+
+def output_deb_info(suite, filename, packagename):
(control, control_keys, section, depends, recommends, arch, maintainer) = read_control(filename)
if control == '':
return formatted_text("no control info")
to_print = ""
+ if not package_relations.has_key(packagename):
+ package_relations[packagename] = {}
for key in control_keys :
if key == 'Depends':
field_value = create_depends_string(suite, depends)
+ package_relations[packagename][key] = field_value
elif key == 'Recommends':
field_value = create_depends_string(suite, recommends)
+ package_relations[packagename][key] = field_value
elif key == 'Section':
field_value = section
elif key == 'Architecture':
foldable_output("control file for %s" % (filename), "binary-%s-control"%packagename,
- output_deb_info(suite, deb_filename), norow=True)
+ output_deb_info(suite, deb_filename, packagename), norow=True)
if is_a_udeb:
foldable_output("skipping lintian check for udeb", "binary-%s-lintian"%packagename,
else:
utils.fubar("Unrecognised file type: '%s'." % (f))
finally:
+ output_package_relations()
if not Options["Html-Output"]:
# Reset stdout here so future less invocations aren't FUBAR
less_fd.close()
def print_sha256_files (tree, files):
print_md5sha_files (tree, files, apt_pkg.sha256sum)
+def write_release_file (relpath, suite, component, origin, label, arch, version="", suite_suffix="", notautomatic=""):
+ try:
+ if os.access(relpath, os.F_OK):
+ if os.stat(relpath).st_nlink > 1:
+ os.unlink(relpath)
+ release = open(relpath, "w")
+ except IOError:
+ utils.fubar("Couldn't write to " + relpath)
+
+ release.write("Archive: %s\n" % (suite))
+ if version != "":
+ release.write("Version: %s\n" % (version))
+
+ if suite_suffix:
+ release.write("Component: %s/%s\n" % (suite_suffix,component))
+ else:
+ release.write("Component: %s\n" % (component))
+
+ release.write("Origin: %s\n" % (origin))
+ release.write("Label: %s\n" % (label))
+ if notautomatic != "":
+ release.write("NotAutomatic: %s\n" % (notautomatic))
+ release.write("Architecture: %s\n" % (arch))
+ release.close()
+
################################################################################
def main ():
else:
rel = "%s/binary-%s/Release" % (sec, arch)
relpath = Cnf["Dir::Root"]+tree+"/"+rel
-
- try:
- if os.access(relpath, os.F_OK):
- if os.stat(relpath).st_nlink > 1:
- os.unlink(relpath)
- release = open(relpath, "w")
- #release = open(longsuite.replace("/","_") + "_" + arch + "_" + sec + "_Release", "w")
- except IOError:
- utils.fubar("Couldn't write to " + relpath)
-
- release.write("Archive: %s\n" % (suite))
- if version != "":
- release.write("Version: %s\n" % (version))
- if suite_suffix:
- release.write("Component: %s/%s\n" % (suite_suffix,sec))
- else:
- release.write("Component: %s\n" % (sec))
- release.write("Origin: %s\n" % (origin))
- release.write("Label: %s\n" % (label))
- if notautomatic != "":
- release.write("NotAutomatic: %s\n" % (notautomatic))
- release.write("Architecture: %s\n" % (arch))
- release.close()
+ write_release_file(relpath, suite, sec, origin, label, arch, version, suite_suffix, notautomatic)
files.append(rel)
if AptCnf.has_key("tree::%s/main" % (tree)):
for arch in AptCnf["tree::%s/%s::Architectures" % (tree,dis)].split():
if arch != "source": # always true
+ rel = "%s/%s/binary-%s/Release" % (dis, sec, arch)
+ relpath = Cnf["Dir::Root"]+tree+"/"+rel
+ write_release_file(relpath, suite, dis, origin, label, arch, version, suite_suffix, notautomatic)
+ files.append(rel)
for cfile in compressnames("tree::%s/%s" % (tree,dis),
"Packages",
"%s/%s/binary-%s/Packages" % (dis, sec, arch)):
elif ftype == "dsc":
examine_package.check_dsc(changes['distribution'], f)
finally:
+ examine_package.output_package_relations()
sys.stdout = stdout_fd
except IOError, e:
if e.errno == errno.EPIPE:
return
Logger.log(["Moving to %s" % (suite,), Upload.pkg.changes_file])
Upload.dump_vars(queue_dir)
- move_to_dir(queue_dir)
+ move_to_dir(queue_dir, perms=0664)
os.unlink(Upload.pkg.changes_file[:-8]+".dak")
def _accept():
file_handle = open_file(f)
except CantOpenError:
rejmsg.append("Could not open file %s for checksumming" % (f))
+ continue
files[f][hash_key(hashname)] = hashfunc(file_handle)
TODO
====
-* Implement autosigning, see ftpmaster_autosigning on ftp-master host in
- text/.
+Various
+-------
+* Implement autosigning, see ftpmaster_autosigning on ftp-master host in text/.
* Check TODO.old and move still-valid/useful entries over here.
+* need a testsuite _badly_
* database table "binaries" contains a column 'type TEXT NOT
- NULL'. This should be made a FK on override_type, as it only contains
- deb/udeb strings.
+ NULL'. This should be made a FK on override_type, as it only contains
+ deb/udeb strings.
-- sql query to do the db work for it:
- ALTER TABLE binaries ADD COLUMN new_type INT4 REFERENCES override_type(id);
- UPDATE BINARIES SET new_type = 7 WHERE type = 'deb';
- UPDATE BINARIES SET new_type = 8 WHERE type = 'udeb';
- ALTER TABLE binaries DROP COLUMN type;
- ALTER TABLE binaries RENAME COLUMN new_type TO type;
+ - sql query to do the db work for it:
+ ALTER TABLE binaries ADD COLUMN new_type INT4 REFERENCES override_type(id);
+ UPDATE BINARIES SET new_type = 7 WHERE type = 'deb';
+ UPDATE BINARIES SET new_type = 8 WHERE type = 'udeb';
+ ALTER TABLE binaries DROP COLUMN type;
+ ALTER TABLE binaries RENAME COLUMN new_type TO type;
-- needs updateX.py written and then the rest of the code changed to deal
- with it.
+ - needs updateX.py written and then the rest of the code changed to deal
+ with it.
* Checkout SQL Alchemy and probably use that for our database layer.
+* reject on > or < in a version constraint
+
+* use pythonX.Y-tarfile to check orig.tar.gz timestamps too.
+
+* the .dak stuff is fundamentally braindamaged for various reasons, it
+ should DIE. If we want to cache information - use a ("temporary")
+ database table and let p-a clean it up, e.g. like contents does.
+
+* security global mail overrides should special case buildd stuff so
+ that buildds get ACCEPTED mails. Or maybe send them at
+ new-security-install time. That way upload-security doesn't grow
+ boundlessly.
+
+* debianqueued sucks. Reimplement in a sane way.
+
+NEW processing
+--------------
+* 'dak process-new' allows you to edit the section and change the
+ component, but really shouldn't allow the component change.
+
+* 'dak process-new' doesn't do the right thing with -2 and -1 uploads,
+ as you can end up with the .orig.tar.gz not in the pool or belonging
+ to a wrong suite.
+
+* 'dak process-new' doesn't trap signals from 'dak examine-package' properly
+
+* 'dak queue-report' should footnote the actual notes, and also * the
+ versions with notes so we can see new versions since being noted...
+
+* <neuro> the orig was in NEW, the changes that caused it to be NEW
+ were pulled out in -2, and we end up with no orig in the archive :(
+
+
+
+Override handling
+-----------------
+* 'dak check-overrides' should remove the src-only override when a
+ binary+source override exists
+
+* override checks sucks; it needs to track changes made by the
+ maintainer and pass them onto ftpmaster instead of warning the maintainer.
+
+* Fix component handling in overrides
+
+
+Cruft
+-----
+* 'dak cruft-report' could do with overrides
+
+* cruft-report could spot "half-dropped" binaries. Like if a package
+ used to build A and B, but B is no longer built for half the
+ architectures.
+
+* cruft-report's NVIU check doesn't catch cases where source package
+ changed name, should check binaries too. [debian-devel@l.d.o,
+ 2004-02-03]
+
+* 'dak cruft-report' doesn't look at debian-installer but should.
+
Others
------
- o 'dak check-overrides' should remove the src-only override when a
- binary+source override exists
-
- o reject on > or < in a version constraint
-
o 'dak reject-proposed-updates' should only start an editor once to
capture a message; it will usually be the same message for all
files on the same command line.
-23:07 < aba> elmo: and, how about enhancing 'dak cruft-report' to spot half-dropped
- binaries on one arch (i.e. package used to build A and B, but B is
- no longer built on some archs)?
-
- o tabnanny the source
-
o drop map-unreleased
o check email only portions of addresses match too, iff the names
o 'dak ls' could do better sanity checking for -g/-G (e.g. not more
than one suite, etc.)
- o use python2.2-tarfile (once it's in stable?) to check orig.tar.gz
- timestamps too.
-
o need to decide on whether we're tying for most errors at once.. if
so (probably) then make sure code doesn't assume variables exist and
either way do something about checking error code of check_dsc and
later functions so we skip later checks if they're bailing.
- o the .dak stuff is fundamentally braindamaged, it's not versioned
- so there's no way to change the format, yay me. need to fix.
- probably by putting a version var as the first thing and checking
- that.. auto-upgrade at least from original format would be good.
- might also be a good idea to put everything in one big dict after
- that?
-
o [?, wishlist, distant future] RFC2047-ing should be extended to
all headers of mails sent out.
deal, upload can be retried once the source is in the archive, but
still.
- o security global mail overrides should special case buildd stuff so
- that buildds get ACCEPTED mails (or maybe 'dak security-install' (?)), that way
- upload-security doesn't grow boundlessly.
-
o 'dak security-install' should upload sourceful packages first,
otherwise with big packages (e.g. X) and esp. when source is !i386,
half the arches can be uploaded without source, get copied into
queue/unaccepted and promptly rejected.
- o 'dak cruft-report's NVIU check doesn't catch cases where source
- package changed name, should check binaries
- too. [debian-devel@l.d.o, 2004-02-03]
-
o cnf[Rm::logfile] is misnamed...
<aj> i'd be kinda inclined to go with insisting the .changes file take
BEGIN PGP SIG -- END PGP MESSAGE -- with no lines before or after,
and rejecting .changes that didn't match that
- o 'dak cruft-report' should check for source packages not building any binaries
-
o 'dak control-suite' should have a diff mode that accepts diff output!
o 'dak clean-proposed-updates' doesn't deal with 'dak rm'-d
o 'dak rm' should remove obsolete changes when removing from p-u, or
at least warn. or 'dak reject-proposed-updates' should handle it.
- o need a testsuite _badly_
-
o 'dak process-unchecked' crashes if run as a user in -n mode when
orig.tar.gz is in queue/new...
o check_dsc_against_db's "delete an entry from files while you're
not looking" habit is Evil and Bad.
- o 'dak process-new' allows you to edit the section and change the
- component, but really shouldn't.
-
o 'dak rm' needs to, when not sending bug close mails, promote Cc: to
To: and send the mail anyways.
o UrgencyLog stuff should minimize it's bombing out(?)
o Log stuff should open the log file
- o 'dak queue-report' should footnote the actual notes, and also *
- the versions with notes so we can see new versions since being
- noted...
-
- o 'dak queue-report' should have alternative sorting options, including reverse
- and without or without differentiaion.
-
o 'dak import-users-from-passwd' should sync debadmin and ftpmaster (?)
o <drow> Can't read file.:
You assume that the filenames are relative to accepted/, might want
to doc or fix that.
- o <neuro> the orig was in NEW, the changes that caused it to be NEW
- were pulled out in -2, and we end up with no orig in the archive
- :(
-
o SecurityQueueBuild doesn't handle the case of foo_3.3woody1 with a
new .orig.tar.gz followed by a foo_3.3potato1 with the same
.orig.tar.gz; 'dak process-unchecked' sees it and copes, but the AA
o permissions (paranoia, group write, etc.) configurability and overhaul
- o remember duplicate copyrights in 'dak process-new' and skip them, per package
-
- o <M>ove option for 'dak process-new' byhand proecessing
-
- o 'dak cruft-report' could do with overrides
-
o database.get_location_id should handle the lack of archive_id properly
o the whole versioncmp thing should be documented
- o 'dak process-new' doesn't do the right thing with -2 and -1 uploads, as you can
- end up with the .orig.tar.gz not in the pool
-
- o 'dak process-new' exits if you check twice (aj)
-
- o 'dak process-new' doesn't trap signals from 'dak examine-package' properly
-
- o queued and/or perl on sparc stable sucks - reimplement it.
-
o aj's bin nmu changes
o 'dak process-new':
who their source is; source-must-exist does, but the info is not
propogated down.
- o Fix BTS vs. dak sync issues by queueing(via BSMTP) BTS mail so
- that it can be released on deman (e.g. ETRN to exim).
-
o maintainers file needs overrides
[ change override.maintainer to override.maintainer-from +
o [Hard] Need to merge non-non-US and non-US DBs.
- o experimental needs to auto clean (relative to unstable) [partial:
- 'dak cruft-report' warns about this]
-
o Do a checkpc(1)-a-like which sanitizes a config files.
o fix parse_changes()/build_file_list() to sanity check filenames
o saftey check and/or rename debs so they match what they should be
o s/distribution/suite/g
- o cron.weekly:
- @ weekly postins to d-c (?)
- @ backup of report (?)
- @ backup of changes.tgz (?)
-
o --help doesn't work without /etc/dak/dak.conf (or similar) at
least existing.
o interrupting of stracing 'dak process-unchecked' causes exceptions errors from apt_inst calls
o dependency checking (esp. stable) (partially done)
- o override checks sucks; it needs to track changes made by the
- maintainer and pass them onto ftpmaster instead of warning the
- maintainer.
o need to do proper rfc822 escaping of from lines (as opposed to s/\.//g)
o Revisit linking of binary->source in install() in dak.
- o Fix component handling in overrides (aj)
- o Fix lack of entires in source overrides (aj)
- o direport misreports things as section 'devel' (? we don't use direport)
- o vrfy check of every Maintainer+Changed-By address; valid for 3 months.
o binary-all should be done on a per-source, per-architecture package
basis to avoid, e.g. the perl-modules problem.
o a source-missing-diff check: if the version has a - in it, and it
o dak should validate multi-suite uploads; only possible valid one
is "stable unstable"
o cron.daily* should change umask (aj sucks)
- o 'dak cruft-report' doesn't look at debian-installer but should.
- o 'dak cruft-report' needs to check for binary-less source packages.
- o 'dak cruft-report' could accept a suite argument (?)
o byhand stuff should send notification
- o 'dak poolize' should udpate db; move files, not the other way around [neuro]
o 'dak rm' should update the stable changelog [joey]
o update tagdb.dia
o drop rather dubious currval stuff (?)
o rationalize os.path.join() usage
- o 'dak cruft-report' also doesn't seem to warn about missing binary packages (??)
o logging: hostname + pid ?
o ANAIS should be done in dak (?)
o Add an 'add' ability to 'dak rm' (? separate prog maybe)
- o Replicate old dinstall report stuff (? needed ?)
o Handle the case of 1:1.1 which would overwrite 1.1 (?)
o maybe drop -r/--regex in 'dak ls', make it the default and
implement -e/--exact (a la joey's "elmo")
#!/bin/bash
#
-# $Id: ddtp_i18n_check.sh 1186 2008-08-12 18:31:25Z faw $
+# $Id: ddtp_i18n_check.sh 1670 2009-03-31 20:57:49Z nekral-guest $
#
# Copyright (C) 2008, Felipe Augusto van de Wiel <faw@funlabs.org>
-# Copyright (C) 2008, Nicolas François <nicolas.francois@centraliens.net>
+# Copyright (C) 2008, 2009 Nicolas François <nicolas.francois@centraliens.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# Otherwise, list all the errors.
DEBUG=0
+# When DRY_RUN=0, generate the compressed version of the Translation-*
+# files.
+DRY_RUN=0
+
+dists_parent_dir=""
+# If no argument indicates the PACKAGES_LISTS_DIR then use '.'
+PACKAGES_LISTS_DIR=""
+
+usage () {
+ echo "Usage: $0 [options] <dists_parent_dir> [<packages_lists_directory>]" >&2
+ echo "" >&2
+ echo " --debug Debug mode: do not stop after the first error" >&2
+ echo " --dry-run Do not generate the compressed version of the " >&2
+ echo " Translation files">&2
+ exit 1
+}
+
+# Parse options
+for opt; do
+ case "$opt" in
+ "--debug")
+ DEBUG=1
+ ;;
+ "--dry-run")
+ DRY_RUN=1
+ ;;
+        -*)
+ usage
+ ;;
+ "")
+ echo "Empty parameter" >&2
+ echo "" >&2
+ usage
+ ;;
+ *)
+ if [ -z "$dists_parent_dir" ]; then
+ # Removing trailing /
+ dists_parent_dir=${opt%/}
+ elif [ -z "$PACKAGES_LISTS_DIR" ]; then
+ PACKAGES_LISTS_DIR=$opt
+ else
+ echo "$0: Invalid option: $opt" >&2
+ usage
+ fi
+ ;;
+ esac
+done
+PACKAGES_LISTS_DIR=${PACKAGES_LISTS_DIR:-.}
+
+if [ ! -d "$dists_parent_dir" ]; then
+ echo "missing dists_parent_dir, or not a directory" >&2
+ echo "" >&2
+ usage
+elif [ ! -d "$PACKAGES_LISTS_DIR" ]; then
+ echo "missing packages_lists_directory, or not a directory" >&2
+ echo "" >&2
+ usage
+fi
+
#STABLE="lenny"
TESTING="squeeze"
UNSTABLE="sid"
# These special files must exist on the top of dists_parent_dir
SPECIAL_FILES="$SHA256SUMS $TIMESTAMP $TIMESTAMP.gpg"
-usage () {
- echo "Usage: $0 <dists_parent_dir> [<packages_lists_directory>]" >&2
- exit 1
-}
-
-if [ "$#" -lt 1 ] || [ "$#" -gt 2 ] || [ ! -d $1 ]
-then
- usage
-fi
-
# Temporary working directory. We need a full path to reduce the
# complexity of checking SHA256SUMS and cleaning/removing TMPDIR
TEMP_WORK_DIR=$(mktemp -d -t ddtp_dinstall_tmpdir.XXXXXX)
cd "$OLDPWD"
unset TEMP_WORK_DIR
-# If it's traped, something bad happened.
+# If it's trapped, something bad happened.
trap_exit () {
rm -rf "$TMP_WORK_DIR"
- rm -f "$dists_parent_dir"/dists/*/main/i18n/Translation-*.{bz2,gz}
+ rm -f "$dists_parent_dir"/dists/*/main/i18n/Translation-*.bz2
exit 1
}
trap trap_exit EXIT HUP INT QUIT TERM
-# If no argument indicates the PACKAGES_LISTS_DIR then use '.'
-PACKAGES_LISTS_DIR=${2:-.}
-
-if [ ! -d "$PACKAGES_LISTS_DIR" ]
-then
- usage
-fi
-
-# Removing trailing /
-dists_parent_dir=${1%/}
-
is_filename_okay () {
ifo_file="$1"
# We do not check if the md5 in Translation-$lang are
# correct.
- # Now generate files
- # Compress the file
- bzip2 -c "$f" > "$f.bz2"
- gzip -c "$f" > "$f.gz"
+ if [ "$DRY_RUN" = "0" ]; then
+ # Now generate the compressed files
+ bzip2 "$f"
+ fi
else
echo "Neither a file or directory: $f" >&2
exit 1
# keep 14 days, all each day
# keep 31 days, 1 each 7th day
# keep 365 days, 1 each 31th day
+ # keep 3650 days, 1 each 365th day
]
TODAY = datetime.today()
<ul>
<li>A newly included architecture has to be completely built using
packages available in plain Debian sources. External patches cannot
- be used.<li>
+ be used.</li>
<li>At the time of inclusion a minimal set of binary packages will be
imported into the archive, just enough to get build-essential ready to
<li>There must be at least two machines ready to be maintained
by the Debian System Administrators, so at the start of its
- lifetime there will be at least one buildd and one porter machine.</br />
+ lifetime there will be at least one buildd and one porter machine.<br />
The inclusion into the archive will almost certainly happen before
the machines are handed over to DSA, but this should happen as
(Note that this is the minimum to get into the archive. The release team
may have additional requirements to allow the architecture to release, so
- there would normally need to be more machines, especially more buildds.)
+ there would normally need to be more machines, especially more
+ buildds.)<br />
+
+  <b>Note:</b> The machines, their setup, hosting etc. should be
+  coordinated with DSA and need to be acceptable to DSA. Please
+  <a href="mailto:debian-admin@lists.debian.org">coordinate with
+  them</a>; they might be able to help you in more ways than
+  you can imagine, but at least they can help to avoid useless work
+  if a hosting setup wouldn't be acceptable. :)
</li>
</ul>