+++ /dev/null
-<VirtualHost *:80>
- ServerName incoming.debian.org
- DocumentRoot /srv/incoming.debian.org/public
-
- # deny by default
- <Directory />
- Order allow,deny
- </Directory>
-
- # incoming dir, no subdir. allow everyone except abusers.
- <Directory /srv/incoming.debian.org/public>
- IndexIgnore robots.txt
- Order deny,allow
- </Directory>
-
- <Directory /srv/incoming.debian.org>
- <Files robots.txt>
- Order deny,allow
- </Files>
- </Directory>
- Alias /robots.txt /srv/incoming.debian.org/robots.txt
-
- # buildd direct access -- buildds only
- Alias /debian /srv/ftp.debian.org/mirror
- Alias /debian-backports /srv/backports-master.debian.org/mirror
- Alias /debian-buildd /srv/incoming.debian.org/debian-buildd
-
- <DirectoryMatch ~ "^/srv/(incoming\.debian\.org/(builddweb|debian-buildd/)|ftp\.debian\.org/mirror|backports-master\.debian\.org/mirror)">
- Order allow,deny
-
- Use DebianBuilddHostList
-
- # buildd.d.o, cimarosa
- allow from 206.12.19.8
-
- # franck.d.o
- allow from 138.16.160.12
-
- # test access to check functionality, ganneff
- allow from 213.146.108.162
-
- # for testing sbuild-db; rleigh@codelibre.net
- # nagini.codelibre.net
- allow from 80.68.93.164
- allow from 2001:41c8:1:5750::2
-
- # Should be in DSA list
- # amd64
- # vitry (archive rebuild)
- allow from 194.177.211.206
- allow from 2001:648:2ffc:deb:214:22ff:feb2:122c
- # krenek (archive rebuild)
- allow from 194.177.211.207
- allow from 2001:648:2ffc:deb:214:22ff:feb1:ff56
-
- # Known Extras
-
- # No idea about
- # arm
- ## netwinder
- allow from 192.133.104.24
- ##
- allow from 217.147.81.26
- ## toffee
- allow from 78.32.9.218
- ##
- allow from 86.3.74.169
- ## nw1.xandros
- allow from 67.210.160.89
- ## nw2.xandros
- allow from 67.210.160.90
- ## hdges.billgatliff
- allow from 209.251.101.204
-
- # armel
- ## allegri
- allow from 157.193.39.233
-
- # hppa
- ## bld3.mmjgroup
- allow from 192.25.206.243
- ## paer
- allow from 192.25.206.11
-
- # hurd-i386
- ## rossini (NOT .debian.org)
- allow from 192.33.98.55
- ## back / mozart (xen domains; NOT .debian.org)
- allow from 80.87.129.151
- # ironforge.sceen.net.
- allow from 46.105.42.111
-
- # ia64
- ## mundi
- allow from 192.25.206.62
-
- # mips
- ##
- allow from 217.147.81.21
- ## sigrun, aba
- allow from 82.195.75.68
- allow from 2001:41b8:202:deb:a00:69ff:fe08:30c6
-
- # mipsel
- ## monteverdi
- allow from 78.47.2.111
- ## kritias, aba
- allow from 78.46.213.163
-
- # s390
- ## debian01.zseries
- allow from 195.243.109.161
- ## l003092.zseriespenguins.ihost.com
- allow from 32.97.40.46
- ##
- allow from 148.100.96.45
- ##
- allow from 148.100.96.52
- ## lxdebian.bfinv
- allow from 80.245.147.60
-
- # sparc
- ## titan.ayous.org ('non-standard' buildd; contact HE)
- allow from 82.195.75.33
-
- # kfreebsd
- ## i386
- # himalai1, ganymede1
- allow from 129.175.22.65
- ## luchesi
- ## Password based due to being KVM instance
- ## allow from 137.82.84.78
-
- # Dynamics use password auth
-
- AuthType Basic
- AuthName "incoming.debian.org"
- AuthUserFile /srv/incoming.debian.org/htpasswd
- require valid-user
- satisfy any
- </DirectoryMatch>
-
- CustomLog /var/log/apache2/incoming-access.log privacy
- ErrorLog /var/log/apache2/incoming-error.log
-
-</VirtualHost>
# used by cron.dinstall *and* cron.unchecked.
function make_buildd_dir () {
- local overridesuite
- local suite
-
+ # We generate straight into the static mirror location for incoming
dak manage-build-queues -a
dak generate-packages-sources2 -a build-queues
dak generate-releases -a build-queues >/dev/null
- ${scriptsdir}/update-buildd-archive ${base}/build-queues ${incoming}/debian-buildd
- rm -f ${incoming}/public/*
- dak export-suite -s "accepted" -d "$incoming/public"
+ # Stick a last modified date in the page footer
+ echo "<p>Last updated: `date -u`</p>" > ${incoming}/web/README.html
+
+ # Tell the mirrors that we've updated
+ chronic /usr/local/bin/static-update-component incoming.debian.org
}
# Process (oldstable)-proposed-updates "NEW" queue
# Push files over to security
# The key over there should have the following set for the ssh key:
-# command="/usr/bin/xzcat | /usr/bin/psql -f - -1 obscurity"
-pg_dump -a -F p -t files | \
- sed -e "s,^COPY files (,DELETE FROM external_files; COPY external_files (," | \
+# command="/usr/bin/xzcat | /usr/bin/psql -1 -c 'DELETE FROM external_files; COPY external_files (id, filename, size, md5sum, last_used, sha1sum, sha256sum, created, modified) FROM STDIN' obscurity"
+psql -c 'COPY files (id, filename, size, md5sum, last_used, sha1sum, sha256sum, created, modified) TO STDOUT' projectb | \
xz -3 | \
ssh -o BatchMode=yes -o ConnectTimeout=30 -o SetupTimeout=30 -2 \
-i ${base}/s3kr1t/push_external_files dak@security-master.debian.org sync
function do_buildd () {
if lockfile -r3 $NOTICE; then
LOCKDAILY="YES"
- cd $overridedir
- dak make-overrides &>/dev/null
- rm -f override.sid.all3 override.sid.all3.src override.squeeze-backports.all3 override.squeeze-backports.all3.src
- for i in main contrib non-free main.debian-installer; do
- cat override.sid.$i >> override.sid.all3
- cat override.squeeze-backports.$i >> override.squeeze-backports.all3
- if [ "$i" != "main.debian-installer" ]; then
- cat override.sid.$i.src >> override.sid.all3.src
- cat override.squeeze-backports.$i.src >> override.squeeze-backports.all3.src
- fi
- done
make_buildd_dir
wbtrigger
fi
}
-function do_dists () {
- #cd $configdir
- #dak generate-filelist -s unstable,experimental -i
- dak generate-packages-sources2 -s unstable experimental >/dev/null
-}
-
########################################################################
# the actual unchecked functions follow #
########################################################################
if [ ! -z "$changes" ]; then
sync_debbugs
do_buildd
- #do_dists
fi
dak contents -l 10000 scan-binary
DefaultSuite "unstable";
LintianTags "/srv/ftp-master.debian.org/dak/config/debian/lintian.tags";
ReleaseTransitions "/srv/ftp.debian.org/web/transitions.yaml";
+ AllowSourceOnlyUploads true;
// if you setup an own dak repository and want to upload Debian packages you most possibly want
// to set the following option to a real path/filename and then enter those mail addresses that
// you want to be able to receive mails generated by your dak installation. This avoids spamming
unchecked=$queuedir/unchecked/
accepted=$queuedir/accepted/
mirrordir=$base/mirror/
-incoming=$base/incoming
+incoming=$base/public/incoming.debian.org/
newstage=$queuedir/newstage/
exportdir=$base/export/
exportpublic=$public/rsync/export/
import apt_pkg
import daklib.archive
+import daklib.gpg
from daklib import utils
from daklib.dbconn import *
change-component SUITE COMPONENT binary BINARY...
Move source or binary packages to a different component by copying
associated files and changing the overrides.
+
+ forget-signature FILE: forget that we saw FILE
"""
sys.exit(exit_code)
################################################################################
+def forget_signature(args):
+ filename = args[1]
+ with open(filename, 'r') as fh:
+ data = fh.read()
+
+ session = DBConn().session()
+ keyrings = [ k.keyring_name for k in session.query(Keyring).filter_by(active=True).order_by(Keyring.priority) ]
+ signed_file = daklib.gpg.SignedFile(data, keyrings)
+ history = SignatureHistory.from_signed_file(signed_file).query(session)
+ if history is not None:
+ session.delete(history)
+ session.commit()
+ else:
+ print "Signature was not known to dak."
+ session.rollback()
+
+dispatch['forget-signature'] = forget_signature
+
+################################################################################
+
def main():
"""Perform administrative work on the dak database"""
global dryrun
--- /dev/null
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Drop unique .changes name requirement and allow ftpteam to forget seen files
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2014, Ansgar Burchardt <ansgar@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+import psycopg2
+from daklib.dak_exceptions import DBUpdateError
+from daklib.config import Config
+
+statements = [
+"ALTER TABLE changes DROP CONSTRAINT IF EXISTS known_changes_changesname_key",
+"DROP INDEX IF EXISTS changesurgency_ind",
+"GRANT DELETE ON signature_history TO ftpteam",
+]
+
+################################################################################
+def do_update(self):
+ print __doc__
+ try:
+ cnf = Config()
+
+ c = self.db.cursor()
+
+ for stmt in statements:
+ c.execute(stmt)
+
+ c.execute("UPDATE config SET value = '103' WHERE name = 'db_revision'")
+ self.db.commit()
+
+ except psycopg2.ProgrammingError as msg:
+ self.db.rollback()
+ raise DBUpdateError('Unable to apply sick update 103, rollback issued. Error message: {0}'.format(msg))
--- /dev/null
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Drop created and modified columns from bin_contents and src_contents
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2014, Ansgar Burchardt <ansgar@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+import psycopg2
+from daklib.dak_exceptions import DBUpdateError
+from daklib.config import Config
+
+statements = [
+"DROP TRIGGER IF EXISTS modified_bin_contents ON bin_contents",
+"ALTER TABLE bin_contents DROP COLUMN created, DROP COLUMN modified",
+"DROP TRIGGER IF EXISTS modified_src_contents ON src_contents",
+"ALTER TABLE src_contents DROP COLUMN created, DROP COLUMN modified",
+]
+
+################################################################################
+def do_update(self):
+ print __doc__
+ try:
+ cnf = Config()
+
+ c = self.db.cursor()
+
+ for stmt in statements:
+ c.execute(stmt)
+
+ c.execute("UPDATE config SET value = '104' WHERE name = 'db_revision'")
+ self.db.commit()
+
+ except psycopg2.ProgrammingError as msg:
+ self.db.rollback()
+ raise DBUpdateError('Unable to apply sick update 104, rollback issued. Error message: {0}'.format(msg))
-c --copy copy files instead of symlinking them
-d <directory> target directory to export packages to
default: current directory
+ -r --relative use symlinks relative to target directory
-s <suite> suite to grab uploads from
"""
arguments = [('h', 'help', 'Export::Options::Help'),
('c', 'copy', 'Export::Options::Copy'),
('d', 'directory', 'Export::Options::Directory', 'HasArg'),
+ ('r', 'relative', 'Export::Options::Relative'),
('s', 'suite', 'Export::Options::Suite', 'HasArg')]
cnf = Config()
sys.exit(1)
symlink = 'Copy' not in options
+ relative = 'Relative' in options
+
+ if relative and not symlink:
+ print "E: --relative and --copy cannot be used together."
+ sys.exit(1)
binaries = suite.binaries
sources = suite.sources
.join(ArchiveFile.component).join(ArchiveFile.file) \
.filter(ArchiveFile.archive == suite.archive) \
.filter(ArchiveFile.file == f).first()
- # XXX: Remove later. There was a bug that caused only the *.dsc to
- # be installed in build queues and we do not want to break them.
- # The bug was fixed in 55d2c7e6e2418518704623246021021e05b90e58
- # on 2012-11-04
- if af is None:
- af = session.query(ArchiveFile) \
- .join(ArchiveFile.component).join(ArchiveFile.file) \
- .filter(ArchiveFile.file == f).first()
+ src = af.path
+ if relative:
+ src = os.path.relpath(src, directory)
dst = os.path.join(directory, f.basename)
if not os.path.exists(dst):
- fs.copy(af.path, dst, symlink=symlink)
+ fs.copy(src, dst, symlink=symlink)
fs.commit()
if __name__ == '__main__':
else:
logger.log(['E: ', msg])
+ # Lock tables so that nobody can change things underneath us
+ session.execute("LOCK TABLE src_associations IN SHARE MODE")
+ session.execute("LOCK TABLE bin_associations IN SHARE MODE")
+
for s in suites:
component_ids = [ c.component_id for c in s.components ]
if s.untouchable and not force:
# Process directories from dak.conf
process_tree(Cnf, "Dir")
+ # Hardcode creation of the unchecked directory
+ if Cnf.has_key("Dir::Base"):
+ do_dir(os.path.join(Cnf["Dir::Base"], "queue", "unchecked"), 'unchecked directory')
+
# Process queue directories
for queue in session.query(PolicyQueue):
do_dir(queue.path, '%s queue' % queue.queue_name)
+ # If we're doing the NEW queue, make sure it has a COMMENTS directory
+ if queue.queue_name == 'new':
+ do_dir(os.path.join(queue.path, "COMMENTS"), '%s queue comments' % queue.queue_name)
for config_name in [ "Rm::LogFile",
"Import-Archive::ExportDir" ]:
JOIN files ON files.id=dsc_files.file
JOIN files_archive_map ON files.id = files_archive_map.file_id
JOIN component ON files_archive_map.component_id = component.id
+ JOIN files_archive_map fam_dsc ON fam_dsc.file_id=source.file AND fam_dsc.component_id=component.id AND fam_dsc.archive_id=files_archive_map.archive_id
WHERE files_archive_map.archive_id = :archive_id
- ORDER BY source, version
+ ORDER BY source, version, component.id, files.filename
"""
query_binaries = """
binaries = set([x.package for x in upload.binaries])
for m in missing:
if m['type'] != 'dsc':
- binaries.remove(m['package'])
+ binaries.discard(m['package'])
if binaries:
source = upload.binaries[0].source.source
suite = upload.target_suite.overridesuite or \
package = m['package']
section = m['section']
priority = m['priority']
+ included = "" if m['included'] else "NOT UPLOADED"
if indexed:
- line = "(%s): %-20s %-20s %-20s" % (index, package, priority, section)
+ line = "(%s): %-20s %-20s %-20s %s" % (index, package, priority, section, included)
else:
- line = "%-20s %-20s %-20s" % (package, priority, section)
+ line = "%-20s %-20s %-20s %s" % (package, priority, section, included)
line = line.strip()
if not m['valid']:
line = line + ' [!]'
type, pkg = pkg.split(':', 1)
else:
type = 'deb'
- if (type, pkg) not in overrides_map:
+ o = overrides_map.get((type, pkg), None)
+ if o is None:
utils.warn("Ignoring unknown package '%s'" % (pkg))
else:
if section.find('/') != -1:
section=section,
component=component,
priority=priority,
+ included=o['included'],
))
return new_overrides
from daklib import utils
from daklib.dak_exceptions import CantOpenError, AlreadyLockedError, CantGetLockError
from daklib.config import Config
-from daklib.archive import ArchiveTransaction
+from daklib.archive import ArchiveTransaction, source_component_from_package_list
from daklib.urgencylog import UrgencyLog
+from daklib.packagelist import PackageList
import daklib.announce
overridesuite = session.query(Suite).filter_by(suite_name=overridesuite.overridesuite).one()
def binary_component_func(db_binary):
- override = session.query(Override).filter_by(suite=overridesuite, package=db_binary.package) \
- .join(OverrideType).filter(OverrideType.overridetype == db_binary.binarytype) \
- .join(Component).one()
- return override.component
+ section = db_binary.proxy['Section']
+ component_name = 'main'
+ if section.find('/') != -1:
+ component_name = section.split('/', 1)[0]
+ return session.query(Component).filter_by(component_name=component_name).one()
def source_component_func(db_source):
- override = session.query(Override).filter_by(suite=overridesuite, package=db_source.source) \
+ package_list = PackageList(db_source.proxy)
+ component = source_component_from_package_list(package_list, upload.target_suite)
+ if component is not None:
+ return component
+
+ # Fallback for packages without Package-List field
+ query = session.query(Override).filter_by(suite=overridesuite, package=db_source.source) \
.join(OverrideType).filter(OverrideType.overridetype == 'dsc') \
- .join(Component).one()
- return override.component
+ .join(Component)
+ return query.one().component
all_target_suites = [upload.target_suite]
all_target_suites.extend([q.suite for q in upload.target_suite.copy_queues])
with daklib.archive.ArchiveUpload(directory, changes, keyrings) as upload:
processed = action(directory, upload)
if processed and not Options['No-Action']:
+ session = DBConn().session()
+ history = SignatureHistory.from_signed_file(upload.changes)
+ if history.query(session) is None:
+ session.add(history)
+ session.commit()
+ session.close()
+
unlink_if_exists(os.path.join(directory, changes.filename))
for fn in changes.files:
unlink_if_exists(os.path.join(directory, fn))
uploader = achanges.get('changed-by')
uploader = re.sub(r'^\s*(\S.*)\s+<.*>',r'\1',uploader)
with utils.open_file(changesfn) as f:
- fingerprint = SignedFile(f.read(), keyrings=get_active_keyring_paths()).fingerprint
+ fingerprint = SignedFile(f.read(), keyrings=get_active_keyring_paths(), require_signature=False).fingerprint
if Cnf.has_key("Show-Deferred::LinkPath"):
isnew = 0
suites = get_suites_source_in(achanges['source'])
else:
return 'any', 'any', 'any'
+ if len(parts) == 2 and parts[0] == 'linux':
+ arch = parts[1]
+
triplet = _triplettable()[1].get(arch, None)
if triplet is None:
return None
triplet_arch = Debian_arch_to_Debian_triplet(arch)
triplet_wildcard = Debian_arch_to_Debian_triplet(wildcard)
- if len(triplet_arch) != 3:
+ if triplet_arch is None or len(triplet_arch) != 3:
raise InvalidArchitecture('{0} is not a valid architecture name'.format(arch))
- if len(triplet_wildcard) != 3:
+ if triplet_wildcard is None or len(triplet_wildcard) != 3:
raise InvalidArchitecture('{0} is not a valid architecture name or wildcard'.format(wildcard))
for i in range(0,3):
import os
import shutil
from sqlalchemy.orm.exc import NoResultFound
+from sqlalchemy.orm import object_session
import sqlalchemy.exc
import tempfile
import traceback
self.rollback()
return None
+def source_component_from_package_list(package_list, suite):
+ """Get component for a source package
+
+ This function will look at the Package-List field to determine the
+ component the source package belongs to. This is the first component
+ the source package provides binaries for (first with respect to the
+ ordering of components).
+
+ If the source package has no Package-List field, None is returned.
+
+ @type package_list: L{daklib.packagelist.PackageList}
+ @param package_list: package list of the source to get the override for
+
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: suite to consider for binaries produced
+
+ @rtype: L{daklib.dbconn.Component} or C{None}
+ @return: component for the given source or C{None}
+ """
+ if package_list.fallback:
+ return None
+ session = object_session(suite)
+ packages = package_list.packages_for_suite(suite)
+ components = set(p.component for p in packages)
+ query = session.query(Component).order_by(Component.ordering) \
+ .filter(Component.component_name.in_(components))
+ return query.first()
+
class ArchiveUpload(object):
"""handle an upload
suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
return suites
+ def _check_new_binary_overrides(self, suite):
+ new = False
+
+ binaries = self.changes.binaries
+ source = self.changes.source
+ if source is not None and not source.package_list.fallback:
+ packages = source.package_list.packages_for_suite(suite)
+ binaries = [ entry for entry in packages ]
+
+ for b in binaries:
+ override = self._binary_override(suite, b)
+ if override is None:
+ self.warnings.append('binary:{0} is NEW.'.format(b.name))
+ new = True
+
+ return new
+
def _check_new(self, suite):
"""Check if upload is NEW
new = False
# Check for missing overrides
- for b in self.changes.binaries:
- override = self._binary_override(suite, b)
- if override is None:
- self.warnings.append('binary:{0} is NEW.'.format(b.control['Package']))
- new = True
-
+ if self._check_new_binary_overrides(suite):
+ new = True
if self.changes.source is not None:
override = self._source_override(suite, self.changes.source)
if override is None:
@type suite: L{daklib.dbconn.Suite}
@param suite: suite to get override for
- @type binary: L{daklib.upload.Binary}
+ @type binary: L{daklib.upload.Binary} or L{daklib.packagelist.PackageListEntry}
@param binary: binary to get override for
@rtype: L{daklib.dbconn.Override} or C{None}
if mapped_component is None:
return None
- query = self.session.query(Override).filter_by(suite=suite, package=binary.control['Package']) \
+ query = self.session.query(Override).filter_by(suite=suite, package=binary.name) \
.join(Component).filter(Component.component_name == mapped_component.component_name) \
.join(OverrideType).filter(OverrideType.overridetype == binary.type)
if suite.overridesuite is not None:
suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
- # XXX: component for source?
query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source']) \
.join(OverrideType).filter(OverrideType.overridetype == 'dsc')
+ component = source_component_from_package_list(source.package_list, suite)
+ if component is not None:
+ query = query.filter(Override.component == component)
+
try:
return query.one()
except NoResultFound:
# Validate signatures and hashes before we do any real work:
for chk in (
checks.SignatureAndHashesCheck,
+ checks.SignatureTimestampCheck,
checks.ChangesCheck,
checks.ExternalHashesCheck,
checks.SourceCheck,
from daklib.textutils import fix_maintainer, ParseMaintError
import daklib.lintian as lintian
import daklib.utils as utils
-from daklib.upload import InvalidHashException
+import daklib.upload
import apt_inst
import apt_pkg
from apt_pkg import version_compare
+import datetime
import errno
import os
import subprocess
return False
class SignatureAndHashesCheck(Check):
+ def check_replay(self, upload):
+ # NOTE(review): uses the shared upload.session rather than a private one —
+ # confirm the .changes is still remembered in all cases on rollback.
+ session = upload.session
+ history = SignatureHistory.from_signed_file(upload.changes)
+ r = history.query(session)
+ if r is not None:
+ raise Reject('Signature for changes file was already seen at {0}.\nPlease refresh the signature of the changes file if you want to upload it again.'.format(r.seen))
+ return True
+
"""Check signature of changes and dsc file (if included in upload)
Make sure the signature is valid and done by a known user.
changes = upload.changes
if not changes.valid_signature:
raise Reject("Signature for .changes not valid.")
+ self.check_replay(upload)
self._check_hashes(upload, changes.filename, changes.files.itervalues())
source = None
try:
for f in files:
f.check(upload.directory)
- except IOError as e:
- if e.errno == errno.ENOENT:
- raise Reject('{0} refers to non-existing file: {1}\n'
- 'Perhaps you need to include it in your upload?'
- .format(filename, os.path.basename(e.filename)))
- raise
- except InvalidHashException as e:
+ except daklib.upload.FileDoesNotExist as e:
+ raise Reject('{0}: {1}\n'
+ 'Perhaps you need to include the file in your upload?'
+ .format(filename, unicode(e)))
+ except daklib.upload.UploadException as e:
raise Reject('{0}: {1}'.format(filename, unicode(e)))
+class SignatureTimestampCheck(Check):
+ """Check timestamp of .changes signature"""
+ def check(self, upload):
+ changes = upload.changes
+
+ now = datetime.datetime.utcnow()
+ timestamp = changes.signature_timestamp
+ age = now - timestamp
+
+ age_max = datetime.timedelta(days=365)
+ age_min = datetime.timedelta(days=-7)
+
+ if age > age_max:
+ raise Reject('{0}: Signature from {1} is too old (maximum age is {2} days)'.format(changes.filename, timestamp, age_max.days))
+ if age < age_min:
+ raise Reject('{0}: Signature from {1} is too far in the future (tolerance is {2} days)'.format(changes.filename, timestamp, abs(age_min.days)))
+
+ return True
+
class ChangesCheck(Check):
"""Check changes file for syntax errors."""
def check(self, upload):
fn = binary.hashed_file.filename
control = binary.control
- for field in ('Package', 'Architecture', 'Version', 'Description'):
+ for field in ('Package', 'Architecture', 'Version', 'Description', 'Section'):
if field not in control:
raise Reject('{0}: Missing mandatory field {0}.'.format(fn, field))
except:
raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
+ # "Multi-Arch: no" breaks wanna-build, #768353
+ multi_arch = control.get("Multi-Arch")
+ if multi_arch == 'no':
+ raise Reject('{0}: Multi-Arch: no support in Debian is broken (#768353)'.format(fn))
+
class BinaryTimestampCheck(Check):
"""check timestamps of files in binary packages
return None
class NoSourceOnlyCheck(Check):
+ def is_source_only_upload(self, upload):
+ changes = upload.changes
+ if changes.source is not None and len(changes.binaries) == 0:
+ return True
+ return False
+
"""Check for source-only upload
Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
set. Otherwise they are rejected.
+
+ Source-only uploads are only accepted for source packages having a
+ Package-List field that also lists architectures per package. This
+ check can be disabled via
+ Dinstall::AllowSourceOnlyUploadsWithoutPackageList.
+
+ Source-only uploads to NEW are only allowed if
+ Dinstall::AllowSourceOnlyNew is set.
+
+ Uploads not including architecture-independent packages are only
+ allowed if Dinstall::AllowNoArchIndepUploads is set.
+
"""
def check(self, upload):
- if Config().find_b("Dinstall::AllowSourceOnlyUploads"):
+ if not self.is_source_only_upload(upload):
return True
+
+ allow_source_only_uploads = Config().find_b('Dinstall::AllowSourceOnlyUploads')
+ allow_source_only_uploads_without_package_list = Config().find_b('Dinstall::AllowSourceOnlyUploadsWithoutPackageList')
+ allow_source_only_new = Config().find_b('Dinstall::AllowSourceOnlyNew')
+ allow_no_arch_indep_uploads = Config().find_b('Dinstall::AllowNoArchIndepUploads')
changes = upload.changes
- if changes.source is not None and len(changes.binaries) == 0:
+
+ if not allow_source_only_uploads:
raise Reject('Source-only uploads are not allowed.')
+ if not allow_source_only_uploads_without_package_list \
+ and changes.source.package_list.fallback:
+ raise Reject('Source-only uploads are only allowed if a Package-List field that also list architectures is included in the source package. dpkg (>= 1.17.7) includes this information.')
+ if not allow_source_only_new and upload.new:
+ raise Reject('Source-only uploads to NEW are not allowed.')
+
+ if not allow_no_arch_indep_uploads \
+ and 'all' not in changes.architectures \
+ and 'experimental' not in changes.distributions \
+ and changes.source.package_list.has_arch_indep_packages():
+ raise Reject('Uploads not including architecture-independent packages are not allowed.')
+
return True
class LintianCheck(Check):
section = sections.section
if 'Uploader' in section:
self.uploader = section['Uploader']
+ if 'Cc' in section:
+ self.cc.append(section['Cc'])
# TODO: Verify first section has valid Archive field
if 'Archive' not in section:
raise CommandError('No Archive field in first section.')
self.log.log(['dm-migrate', 'from={0}'.format(fpr_hash_from), 'to={0}'.format(fpr_hash_to)])
- count = 0
+ sources = []
for entry in session.query(ACLPerSource).filter_by(acl=acl, fingerprint=fpr_from):
self.log.log(['dm-migrate', 'from={0}'.format(fpr_hash_from), 'to={0}'.format(fpr_hash_to), 'source={0}'.format(entry.source)])
entry.fingerprint = fpr_to
- count += 1
+ sources.append(entry.source)
- self.result.append('Migrated {0} to {1}.\n{2} acl entries changed.'.format(fpr_hash_from, fpr_hash_to, count))
+ self.result.append('Migrated {0} to {1}.\n{2} acl entries changed: {3}'.format(fpr_hash_from, fpr_hash_to, len(sources), ", ".join(sources)))
session.commit()
stanza = self.read_control()
return apt_pkg.TagSection(stanza)
+ @property
+ def proxy(self):
+ session = object_session(self)
+ query = session.query(BinaryMetadata).filter_by(binary=self)
+ return MetadataProxy(session, query)
+
__all__.append('DBBinary')
@session_wrapper
self.contents_sha1 = signed_file.contents_sha1()
return self
+ def query(self, session):
+ return session.query(SignatureHistory).filter_by(fingerprint=self.fingerprint, signature_timestamp=self.signature_timestamp, contents_sha1=self.contents_sha1).first()
+
__all__.append('SignatureHistory')
################################################################################
fileset.add(name)
return fileset
+ @property
+ def proxy(self):
+ session = object_session(self)
+ query = session.query(SourceMetadata).filter_by(source=self)
+ return MetadataProxy(session, query)
+
__all__.append('DBSource')
@session_wrapper
################################################################################
+class MetadataProxy(object):
+ def __init__(self, session, query):
+ self.session = session
+ self.query = query
+
+ def _get(self, key):
+ metadata_key = self.session.query(MetadataKey).filter_by(key=key).first()
+ if metadata_key is None:
+ return None
+ metadata = self.query.filter_by(key=metadata_key).first()
+ return metadata
+
+ def __contains__(self, key):
+ if self._get(key) is not None:
+ return True
+ return False
+
+ def __getitem__(self, key):
+ metadata = self._get(key)
+ if metadata is None:
+ raise KeyError
+ return metadata.value
+
+ def get(self, key, default=None):
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+################################################################################
+
class VersionCheck(ORMObject):
def __init__(self, *args, **kwargs):
pass
self.keyrings = keyrings
self.valid = False
+ self.expired = False
+ self.invalid = False
self.fingerprint = None
self.primary_fingerprint = None
+ self.signature_id = None
self._verify(data, require_signature)
for line in self.status.splitlines():
self._parse_status(line)
+ if self.invalid:
+ self.valid = False
+
if require_signature and not self.valid:
raise GpgException("No valid signature found. (GPG exited with status code %s)\n%s" % (exit_code, self.stderr))
return dict( (fd, "".join(read_lines[fd])) for fd in read_lines.keys() )
- def _parse_date(self, value):
- """parse date string in YYYY-MM-DD format
+ def _parse_timestamp(self, timestamp, datestring=None):
+ """parse timestamp in GnuPG's format
@rtype: L{datetime.datetime}
- @returns: datetime objects for 0:00 on the given day
+ @returns: datetime object for the given timestamp
"""
- year, month, day = value.split('-')
- date = datetime.date(int(year), int(month), int(day))
- time = datetime.time(0, 0)
- return datetime.datetime.combine(date, time)
+ # The old implementation only returned the date. As we already
+ # used this for replay protection, return the legacy value for
+ # old signatures.
+ if datestring is not None:
+ year, month, day = datestring.split('-')
+ date = datetime.date(int(year), int(month), int(day))
+ time = datetime.time(0, 0)
+ if date < datetime.date(2014, 8, 4):
+ return datetime.datetime.combine(date, time)
+
+ if 'T' in timestamp:
+ raise Exception('No support for ISO 8601 timestamps.')
+ return datetime.datetime.utcfromtimestamp(long(timestamp))
def _parse_status(self, line):
fields = line.split()
# <expire-timestamp> <sig-version> <reserved> <pubkey-algo>
# <hash-algo> <sig-class> <primary-key-fpr>
if fields[1] == "VALIDSIG":
+ if self.fingerprint is not None:
+ raise GpgException("More than one signature is not (yet) supported.")
self.valid = True
self.fingerprint = fields[2]
self.primary_fingerprint = fields[11]
- self.signature_timestamp = self._parse_date(fields[3])
+ self.signature_timestamp = self._parse_timestamp(fields[4], fields[3])
- if fields[1] == "BADARMOR":
+ elif fields[1] == "BADARMOR":
raise GpgException("Bad armor.")
- if fields[1] == "NODATA":
+ elif fields[1] == "NODATA":
raise GpgException("No data.")
- if fields[1] == "DECRYPTION_FAILED":
+ elif fields[1] == "DECRYPTION_FAILED":
raise GpgException("Decryption failed.")
- if fields[1] == "ERROR":
+ elif fields[1] == "ERROR":
raise GpgException("Other error: %s %s" % (fields[2], fields[3]))
+ elif fields[1] == "SIG_ID":
+ if self.signature_id is not None:
+ raise GpgException("More than one signature id.")
+ self.signature_id = fields[2]
+
+ elif fields[1] in ('PLAINTEXT', 'GOODSIG', 'NOTATION_NAME', 'NOTATION_DATA', 'SIGEXPIRED', 'KEYEXPIRED', 'POLICY_URL'):
+ pass
+
+ elif fields[1] in ('EXPSIG', 'EXPKEYSIG'):
+ self.expired = True
+ self.invalid = True
+
+ elif fields[1] in ('REVKEYSIG', 'BADSIG', 'ERRSIG', 'KEYREVOKED', 'NO_PUBKEY'):
+ self.invalid = True
+
+ else:
+ raise GpgException("Keyword '{0}' from GnuPG was not expected.".format(fields[1]))
+
def _exec_gpg(self, stdin, stdout, stderr, statusfd):
try:
if stdin != 0:
"--no-default-keyring",
"--batch",
"--no-tty",
- "--trust-model", "always"]
+ "--trust-model", "always",
+ "--fixed-list-mode"]
for k in self.keyrings:
args.append("--keyring=%s" % k)
args.extend(["--decrypt", "-"])
class PackageListEntry(object):
def __init__(self, name, package_type, section, component, priority, **other):
self.name = name
- self.package_type = package_type
+ self.type = package_type
self.section = section
self.component = component
self.priority = priority
self.other = other
- @property
- def architectures(self):
+
+ self.architectures = self._architectures()
+
+ def _architectures(self):
archs = self.other.get("arch", None)
if archs is None:
return None
return archs.split(',')
+
def built_on_architecture(self, architecture):
archs = self.architectures
if archs is None:
if match_architecture(architecture, arch):
return True
return False
+
def built_in_suite(self, suite):
built = False
for arch in suite.architectures:
+ if arch.arch_string == 'source':
+ continue
built_on_arch = self.built_on_architecture(arch.arch_string)
if built_on_arch:
return True
class PackageList(object):
def __init__(self, source):
- self._source = source
- if 'Package-List' in self._source:
- self._parse()
- elif 'Binary' in self._source:
- self._parse_fallback()
+ if 'Package-List' in source:
+ self._parse(source)
+ elif 'Binary' in source:
+ self._parse_fallback(source)
else:
raise InvalidSource('Source package has neither Package-List nor Binary field.')
- def _parse(self):
- self.package_list = {}
- for line in self._source['Package-List'].split("\n"):
+ self.fallback = any(entry.architectures is None for entry in self.package_list)
+
+ def _binaries(self, source):
+ return set(name.strip() for name in source['Binary'].split(","))
+
+ def _parse(self, source):
+ self.package_list = []
+
+ binaries_binary = self._binaries(source)
+ binaries_package_list = set()
+
+ for line in source['Package-List'].split("\n"):
if not line:
continue
fields = line.split()
# <name> <type> <component/section> <priority> [arch=<arch>[,<arch>]...]
name = fields[0]
package_type = fields[1]
- component, section = extract_component_from_section(fields[2])
+ section, component = extract_component_from_section(fields[2])
priority = fields[3]
other = dict(kv.split('=', 1) for kv in fields[4:])
+ if name in binaries_package_list:
+ raise InvalidSource("Package-List has two entries for '{0}'.".format(name))
+ if name not in binaries_binary:
+ raise InvalidSource("Package-List lists {0} which is not listed in Binary.".format(name))
+ binaries_package_list.add(name)
+
entry = PackageListEntry(name, package_type, section, component, priority, **other)
- self.package_list[name] = entry
+ self.package_list.append(entry)
+
+ if len(binaries_binary) != len(binaries_package_list):
+ raise InvalidSource("Package-List and Binaries fields have a different number of entries.")
- def _parse_fallback(self):
- self.package_list = {}
+ def _parse_fallback(self, source):
+ self.package_list = []
- for binary in self._source['Binary'].split():
+ for binary in self._binaries(source):
name = binary
package_type = None
component = None
other = dict()
entry = PackageListEntry(name, package_type, section, component, priority, **other)
- self.package_list[name] = entry
+ self.package_list.append(entry)
def packages_for_suite(self, suite):
packages = []
- for entry in self.package_list.values():
+ for entry in self.package_list:
built = entry.built_in_suite(suite)
if built or built is None:
packages.append(entry)
def has_arch_indep_packages(self):
has_arch_indep = False
- for entry in self.package_list.values():
+ for entry in self.package_list:
built = entry.built_on_architecture('all')
if built:
return True
def has_arch_dep_packages(self):
has_arch_dep = False
- for entry in self.package_list.values():
+ for entry in self.package_list:
built_on_all = entry.built_on_architecture('all')
if built_on_all == False:
return True
from .dbconn import BinaryMetadata, Component, MetadataKey, Override, OverrideType, Suite, get_mapped_component
from .fstransactions import FilesystemTransaction
from .regexes import re_file_changes, re_file_safe
+from .packagelist import PackageList
import daklib.utils as utils
import errno
.filter(Override.component == component)
return query.first()
- def _binary_override(self, binary, component_name):
- package = binary.package
+ def _binary_override(self, name, binarytype, component_name):
suite = self._overridesuite
- overridetype = binary.binarytype
component = get_mapped_component(component_name, self.session)
- query = self.session.query(Override).filter_by(package=package, suite=suite) \
- .join(OverrideType).filter(OverrideType.overridetype == overridetype) \
+ query = self.session.query(Override).filter_by(package=name, suite=suite) \
+ .join(OverrideType).filter(OverrideType.overridetype == binarytype) \
.filter(Override.component == component)
return query.first()
- def _binary_metadata(self, binary, key):
- metadata_key = self.session.query(MetadataKey).filter_by(key=key).first()
- if metadata_key is None:
- return None
- metadata = self.session.query(BinaryMetadata).filter_by(binary=binary, key=metadata_key).first()
- if metadata is None:
- return None
- return metadata.value
-
@property
def _changes_prefix(self):
changesname = self.upload.changes.changesname
missing = []
components = set()
+ source = self.upload.source
+
if hints is None:
hints = []
hints_map = dict([ ((o['type'], o['package']), o) for o in hints ])
- for binary in self.upload.binaries:
- priority = self._binary_metadata(binary, 'Priority')
- section = self._binary_metadata(binary, 'Section')
+ def check_override(name, type, priority, section, included):
component = 'main'
if section.find('/') != -1:
component = section.split('/', 1)[0]
- override = self._binary_override(binary, component)
- if override is None and not any(o['package'] == binary.package and o['type'] == binary.binarytype for o in missing):
- hint = hints_map.get((binary.binarytype, binary.package))
+ override = self._binary_override(name, type, component)
+ if override is None and not any(o['package'] == name and o['type'] == type for o in missing):
+ hint = hints_map.get((type, name))
if hint is not None:
missing.append(hint)
component = hint['component']
else:
missing.append(dict(
- package = binary.package,
+ package = name,
priority = priority,
section = section,
component = component,
- type = binary.binarytype,
+ type = type,
+ included = included
))
components.add(component)
- source = self.upload.source
- source_component = '(unknown)'
- for component, in self.session.query(Component.component_name).order_by(Component.ordering):
- if component in components:
- source_component = component
- break
- else:
- if source is not None:
- if self._source_override(component) is not None:
- source_component = component
- break
+ for binary in self.upload.binaries:
+ binary_proxy = binary.proxy
+ priority = binary_proxy['Priority']
+ section = binary_proxy['Section']
+ check_override(binary.package, binary.binarytype, priority, section, included=True)
if source is not None:
+ source_proxy = source.proxy
+ package_list = PackageList(source_proxy)
+ if not package_list.fallback:
+ packages = package_list.packages_for_suite(self.upload.target_suite)
+ for p in packages:
+ check_override(p.name, p.type, p.priority, p.section, included=False)
+
+ # see daklib.archive.source_component_from_package_list
+ # which we cannot use here as we might not have a Package-List
+ # field for old packages
+ query = self.session.query(Component).order_by(Component.ordering) \
+ .filter(Component.component_name.in_(components))
+ source_component = query.first().component_name
+
override = self._source_override(source_component)
if override is None:
hint = hints_map.get(('dsc', source.source))
missing.append(hint)
else:
section = 'misc'
- if component != 'main':
- section = "{0}/{1}".format(component, section)
+ if source_component != 'main':
+ section = "{0}/{1}".format(source_component, section)
missing.append(dict(
package = source.source,
priority = 'extra',
section = section,
component = source_component,
type = 'dsc',
+ included = True,
))
return missing
######################################################################
# Match safe filenames
-re_file_safe = re.compile(r'^[a-zA-Z0-9][a-zA-Z0-9_.:~+-]*$')
+re_file_safe = re.compile(r'^[a-zA-Z0-9][a-zA-Z0-9_.~+-]*$')
# Prefix of binary and source filenames
-_re_file_prefix = r'^(?P<package>[a-z0-9][a-z0-9.+-]+)_(?P<version>[A-Za-z0-9.:~+-]+?)'
+_re_file_prefix = r'^(?P<package>[a-z0-9][a-z0-9.+-]+)_(?P<version>[A-Za-z0-9.~+-]+?)'
# Match binary packages
# Groups: package, version, architecture, type
import apt_inst
import apt_pkg
+import errno
import os
import re
from daklib.gpg import SignedFile
from daklib.regexes import *
+import daklib.packagelist
-class InvalidChangesException(Exception):
+class UploadException(Exception):
pass
-class InvalidBinaryException(Exception):
+class InvalidChangesException(UploadException):
pass
-class InvalidSourceException(Exception):
+class InvalidBinaryException(UploadException):
pass
-class InvalidHashException(Exception):
+class InvalidSourceException(UploadException):
+ pass
+
+class InvalidHashException(UploadException):
def __init__(self, filename, hash_name, expected, actual):
self.filename = filename
self.hash_name = hash_name
"might already be known to the archive software.") \
.format(self.hash_name, self.filename, self.expected, self.actual)
-class InvalidFilenameException(Exception):
+class InvalidFilenameException(UploadException):
def __init__(self, filename):
self.filename = filename
def __str__(self):
return "Invalid filename '{0}'.".format(self.filename)
+class FileDoesNotExist(UploadException):
+ def __init__(self, filename):
+ self.filename = filename
+ def __str__(self):
+ return "Refers to non-existing file '{0}'".format(self.filename)
+
class HashedFile(object):
"""file with checksums
"""
@return: C{HashedFile} object for the given file
"""
path = os.path.join(directory, filename)
- size = os.stat(path).st_size
with open(path, 'r') as fh:
+ size = os.fstat(fh.fileno()).st_size
hashes = apt_pkg.Hashes(fh)
return cls(filename, size, hashes.md5, hashes.sha1, hashes.sha256, section, priority)
"""
path = os.path.join(directory, self.filename)
- size = os.stat(path).st_size
+ try:
+ with open(path) as fh:
+ size = os.fstat(fh.fileno()).st_size
+ hashes = apt_pkg.Hashes(fh)
+ except IOError as e:
+ if e.errno == errno.ENOENT:
+ raise FileDoesNotExist(self.filename)
+ raise
+
if size != self.size:
raise InvalidHashException(self.filename, 'size', self.size, size)
- with open(path) as fh:
- hashes = apt_pkg.Hashes(fh)
-
if hashes.md5 != self.md5sum:
raise InvalidHashException(self.filename, 'md5sum', self.md5sum, hashes.md5)
"""
return self._signed_file.valid
+ @property
+ def signature_timestamp(self):
+ return self._signed_file.signature_timestamp
+
+ @property
+ def contents_sha1(self):
+ return self._signed_file.contents_sha1
+
@property
def architectures(self):
"""list of architectures included in the upload
version = self.control['Version']
return (match.group('package'), version)
+ @property
+ def name(self):
+ return self.control['Package']
+
@property
def type(self):
"""package type ('deb' or 'udeb')
@type: dict-like
"""
+ self.package_list = daklib.packagelist.PackageList(self.dsc)
+ """Information about packages built by the source.
+ @type: daklib.packagelist.PackageList
+ """
+
self._files = None
@classmethod
suite=stable
suitename=wheezy
pusuite=proposed-updates
-oldrev=7.5
-newrev=7.6
+oldrev=7.6
+newrev=7.7
export SCRIPTVARS=/srv/ftp-master.debian.org/dak/config/debian/vars
. $SCRIPTVARS
. "${configdir}/common"
dak control-suite --add ${suitename}-r0 < ${suitename}-r0-additions.cs
- sync with stable RM if there is any propup needed. do it, if so:
-like, cat /srv/release.debian.org/www/squeeze/6.0/6.0.4/propup.unstable | dak control-suite --add unstable
-Note: unstable first, then testing
+cp /srv/release.debian.org/www/${suitename}/${newrev%%.*}/${newrev}/propups.unstable .
+dak control-suite --force --add unstable <propups.unstable
+cp /srv/release.debian.org/www/${suitename}/${newrev%%.*}/${newrev}/propups.testing .
+dak control-suite --force --add testing <propups.testing
+
- ask rms if they have RMs to do.
- and then check if they have a d-i update. if so, bash:
# set dioldver to "empty" if there is no old to remove
-diver=20130613+deb7u2+b1
+diver=20130613+deb7u2+b3
dak copy-installer -s ${pusuite} -d ${suite} ${diver}
-dioldver=20130613+deb7u1+b2
+dioldver=20130613+deb7u2+b1
cd $ftpdir/dists/${suite}/main
if [ "${dioldver}" != "empty" ]; then
for iarch in $(dak admin s-a list-arch ${suite}); do
- Change first line to NOTOK, add comment "Moving back to unchecked."
- dak process-policy new; dak clean-suites
- cd /srv/security-master.debian.org/queue/reject
+- dak admin forget-signature bla.changes
- dcmd mv -n bla.changes ../unchecked
- /srv/security-master.debian.org/dak/config/debian-security/cron.unchecked
- cronon
-arch <- c("source", "all", "amd64", "i386", "alpha", "arm", "armel", "armhf", "hppa", "hurd-i386", "ia64",
- "kfreebsd-amd64", "kfreebsd-i386", "mips", "mipsel", "powerpc", "s390", "s390x", "sparc")
-palette(c("midnightblue", "gold", "turquoise", "cyan", "black", "red", "OrangeRed", "green3", "blue", "magenta",
+arch <- c("source", "all", "amd64", "arm64", "i386", "alpha", "arm", "armel", "armhf", "hppa", "hurd-i386", "ia64",
+ "kfreebsd-amd64", "kfreebsd-i386", "mips", "mipsel", "powerpc", "ppc64el", "s390", "s390x", "sparc")
+palette(c("midnightblue", "gold", "turquoise", "pink", "cyan", "black", "red", "OrangeRed", "green3", "blue", "magenta",
"cornsilk3", "darkolivegreen3", "tomato4", "violetred2","thistle4", "steelblue2", "springgreen4",
"salmon","gray"))
cname <- c("date",arch)
FTPDIR="/srv/upload.debian.org/ftp/pub/UploadQueue/"
SSHDIR="/srv/upload.debian.org/UploadQueue/"
-yes n | find ${FTPDIR} -type f -mmin +15 -print0 -exec mv -i --target-directory=${SSHDIR} "{}" +
+find ${FTPDIR} -type f -mmin +15 -print0 -exec mv --no-clobber --target-directory=${SSHDIR} -- "{}" +
--exclude "/s3kr1t" \
--exclude "/scripts/s3kr1t" \
--exclude "/tmp/" \
+ --exclude "/public/incoming.debian.org" \
--delete --delete-after \
--timeout 3600 \
-e 'ssh -o ConnectTimeout=30 -o SetupTimeout=30' \
my %data;
my %data2;
-my @archs = ("source", "all", "amd64", "i386", "alpha", "arm", "armel", "armhf", "hppa",
- "hurd-i386", "ia64", "kfreebsd-amd64", "kfreebsd-i386", "mips", "mipsel", "powerpc", "s390",
- "s390x", "sparc");
+my @archs = ("source", "all", "amd64", "arm64", "i386", "alpha", "arm", "armel", "armhf", "hppa",
+ "hurd-i386", "ia64", "kfreebsd-amd64", "kfreebsd-i386", "mips", "mipsel", "powerpc",
+ "ppc64el", "s390", "s390x", "sparc");
for my $file (@ARGV) {
my $FH = new IO::Uncompress::AnyUncompress $file, Transparent => 1
WB_DB_DIR=${WB_DB_DIR:-${scriptdir}/nfu}
for arch in $(dak admin s-a list-arch unstable); do
- wget -q http://buildd.debian.org/stats/$arch-all.txt -O ${WB_DB_DIR}/${arch}-all.txt
+ wget -q http://buildd.debian.org/stats/$arch-all.txt -O ${WB_DB_DIR}/${arch}-all.txt || echo "No w-b dump for ${arch}"
done
signingkey= will ensure that Release files are signed
# dak admin suite add-all-arches unstable x.y.z origin=MyDistro label=Master codename=sid signingkey=DDDDDDDD
+Add the components to the suite
+# dak admin s-c add unstable main contrib non-free
+
Re-run dak init-dirs to add new suite directories to /srv/dak
# dak init-dirs
--- /dev/null
+-----BEGIN PGP SIGNED MESSAGE-----
+Hash: SHA512
+
+Valid: expired-subkey
+-----BEGIN PGP SIGNATURE-----
+Version: GnuPG v1
+
+iJwEAQEKAAYFAlLsQwMACgkQ+DxQkDe7kaCctQQAuzx+L6Bo+8oI9zTb04Cg2EAW
+ul4rN7XIqj3Q9/Cy2/+6+ET7GE414cA3KEElrimgAHHNdr6xPOJnEYAHSlMRG0wk
+gP9zk0nAt1ZJRgmWKb2zgbV6DYz7gAcUVaZMd+fixBdn39E3SkMnDHsUhWZNecsG
+BpSvYQJ7pQDnqo9gWbY=
+=AKH9
+-----END PGP SIGNATURE-----
--- /dev/null
+-----BEGIN PGP SIGNED MESSAGE-----
+Hash: SHA512
+
+Valid: expired
+-----BEGIN PGP SIGNATURE-----
+Version: GnuPG v1
+
+iJwEAQEKAAYFAjp4sQMACgkQVDsrrtoETwtbowQAxZ+imlX8u44TCRaJmgSDx6dV
+G+BiNiM7RXbfoYD3jmKWX8ILdxBXyMP2YaPmtRij03h1q8idjol6mxGl2xBrOdbB
+hE7N+67MuvpGB1yBOb6JJQuqLALyoK+efzcqPBEJv3s0eeYbWkB4ZxWRhso1DDnm
+RYieWYyoR9/oNVhsBmE=
+=WR84
+-----END PGP SIGNATURE-----
--- /dev/null
+-----BEGIN PGP MESSAGE-----
+Version: GnuPG v1
+
+owE7LZXEEMIWKBWWmJOZYqWQl69bnJmel1hSWpTKBQA=
+=JwM5
+-----END PGP MESSAGE-----
--- /dev/null
+Valid: text/plain
--- /dev/null
+-----BEGIN PGP SIGNED MESSAGE-----
+Hash: SHA512
+
+Valid: yes
+-----BEGIN PGP SIGNATURE-----
+Version: GnuPG v1
+
+iJwEAQEKAAYFAlQGNXoACgkQy51cWChgboRrDAP9E/cwAQgF5BpzIEN5Wnus4mf0
+L4QdVPXCVjU4f8YS4FKali0++shPRFxgqBhWaOT9XFR9y0+ZzHjfcXffY0loYMQ6
+JCZdIK0lQ4aPDFqX6892+Aka0ZaijL+20yd9IE+9E7M7rCCW+PgVFRIIKnB7Eyc2
+MkCGwQ91CAOjErXnZPw=
+=UsYp
+-----END PGP SIGNATURE-----
class MatchArchitecture(DakTestCase):
def testEqual(self):
self.assert_(match_architecture('amd64', 'amd64'))
+ self.assert_(match_architecture('linux-amd64', 'linux-amd64'))
+ self.assert_(match_architecture('linux-amd64', 'amd64'))
+ self.assert_(match_architecture('amd64', 'linux-amd64'))
self.assert_(not match_architecture('amd64', 'i386'))
self.assert_(match_architecture('kfreebsd-amd64', 'kfreebsd-amd64'))
self.assert_(not match_architecture('kfreebsd-amd64', 'amd64'))
--- /dev/null
+#! /usr/bin/env python
+#
+# Copyright (C) 2014, Ansgar Burchardt <ansgar@debian.org>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import datetime
+import unittest
+from base_test import DakTestCase, fixture
+from daklib.gpg import GpgException, SignedFile
+
+keyring = fixture('gpg/gnupghome/pubring.gpg')
+fpr_valid = '0ABB89079CB58F8F94F6F310CB9D5C5828606E84'
+fpr_expired = '05A558AE65B77B559BBE0C4D543B2BAEDA044F0B'
+fpr_expired_subkey = '8865D9EC71713394ADBD8F729F7A24B7F6388CE1'
+
+def verify(filename, require_signature=True):
+ with open(fixture(filename)) as fh:
+ data = fh.read()
+ return SignedFile(data, [keyring], require_signature)
+
+class GpgTest(DakTestCase):
+ def test_valid(self):
+ result = verify('gpg/valid.asc')
+ self.assertTrue(result.valid)
+ self.assertEqual(result.primary_fingerprint, fpr_valid)
+ self.assertEqual(result.contents, "Valid: yes\n")
+ self.assertEqual(result.signature_timestamp, datetime.datetime(2014, 9, 2, 21, 24, 10))
+
+ def test_expired(self):
+ result = verify('gpg/expired.asc', False)
+ self.assertFalse(result.valid)
+ self.assertEqual(result.primary_fingerprint, fpr_expired)
+ self.assertEqual(result.contents, "Valid: expired\n")
+ self.assertEqual(result.signature_timestamp, datetime.datetime(2001, 2, 1, 0, 0, 0))
+
+ def test_expired_assertion(self):
+ with self.assertRaises(GpgException):
+ verify('gpg/expired.asc')
+
+ def test_expired_subkey(self):
+ result = verify('gpg/expired-subkey.asc', False)
+ self.assertFalse(result.valid)
+ self.assertEqual(result.primary_fingerprint, fpr_expired_subkey)
+ self.assertEqual(result.contents, "Valid: expired-subkey\n")
+ self.assertEqual(result.signature_timestamp, datetime.datetime(2014, 2, 1, 0, 0, 0))
+
+ def test_expires_subkey_assertion(self):
+ with self.assertRaises(GpgException):
+ verify('gpg/expired-subkey.asc')
+
+ def test_message_assertion(self):
+ with self.assertRaises(GpgException):
+ verify('gpg/message.asc')
+
+ def test_plain_assertion(self):
+ with self.assertRaises(GpgException):
+ verify('gpg/plaintext.txt')
+
+if __name__ == '__main__':
+ unittest.main()
source_all_any = {
'Package-List': '\n libdune-common-dev deb libdevel optional arch=any\nlibdune-common-doc deb doc optional arch=all\n',
- 'Binary': 'libdune-common-dev libdune-common-doc\n',
+ 'Binary': 'libdune-common-dev, libdune-common-doc\n',
}
source_amd64 = {
p_all_amd64 = pl.packages_for_suite(suite_all_amd64)
self.assertEqual(len(p_all_amd64), 1)
+ p = p_all[0]
+ self.assertEqual(p.name, 'libdune-common-doc')
+ self.assertEqual(p.type, 'deb')
+ self.assertEqual(p.section, 'doc')
+ self.assertEqual(p.component, 'main')
+ self.assertEqual(p.priority, 'optional')
+ self.assertEqual(p.architectures, ['all'])
+
def testArchAny(self):
pl = PackageList(source_any)
p_kfreebsdi386 = pl.packages_for_suite(suite_kfreebsdi386)
self.assertEqual(len(p_kfreebsdi386), 0)
+ suite_source = FakeSuite('source')
+ p_source = pl.packages_for_suite(suite_source)
+ self.assertEqual(len(p_source), 0)
+
def testNoArch(self):
pl = PackageList(source_noarch)
"/srv/keyring.debian.org/keyrings/debian-maintainers.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm64-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armhf-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ppc64el-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390x-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
"/srv/keyring.debian.org/keyrings/debian-maintainers.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm64-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-i386-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ppc64el-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
@keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm64-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armhf-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ppc64el-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390x-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
@keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm64-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armhf-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ppc64el-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390x-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
@keyrings = ( "/srv/keyring.debian.org/keyrings/debian-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm64-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armhf-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
- "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ppc64el-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390x-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");
"/srv/keyring.debian.org/keyrings/debian-maintainers.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-alpha-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-amd64-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-arm64-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armel-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-armhf-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-hurd-i386-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mipsel-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-mips-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-powerpc-keyring.gpg",
+ "/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-ppc64el-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-s390x-keyring.gpg",
"/srv/keyring.debian.org/keyrings/buildd-keyrings/buildd-sparc-keyring.gpg");