This module provides classes to manipulate the archive.
"""
-from .dbconn import *
+from daklib.dbconn import *
import daklib.checks as checks
from daklib.config import Config
import daklib.upload as upload
import daklib.utils as utils
-from .fstransactions import FilesystemTransaction
-from .regexes import re_changelog_versions, re_bin_only_nmu
+from daklib.fstransactions import FilesystemTransaction
+from daklib.regexes import re_changelog_versions, re_bin_only_nmu
import apt_pkg
from datetime import datetime
self.fs = FilesystemTransaction()
self.session = DBConn().session()
- def get_file(self, hashed_file, source_name):
- """Look for file `hashed_file` in database
+ def get_file(self, hashed_file, source_name, check_hashes=True):
+ """Look for file C{hashed_file} in database
- Args:
- hashed_file (daklib.upload.HashedFile): file to look for in the database
+ @type hashed_file: L{daklib.upload.HashedFile}
+ @param hashed_file: file to look for in the database
- Raises:
- KeyError: file was not found in the database
- HashMismatchException: hash mismatch
+ @type source_name: str
+ @param source_name: source package name
- Returns:
- `daklib.dbconn.PoolFile` object for the database
+ @type check_hashes: bool
+ @param check_hashes: check size and hashes match
+
+ @raise KeyError: file was not found in the database
+ @raise HashMismatchException: hash mismatch
+
+ @rtype: L{daklib.dbconn.PoolFile}
+ @return: database entry for the file
"""
poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
try:
poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one()
- if poolfile.filesize != hashed_file.size or poolfile.md5sum != hashed_file.md5sum or poolfile.sha1sum != hashed_file.sha1sum or poolfile.sha256sum != hashed_file.sha256sum:
+ if check_hashes and (poolfile.filesize != hashed_file.size
+ or poolfile.md5sum != hashed_file.md5sum
+ or poolfile.sha1sum != hashed_file.sha1sum
+ or poolfile.sha256sum != hashed_file.sha256sum):
raise HashMismatchException('{0}: Does not match file already existing in the pool.'.format(hashed_file.filename))
return poolfile
except NoResultFound:
Will not give an error when the file is already present.
- Returns:
- `daklib.dbconn.PoolFile` object for the new file
+ @rtype: L{daklib.dbconn.PoolFile}
+ @return: database object for the new file
"""
session = self.session
def install_binary(self, directory, binary, suite, component, allow_tainted=False, fingerprint=None, source_suites=None, extra_source_archives=None):
"""Install a binary package
- Args:
- directory (str): directory the binary package is located in
- binary (daklib.upload.Binary): binary package to install
- suite (daklib.dbconn.Suite): target suite
- component (daklib.dbconn.Component): target component
-
- Kwargs:
- allow_tainted (bool): allow to copy additional files from tainted archives
- fingerprint (daklib.dbconn.Fingerprint): optional fingerprint
- source_suites (list of daklib.dbconn.Suite or True): suites to copy
- the source from if they are not in `suite` or True to allow
- copying from any suite.
- This can also be a SQLAlchemy (sub)query object.
- extra_source_archives (list of daklib.dbconn.Archive): extra archives to copy Built-Using sources from
-
- Returns:
- `daklib.dbconn.DBBinary` object for the new package
+ @type directory: str
+ @param directory: directory the binary package is located in
+
+ @type binary: L{daklib.upload.Binary}
+ @param binary: binary package to install
+
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: target suite
+
+ @type component: L{daklib.dbconn.Component}
+ @param component: target component
+
+ @type allow_tainted: bool
+ @param allow_tainted: allow to copy additional files from tainted archives
+
+ @type fingerprint: L{daklib.dbconn.Fingerprint}
+ @param fingerprint: optional fingerprint
+
+ @type source_suites: SQLAlchemy subquery for C{daklib.dbconn.Suite} or C{True}
+ @param source_suites: suites to copy the source from if they are not
+ in C{suite} or C{True} to allow copying from any
+ suite.
+
+ @type extra_source_archives: list of L{daklib.dbconn.Archive}
+ @param extra_source_archives: extra archives to copy Built-Using sources from
+
+ @rtype: L{daklib.dbconn.DBBinary}
+ @return: database object for the new package
"""
session = self.session
control = binary.control
This is intended to be used to check that Built-Using sources exist.
- Args:
- filename (str): filename to use in error messages
- source (daklib.dbconn.DBSource): source to look for
- archive (daklib.dbconn.Archive): archive to look in
+ @type filename: str
+ @param filename: filename to use in error messages
- Kwargs:
- extra_archives (list of daklib.dbconn.Archive): list of archives to copy
- the source package from if it is not yet present in `archive`
+ @type source: L{daklib.dbconn.DBSource}
+ @param source: source to look for
+
+ @type archive: L{daklib.dbconn.Archive}
+ @param archive: archive to look in
+
+ @type extra_archives: list of L{daklib.dbconn.Archive}
+ @param extra_archives: list of archives to copy the source package from
+ if it is not yet present in C{archive}
"""
session = self.session
db_file = session.query(ArchiveFile).filter_by(file=source.poolfile, archive=archive).first()
self._copy_file(af.file, archive, db_file.component, allow_tainted=True)
def _add_built_using(self, db_binary, filename, control, suite, extra_archives=None):
- """Add Built-Using sources to `db_binary.extra_sources`
+ """Add Built-Using sources to C{db_binary.extra_sources}
"""
session = self.session
built_using = control.get('Built-Using', None)
def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None):
"""Install a source package
- Args:
- directory (str): directory the source package is located in
- source (daklib.upload.Source): source package to install
- suite (daklib.dbconn.Suite): target suite
- component (daklib.dbconn.Component): target component
- changed_by (daklib.dbconn.Maintainer): person who prepared this version of the package
+ @type directory: str
+ @param directory: directory the source package is located in
+
+ @type source: L{daklib.upload.Source}
+ @param source: source package to install
- Kwargs:
- allow_tainted (bool): allow to copy additional files from tainted archives
- fingerprint (daklib.dbconn.Fingerprint): optional fingerprint
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: target suite
- Returns:
- `daklib.dbconn.DBSource` object for the new source
+ @type component: L{daklib.dbconn.Component}
+ @param component: target component
+
+ @type changed_by: L{daklib.dbconn.Maintainer}
+ @param changed_by: person who prepared this version of the package
+
+ @type allow_tainted: bool
+ @param allow_tainted: allow to copy additional files from tainted archives
+
+ @type fingerprint: L{daklib.dbconn.Fingerprint}
+ @param fingerprint: optional fingerprint
+
+ @rtype: L{daklib.dbconn.DBSource}
+ @return: database object for the new source
"""
session = self.session
archive = suite.archive
# Uploaders are the maintainer and co-maintainers from the Uploaders field
db_source.uploaders.append(maintainer)
if 'Uploaders' in control:
- def split_uploaders(field):
- import re
- for u in re.sub(">[ ]*,", ">\t", field).split("\t"):
- yield u.strip()
-
+ from daklib.textutils import split_uploaders
for u in split_uploaders(control['Uploaders']):
db_source.uploaders.append(get_or_set_maintainer(u, session))
session.flush()
def _copy_file(self, db_file, archive, component, allow_tainted=False):
"""Copy a file to the given archive and component
- Args:
- db_file (daklib.dbconn.PoolFile): file to copy
- archive (daklib.dbconn.Archive): target archive
- component (daklib.dbconn.Component): target component
+ @type db_file: L{daklib.dbconn.PoolFile}
+ @param db_file: file to copy
+
+ @type archive: L{daklib.dbconn.Archive}
+ @param archive: target archive
+
+ @type component: L{daklib.dbconn.Component}
+ @param component: target component
- Kwargs:
- allow_tainted (bool): allow to copy from tainted archives (such as NEW)
+ @type allow_tainted: bool
+ @param allow_tainted: allow to copy from tainted archives (such as NEW)
"""
session = self.session
def copy_binary(self, db_binary, suite, component, allow_tainted=False, extra_archives=None):
"""Copy a binary package to the given suite and component
- Args:
- db_binary (daklib.dbconn.DBBinary): binary to copy
- suite (daklib.dbconn.Suite): target suite
- component (daklib.dbconn.Component): target component
+ @type db_binary: L{daklib.dbconn.DBBinary}
+ @param db_binary: binary to copy
- Kwargs:
- allow_tainted (bool): allow to copy from tainted archives (such as NEW)
- extra_archives (list of daklib.dbconn.Archive): extra archives to copy Built-Using sources from
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: target suite
+
+ @type component: L{daklib.dbconn.Component}
+ @param component: target component
+
+ @type allow_tainted: bool
+ @param allow_tainted: allow to copy from tainted archives (such as NEW)
+
+ @type extra_archives: list of L{daklib.dbconn.Archive}
+ @param extra_archives: extra archives to copy Built-Using sources from
"""
session = self.session
archive = suite.archive
if archive.tainted:
allow_tainted = True
- # make sure built-using packages are present in target archive
filename = db_binary.poolfile.filename
+
+ # make sure source is present in target archive
+ db_source = db_binary.source
+ if session.query(ArchiveFile).filter_by(archive=archive, file=db_source.poolfile).first() is None:
+ raise ArchiveException('{0}: cannot copy to {1}: source is not present in target archive'.format(filename, suite.suite_name))
+
+ # make sure built-using packages are present in target archive
for db_source in db_binary.extra_sources:
self._ensure_extra_source_exists(filename, db_source, archive, extra_archives=extra_archives)
def copy_source(self, db_source, suite, component, allow_tainted=False):
"""Copy a source package to the given suite and component
- Args:
- db_source (daklib.dbconn.DBSource): source to copy
- suite (daklib.dbconn.Suite): target suite
- component (daklib.dbconn.Component): target component
+ @type db_source: L{daklib.dbconn.DBSource}
+ @param db_source: source to copy
+
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: target suite
- Kwargs:
- allow_tainted (bool): allow to copy from tainted archives (such as NEW)
+ @type component: L{daklib.dbconn.Component}
+ @param component: target component
+
+ @type allow_tainted: bool
+ @param allow_tainted: allow to copy from tainted archives (such as NEW)
"""
archive = suite.archive
if archive.tainted:
def remove_file(self, db_file, archive, component):
"""Remove a file from a given archive and component
- Args:
- db_file (daklib.dbconn.PoolFile): file to remove
- archive (daklib.dbconn.Archive): archive to remove the file from
- component (daklib.dbconn.Component): component to remove the file from
+ @type db_file: L{daklib.dbconn.PoolFile}
+ @param db_file: file to remove
+
+ @type archive: L{daklib.dbconn.Archive}
+ @param archive: archive to remove the file from
+
+ @type component: L{daklib.dbconn.Component}
+ @param component: component to remove the file from
"""
af = self.session.query(ArchiveFile).filter_by(file=db_file, archive=archive, component=component)
self.fs.unlink(af.path)
def remove_binary(self, binary, suite):
"""Remove a binary from a given suite and component
- Args:
- binary (daklib.dbconn.DBBinary): binary to remove
- suite (daklib.dbconn.Suite): suite to remove the package from
+ @type binary: L{daklib.dbconn.DBBinary}
+ @param binary: binary to remove
+
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: suite to remove the package from
"""
binary.suites.remove(suite)
self.session.flush()
def remove_source(self, source, suite):
"""Remove a source from a given suite and component
- Raises:
- ArchiveException: source package is still referenced by other
- binaries in the suite
+ @type source: L{daklib.dbconn.DBSource}
+ @param source: source to remove
+
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: suite to remove the package from
- Args:
- binary (daklib.dbconn.DBSource): source to remove
- suite (daklib.dbconn.Suite): suite to remove the package from
+ @raise ArchiveException: source package is still referenced by other
+ binaries in the suite
"""
session = self.session
class ArchiveUpload(object):
"""handle an upload
- This class can be used in a with-statement:
+ This class can be used in a with-statement::
with ArchiveUpload(...) as upload:
...
Doing so will automatically run any required cleanup and also rollback the
transaction if it was not committed.
-
- Attributes:
- changes (daklib.upload.Changes): upload to process
- directory (str): directory with temporary copy of files. set by `prepare`
- fingerprint (daklib.dbconn.Fingerprint): fingerprint used to sign the upload
- new (bool): upload is NEW. set by `check`
- reject_reasons (list of str): reasons why the upload cannot be accepted
- session: database session
- transaction (daklib.archive.ArchiveTransaction): transaction used to handle the upload
- warnings (list of str): warnings (NOT USED YET)
"""
def __init__(self, directory, changes, keyrings):
self.transaction = ArchiveTransaction()
+ """transaction used to handle the upload
+ @type: L{daklib.archive.ArchiveTransaction}
+ """
+
self.session = self.transaction.session
+ """database session"""
self.original_directory = directory
self.original_changes = changes
+
self.changes = None
+ """upload to process
+ @type: L{daklib.upload.Changes}
+ """
+
self.directory = None
+ """directory with temporary copy of files. set by C{prepare}
+ @type: str
+ """
+
self.keyrings = keyrings
self.fingerprint = self.session.query(Fingerprint).filter_by(fingerprint=changes.primary_fingerprint).one()
+ """fingerprint of the key used to sign the upload
+ @type: L{daklib.dbconn.Fingerprint}
+ """
self.reject_reasons = []
+ """reasons why the upload cannot by accepted
+ @type: list of str
+ """
+
self.warnings = []
+ """warnings
+ @note: Not used yet.
+ @type: list of str
+ """
+
self.final_suites = None
+
self.new = False
+ """upload is NEW. set by C{check}
+ @type: bool
+ """
+
+ self._checked = False
+ """checks passes. set by C{check}
+ @type: bool
+ """
self._new_queue = self.session.query(PolicyQueue).filter_by(queue_name='new').one()
self._new = self._new_queue.suite
+ def warn(self, message):
+ """add a warning message
+
+ Adds a warning message that can later be seen in C{self.warnings}
+
+ @type message: str
+ @param message: warning message
+ """
+ self.warnings.append(message)
+
def prepare(self):
"""prepare upload for further processing
This copies the files involved to a temporary directory. If you use
this method directly, you have to remove the directory given by the
- `directory` attribute later on your own.
+ C{directory} attribute later on your own.
- Instead of using the method directly, you can also use a with-statement:
+ Instead of using the method directly, you can also use a with-statement::
with ArchiveUpload(...) as upload:
...
cnf = Config()
session = self.transaction.session
- self.directory = tempfile.mkdtemp(dir=cnf.get('Dir::TempPath'))
+ self.directory = utils.temp_dirname(parent=cnf.get('Dir::TempPath'),
+ mode=0o2750, group=cnf.unprivgroup)
with FilesystemTransaction() as fs:
src = os.path.join(self.original_directory, self.original_changes.filename)
dst = os.path.join(self.directory, self.original_changes.filename)
- fs.copy(src, dst)
+ fs.copy(src, dst, mode=0o640)
self.changes = upload.Changes(self.directory, self.original_changes.filename, self.keyrings)
for f in self.changes.files.itervalues():
src = os.path.join(self.original_directory, f.filename)
dst = os.path.join(self.directory, f.filename)
- fs.copy(src, dst)
+ if not os.path.exists(src):
+ continue
+ fs.copy(src, dst, mode=0o640)
source = self.changes.source
if source is not None:
for f in source.files.itervalues():
src = os.path.join(self.original_directory, f.filename)
dst = os.path.join(self.directory, f.filename)
- if f.filename not in self.changes.files:
- db_file = self.transaction.get_file(f, source.dsc['Source'])
- db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
- fs.copy(db_archive_file.path, dst, symlink=True)
+ if not os.path.exists(dst):
+ try:
+ db_file = self.transaction.get_file(f, source.dsc['Source'], check_hashes=False)
+ db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
+ fs.copy(db_archive_file.path, dst, symlink=True)
+ except KeyError:
+ # Ignore if get_file could not find it. Upload will
+ # probably be rejected later.
+ pass
def unpacked_source(self):
"""Path to unpacked source
Get path to the unpacked source. This method does unpack the source
- into a temporary directory under `self.directory` if it has not
+ into a temporary directory under C{self.directory} if it has not
been done so already.
- Returns:
- String giving the path to the unpacked source directory
- or None if no source was included in the upload.
+ @rtype: str or C{None}
+ @return: string giving the path to the unpacked source directory
+ or C{None} if no source was included in the upload.
"""
assert self.directory is not None
sourcedir = os.path.join(self.directory, 'source')
if not os.path.exists(sourcedir):
- subprocess.check_call(["dpkg-source", "--no-copy", "-x", dsc_path, sourcedir], shell=False)
+ devnull = open('/dev/null', 'w')
+ subprocess.check_call(["dpkg-source", "--no-copy", "--no-check", "-x", dsc_path, sourcedir], shell=False, stdout=devnull)
if not os.path.isdir(sourcedir):
raise Exception("{0} is not a directory after extracting source package".format(sourcedir))
return sourcedir
if src == suite_name:
suite_name = dst
if rtype != "silent-map":
- self.warnings.append('Mapping {0} to {0}.'.format(src, dst))
+ self.warnings.append('Mapping {0} to {1}.'.format(src, dst))
elif rtype == "ignore":
ignored = fields[1]
if suite_name == ignored:
def _mapped_suites(self):
"""Get target suites after mappings
- Returns:
- list of daklib.dbconn.Suite giving the mapped target suites of this upload
+ @rtype: list of L{daklib.dbconn.Suite}
+ @return: list giving the mapped target suites of this upload
"""
session = self.session
"""Check if upload is NEW
An upload is NEW if it has binary or source packages that do not have
- an override in `suite` OR if it references files ONLY in a tainted
+ an override in C{suite} OR if it references files ONLY in a tainted
archive (eg. when it references files in NEW).
- Returns:
- True if the upload is NEW, False otherwise
+ @rtype: bool
+ @return: C{True} if the upload is NEW, C{False} otherwise
"""
session = self.session
def _binary_override(self, suite, binary):
"""Get override entry for a binary
- Args:
- suite (daklib.dbconn.Suite)
- binary (daklib.upload.Binary)
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: suite to get override for
- Returns:
- daklib.dbconn.Override or None
+ @type binary: L{daklib.upload.Binary}
+ @param binary: binary to get override for
+
+ @rtype: L{daklib.dbconn.Override} or C{None}
+ @return: override for the given binary or C{None}
"""
if suite.overridesuite is not None:
suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
def _source_override(self, suite, source):
"""Get override entry for a source
- Args:
- suite (daklib.dbconn.Suite)
- source (daklib.upload.Source)
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: suite to get override for
+
+ @type source: L{daklib.upload.Source}
+ @param source: source to get override for
- Returns:
- daklib.dbconn.Override or None
+ @rtype: L{daklib.dbconn.Override} or C{None}
+ @return: override for the given source or C{None}
"""
if suite.overridesuite is not None:
suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
except NoResultFound:
return None
+ def _binary_component(self, suite, binary, only_overrides=True):
+ """get component for a binary
+
+ By default this will only look at overrides to get the right component;
+ if C{only_overrides} is C{False} this method will also look at the
+ Section field.
+
+ @type suite: L{daklib.dbconn.Suite}
+
+ @type binary: L{daklib.upload.Binary}
+
+ @type only_overrides: bool
+ @param only_overrides: only use overrides to get the right component
+
+ @rtype: L{daklib.dbconn.Component} or C{None}
+ """
+ override = self._binary_override(suite, binary)
+ if override is not None:
+ return override.component
+ if only_overrides:
+ return None
+ return get_mapped_component(binary.component, self.session)
+
def check(self, force=False):
"""run checks against the upload
- Args:
- force (bool): ignore failing forcable checks
+ @type force: bool
+ @param force: ignore failing forcable checks
- Returns:
- True if all checks passed, False otherwise
+ @rtype: bool
+ @return: C{True} if all checks passed, C{False} otherwise
"""
# XXX: needs to be better structured.
assert self.changes.valid_signature
try:
+ # Validate signatures and hashes before we do any real work:
for chk in (
checks.SignatureCheck,
checks.ChangesCheck,
checks.HashesCheck,
+ checks.ExternalHashesCheck,
checks.SourceCheck,
checks.BinaryCheck,
- checks.ACLCheck,
+ checks.BinaryTimestampCheck,
checks.SingleDistributionCheck,
- checks.NoSourceOnlyCheck,
- checks.LintianCheck,
):
chk().check(self)
final_suites = self._final_suites()
if len(final_suites) == 0:
- self.reject_reasons.append('Ended with no suite to install to.')
+ self.reject_reasons.append('No target suite found. Please check your target distribution and that you uploaded to the right archive.')
return False
+ self.final_suites = final_suites
+
for chk in (
+ checks.TransitionCheck,
+ checks.ACLCheck,
+ checks.NoSourceOnlyCheck,
+ checks.LintianCheck,
+ ):
+ chk().check(self)
+
+ for chk in (
+ checks.ACLCheck,
checks.SourceFormatCheck,
checks.SuiteArchitectureCheck,
checks.VersionCheck,
if len(self.reject_reasons) != 0:
return False
- self.final_suites = final_suites
+ self._checked = True
return True
except checks.Reject as e:
self.reject_reasons.append(unicode(e))
def _install_to_suite(self, suite, source_component_func, binary_component_func, source_suites=None, extra_source_archives=None):
"""Install upload to the given suite
- Args:
- suite (daklib.dbconn.Suite): suite to install the package into.
- This is the real suite, ie. after any redirection to NEW or a policy queue
- source_component_func: function to get the `daklib.dbconn.Component`
- for a `daklib.upload.Source` object
- binary_component_func: function to get the `daklib.dbconn.Component`
- for a `daklib.upload.Binary` object
-
- Kwargs:
- source_suites: see `daklib.archive.ArchiveTransaction.install_binary`
- extra_source_archives: see `daklib.archive.ArchiveTransaction.install_binary`
-
- Returns:
- tuple with two elements. The first is a `daklib.dbconn.DBSource`
- object for the install source or None if no source was included.
- The second is a list of `daklib.dbconn.DBBinary` objects for the
- installed binary packages.
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: suite to install the package into. This is the real suite,
+ ie. after any redirection to NEW or a policy queue
+
+ @param source_component_func: function to get the L{daklib.dbconn.Component}
+ for a L{daklib.upload.Source} object
+
+ @param binary_component_func: function to get the L{daklib.dbconn.Component}
+ for a L{daklib.upload.Binary} object
+
+ @param source_suites: see L{daklib.archive.ArchiveTransaction.install_binary}
+
+ @param extra_source_archives: see L{daklib.archive.ArchiveTransaction.install_binary}
+
+ @return: tuple with two elements. The first is a L{daklib.dbconn.DBSource}
+ object for the install source or C{None} if no source was
+ included. The second is a list of L{daklib.dbconn.DBBinary}
+ objects for the installed binary packages.
"""
# XXX: move this function to ArchiveTransaction?
changed_by = get_or_set_maintainer(control.get('Changed-By', control['Maintainer']), self.session)
if source_suites is None:
- source_suites = self.session.query(Suite).join((VersionCheck, VersionCheck.reference_id == Suite.suite_id)).filter(VersionCheck.suite == suite).subquery()
+ source_suites = self.session.query(Suite).join((VersionCheck, VersionCheck.reference_id == Suite.suite_id)).filter(VersionCheck.check == 'Enhances').filter(VersionCheck.suite == suite).subquery()
source = self.changes.source
if source is not None:
if suite.copychanges:
src = os.path.join(self.directory, self.changes.filename)
dst = os.path.join(suite.archive.path, 'dists', suite.suite_name, self.changes.filename)
- self.transaction.fs.copy(src, dst)
+ self.transaction.fs.copy(src, dst, mode=suite.archive.mode)
return (db_source, db_binaries)
self.transaction.session.flush()
dst = os.path.join(policy_queue.path, self.changes.filename)
- self.transaction.fs.copy(self.changes.path, dst)
+ self.transaction.fs.copy(self.changes.path, dst, mode=policy_queue.change_perms)
return u
Try to handle byhand packages automatically.
- Returns:
- list of `daklib.upload.hashed_file` for the remaining byhand packages
+ @rtype: list of L{daklib.upload.HashedFile}
+ @return: list of remaining byhand files
"""
assert len(self.reject_reasons) == 0
assert self.changes.valid_signature
assert self.final_suites is not None
+ assert self._checked
byhand = self.changes.byhand_files
if len(byhand) == 0:
remaining = []
for f in byhand:
- package, version, archext = f.filename.split('_', 2)
+ parts = f.filename.split('_', 2)
+ if len(parts) != 3:
+ print "W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)
+ remaining.append(f)
+ continue
+
+ package, version, archext = parts
arch, ext = archext.split('.', 1)
- rule = automatic_byhand_packages.get(package)
- if rule is None:
+ try:
+ rule = automatic_byhand_packages.subtree(package)
+ except KeyError:
remaining.append(f)
continue
return len(remaining) == 0
def _install_byhand(self, policy_queue_upload, hashed_file):
- """
- Args:
- policy_queue_upload (daklib.dbconn.PolicyQueueUpload): XXX
- hashed_file (daklib.upload.HashedFile): XXX
+ """install byhand file
+
+ @type policy_queue_upload: L{daklib.dbconn.PolicyQueueUpload}
+
+ @type hashed_file: L{daklib.upload.HashedFile}
"""
fs = self.transaction.fs
session = self.transaction.session
src = os.path.join(self.directory, hashed_file.filename)
dst = os.path.join(policy_queue.path, hashed_file.filename)
- fs.copy(src, dst)
+ fs.copy(src, dst, mode=policy_queue.change_perms)
return byhand_file
for binary in self.changes.binaries:
control = binary.control
source_package, source_version = binary.source
- line = " ".join([control['Package'], control['Version'], source_package, source_version])
+ line = " ".join([control['Package'], control['Version'], control['Architecture'], source_package, source_version])
print >>debinfo, line
debinfo.close()
+ def _policy_queue(self, suite):
+ if suite.policy_queue is not None:
+ return suite.policy_queue
+ return None
+
def install(self):
"""install upload
- Install upload to a suite or policy queue. This method does *not*
+ Install upload to a suite or policy queue. This method does B{not}
handle uploads to NEW.
- You need to have called the `check` method before calling this method.
+ You need to have called the C{check} method before calling this method.
"""
assert len(self.reject_reasons) == 0
assert self.changes.valid_signature
assert self.final_suites is not None
+ assert self._checked
assert not self.new
db_changes = self._install_changes()
if suite.overridesuite is not None:
overridesuite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
+ policy_queue = self._policy_queue(suite)
+
redirected_suite = suite
- if suite.policy_queue is not None:
- redirected_suite = suite.policy_queue.suite
+ if policy_queue is not None:
+ redirected_suite = policy_queue.suite
+
+ source_suites = self.session.query(Suite).filter(Suite.suite_id.in_([suite.suite_id, redirected_suite.suite_id])).subquery()
source_component_func = lambda source: self._source_override(overridesuite, source).component
- binary_component_func = lambda binary: self._binary_override(overridesuite, binary).component
+ binary_component_func = lambda binary: self._binary_component(overridesuite, binary)
- (db_source, db_binaries) = self._install_to_suite(redirected_suite, source_component_func, binary_component_func, extra_source_archives=[suite.archive])
+ (db_source, db_binaries) = self._install_to_suite(redirected_suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])
- if suite.policy_queue is not None:
- self._install_policy(suite.policy_queue, suite, db_changes, db_source, db_binaries)
+ if policy_queue is not None:
+ self._install_policy(policy_queue, suite, db_changes, db_source, db_binaries)
# copy to build queues
- if suite.policy_queue is None or suite.policy_queue.send_to_build_queues:
+ if policy_queue is None or policy_queue.send_to_build_queues:
for build_queue in suite.copy_queues:
- self._install_to_suite(build_queue.suite, source_component_func, binary_component_func, extra_source_archives=[suite.archive])
+ self._install_to_suite(build_queue.suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])
self._do_bts_versiontracking()
def install_to_new(self):
"""install upload to NEW
- Install upload to NEW. This method does *not* handle regular uploads
+ Install upload to NEW. This method does B{not} handle regular uploads
to suites or policy queues.
- You need to have called the `check` method before calling this method.
+ You need to have called the C{check} method before calling this method.
"""
# Uploads to NEW are special as we don't have overrides.
assert len(self.reject_reasons) == 0
binaries = self.changes.binaries
byhand = self.changes.byhand_files
- new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one()
- if len(byhand) > 0:
- new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one()
- new_suite = new_queue.suite
-
# we need a suite to guess components
suites = list(self.final_suites)
assert len(suites) == 1, "NEW uploads must be to a single suite"
suite = suites[0]
+ # decide which NEW queue to use
+ if suite.new_queue is None:
+ new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one()
+ else:
+ new_queue = suite.new_queue
+ if len(byhand) > 0:
+ # There is only one global BYHAND queue
+ new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one()
+ new_suite = new_queue.suite
+
+
def binary_component_func(binary):
- override = self._binary_override(suite, binary)
- if override is not None:
- return override.component
- component_name = binary.component
- component = self.session.query(Component).filter_by(component_name=component_name).one()
- return component
+ return self._binary_component(suite, binary, only_overrides=False)
# guess source component
# XXX: should be moved into an extra method
component = binary_component_func(binary)
binary_component_names.add(component.component_name)
source_component_name = None
- for guess in ('main', 'contrib', 'non-free'):
+ for c in self.session.query(Component).order_by(Component.component_id):
+ guess = c.component_name
if guess in binary_component_names:
source_component_name = guess
break
if source_component_name is None:
- raise Exception('Could not guess source component.')
- source_component = self.session.query(Component).filter_by(component_name=source_component_name).one()
+ source_component = self.session.query(Component).order_by(Component.component_id).first()
+ else:
+ source_component = self.session.query(Component).filter_by(component_name=source_component_name).one()
source_component_func = lambda source: source_component
db_changes = self._install_changes()