This module provides classes to manipulate the archive.
"""
-from .dbconn import *
+from daklib.dbconn import *
import daklib.checks as checks
from daklib.config import Config
import daklib.upload as upload
import daklib.utils as utils
-from .fstransactions import FilesystemTransaction
-from .regexes import re_changelog_versions, re_bin_only_nmu
+from daklib.fstransactions import FilesystemTransaction
+from daklib.regexes import re_changelog_versions, re_bin_only_nmu
import apt_pkg
from datetime import datetime
self.session = DBConn().session()
def get_file(self, hashed_file, source_name):
- """Look for file `hashed_file` in database
+ """Look for file C{hashed_file} in database
- Args:
- hashed_file (daklib.upload.HashedFile): file to look for in the database
+ @type hashed_file: L{daklib.upload.HashedFile}
+ @param hashed_file: file to look for in the database
- Raises:
- KeyError: file was not found in the database
- HashMismatchException: hash mismatch
+ @raise KeyError: file was not found in the database
+ @raise HashMismatchException: hash mismatch
- Returns:
- `daklib.dbconn.PoolFile` object for the database
+ @rtype: L{daklib.dbconn.PoolFile}
+ @return: database entry for the file
"""
poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
try:
Will not give an error when the file is already present.
- Returns:
- `daklib.dbconn.PoolFile` object for the new file
+ @rtype: L{daklib.dbconn.PoolFile}
+ @return: database object for the new file
"""
session = self.session
def install_binary(self, directory, binary, suite, component, allow_tainted=False, fingerprint=None, source_suites=None, extra_source_archives=None):
"""Install a binary package
- Args:
- directory (str): directory the binary package is located in
- binary (daklib.upload.Binary): binary package to install
- suite (daklib.dbconn.Suite): target suite
- component (daklib.dbconn.Component): target component
-
- Kwargs:
- allow_tainted (bool): allow to copy additional files from tainted archives
- fingerprint (daklib.dbconn.Fingerprint): optional fingerprint
- source_suites (list of daklib.dbconn.Suite or True): suites to copy
- the source from if they are not in `suite` or True to allow
- copying from any suite.
- This can also be a SQLAlchemy (sub)query object.
- extra_source_archives (list of daklib.dbconn.Archive): extra archives to copy Built-Using sources from
-
- Returns:
- `daklib.dbconn.DBBinary` object for the new package
+ @type directory: str
+ @param directory: directory the binary package is located in
+
+ @type binary: L{daklib.upload.Binary}
+ @param binary: binary package to install
+
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: target suite
+
+ @type component: L{daklib.dbconn.Component}
+ @param component: target component
+
+ @type allow_tainted: bool
+ @param allow_tainted: allow to copy additional files from tainted archives
+
+ @type fingerprint: L{daklib.dbconn.Fingerprint}
+ @param fingerprint: optional fingerprint
+
+ @type source_suites: list of L{daklib.dbconn.Suite} or C{True}
+ @param source_suites: suites to copy the source from if they are not
+ in C{suite} or C{True} to allow copying from any
+ suite.
+ This can also be a SQLAlchemy (sub)query object.
+
+ @type extra_source_archives: list of L{daklib.dbconn.Archive}
+ @param extra_source_archives: extra archives to copy Built-Using sources from
+
+ @rtype: L{daklib.dbconn.DBBinary}
+ @return: database object for the new package
"""
session = self.session
control = binary.control
This is intended to be used to check that Built-Using sources exist.
- Args:
- filename (str): filename to use in error messages
- source (daklib.dbconn.DBSource): source to look for
- archive (daklib.dbconn.Archive): archive to look in
+ @type filename: str
+ @param filename: filename to use in error messages
+
+ @type source: L{daklib.dbconn.DBSource}
+ @param source: source to look for
+
+ @type archive: L{daklib.dbconn.Archive}
+ @param archive: archive to look in
- Kwargs:
- extra_archives (list of daklib.dbconn.Archive): list of archives to copy
- the source package from if it is not yet present in `archive`
+ @type extra_archives: list of L{daklib.dbconn.Archive}
+ @param extra_archives: list of archives to copy the source package from
+ if it is not yet present in C{archive}
"""
session = self.session
db_file = session.query(ArchiveFile).filter_by(file=source.poolfile, archive=archive).first()
self._copy_file(af.file, archive, db_file.component, allow_tainted=True)
def _add_built_using(self, db_binary, filename, control, suite, extra_archives=None):
- """Add Built-Using sources to `db_binary.extra_sources`
+ """Add Built-Using sources to C{db_binary.extra_sources}
"""
session = self.session
built_using = control.get('Built-Using', None)
def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None):
"""Install a source package
- Args:
- directory (str): directory the source package is located in
- source (daklib.upload.Source): source package to install
- suite (daklib.dbconn.Suite): target suite
- component (daklib.dbconn.Component): target component
- changed_by (daklib.dbconn.Maintainer): person who prepared this version of the package
+ @type directory: str
+ @param directory: directory the source package is located in
+
+ @type source: L{daklib.upload.Source}
+ @param source: source package to install
+
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: target suite
+
+ @type component: L{daklib.dbconn.Component}
+ @param component: target component
- Kwargs:
- allow_tainted (bool): allow to copy additional files from tainted archives
- fingerprint (daklib.dbconn.Fingerprint): optional fingerprint
+ @type changed_by: L{daklib.dbconn.Maintainer}
+ @param changed_by: person who prepared this version of the package
- Returns:
- `daklib.dbconn.DBSource` object for the new source
+ @type allow_tainted: bool
+ @param allow_tainted: allow to copy additional files from tainted archives
+
+ @type fingerprint: L{daklib.dbconn.Fingerprint}
+ @param fingerprint: optional fingerprint
+
+ @rtype: L{daklib.dbconn.DBSource}
+ @return: database object for the new source
"""
session = self.session
archive = suite.archive
def _copy_file(self, db_file, archive, component, allow_tainted=False):
"""Copy a file to the given archive and component
- Args:
- db_file (daklib.dbconn.PoolFile): file to copy
- archive (daklib.dbconn.Archive): target archive
- component (daklib.dbconn.Component): target component
+ @type db_file: L{daklib.dbconn.PoolFile}
+ @param db_file: file to copy
+
+ @type archive: L{daklib.dbconn.Archive}
+ @param archive: target archive
+
+ @type component: L{daklib.dbconn.Archive}
+ @param component: target component
- Kwargs:
- allow_tainted (bool): allow to copy from tainted archives (such as NEW)
+ @type allow_tainted: bool
+ @param allow_tainted: allow to copy from tainted archives (such as NEW)
"""
session = self.session
def copy_binary(self, db_binary, suite, component, allow_tainted=False, extra_archives=None):
"""Copy a binary package to the given suite and component
- Args:
- db_binary (daklib.dbconn.DBBinary): binary to copy
- suite (daklib.dbconn.Suite): target suite
- component (daklib.dbconn.Component): target component
+ @type db_binary: L{daklib.dbconn.DBBinary}
+ @param db_binary: binary to copy
+
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: target suite
+
+ @type component: L{daklib.dbconn.Component}
+ @param component: target component
+
+ @type allow_tainted: bool
+ @param allow_tainted: allow to copy from tainted archives (such as NEW)
- Kwargs:
- allow_tainted (bool): allow to copy from tainted archives (such as NEW)
- extra_archives (list of daklib.dbconn.Archive): extra archives to copy Built-Using sources from
+ @type extra_archives: list of L{daklib.dbconn.Archive}
+ @param extra_archives: extra archives to copy Built-Using sources from
"""
session = self.session
archive = suite.archive
def copy_source(self, db_source, suite, component, allow_tainted=False):
"""Copy a source package to the given suite and component
- Args:
- db_source (daklib.dbconn.DBSource): source to copy
- suite (daklib.dbconn.Suite): target suite
- component (daklib.dbconn.Component): target component
+ @type db_source: L{daklib.dbconn.DBSource}
+ @param db_source: source to copy
- Kwargs:
- allow_tainted (bool): allow to copy from tainted archives (such as NEW)
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: target suite
+
+ @type component: L{daklib.dbconn.Component}
+ @param component: target component
+
+ @type allow_tainted: bool
+ @param allow_tainted: allow to copy from tainted archives (such as NEW)
"""
archive = suite.archive
if archive.tainted:
def remove_file(self, db_file, archive, component):
"""Remove a file from a given archive and component
- Args:
- db_file (daklib.dbconn.PoolFile): file to remove
- archive (daklib.dbconn.Archive): archive to remove the file from
- component (daklib.dbconn.Component): component to remove the file from
+ @type db_file: L{daklib.dbconn.PoolFile}
+ @param db_file: file to remove
+
+ @type archive: L{daklib.dbconn.Archive}
+ @param archive: archive to remove the file from
+
+ @type component: L{daklib.dbconn.Component}
+ @param component: component to remove the file from
"""
af = self.session.query(ArchiveFile).filter_by(file=db_file, archive=archive, component=component)
self.fs.unlink(af.path)
def remove_binary(self, binary, suite):
"""Remove a binary from a given suite and component
- Args:
- binary (daklib.dbconn.DBBinary): binary to remove
- suite (daklib.dbconn.Suite): suite to remove the package from
+ @type binary: L{daklib.dbconn.DBBinary}
+ @param binary: binary to remove
+
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: suite to remove the package from
"""
binary.suites.remove(suite)
self.session.flush()
def remove_source(self, source, suite):
"""Remove a source from a given suite and component
- Raises:
- ArchiveException: source package is still referenced by other
- binaries in the suite
+ @type source: L{daklib.dbconn.DBSource}
+ @param source: source to remove
+
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: suite to remove the package from
- Args:
- binary (daklib.dbconn.DBSource): source to remove
- suite (daklib.dbconn.Suite): suite to remove the package from
+ @raise ArchiveException: source package is still referenced by other
+ binaries in the suite
"""
session = self.session
class ArchiveUpload(object):
"""handle an upload
- This class can be used in a with-statement:
+ This class can be used in a with-statement::
with ArchiveUpload(...) as upload:
...
Doing so will automatically run any required cleanup and also rollback the
transaction if it was not committed.
-
- Attributes:
- changes (daklib.upload.Changes): upload to process
- directory (str): directory with temporary copy of files. set by `prepare`
- fingerprint (daklib.dbconn.Fingerprint): fingerprint used to sign the upload
- new (bool): upload is NEW. set by `check`
- reject_reasons (list of str): reasons why the upload cannot be accepted
- session: database session
- transaction (daklib.archive.ArchiveTransaction): transaction used to handle the upload
- warnings (list of str): warnings (NOT USED YET)
"""
def __init__(self, directory, changes, keyrings):
self.transaction = ArchiveTransaction()
+ """transaction used to handle the upload
+ @type: L{daklib.archive.ArchiveTransaction}
+ """
+
self.session = self.transaction.session
+ """database session"""
self.original_directory = directory
self.original_changes = changes
+
self.changes = None
+ """upload to process
+ @type: L{daklib.upload.Changes}
+ """
+
self.directory = None
+ """directory with temporary copy of files. set by C{prepare}
+ @type: str
+ """
+
self.keyrings = keyrings
self.fingerprint = self.session.query(Fingerprint).filter_by(fingerprint=changes.primary_fingerprint).one()
+ """fingerprint of the key used to sign the upload
+ @type: L{daklib.dbconn.Fingerprint}
+ """
self.reject_reasons = []
+ """reasons why the upload cannot by accepted
+ @type: list of str
+ """
+
self.warnings = []
+ """warnings
+ @note: Not used yet.
+ @type: list of str
+ """
+
self.final_suites = None
+
self.new = False
+ """upload is NEW. set by C{check}
+ @type: bool
+ """
self._new_queue = self.session.query(PolicyQueue).filter_by(queue_name='new').one()
self._new = self._new_queue.suite
This copies the files involved to a temporary directory. If you use
this method directly, you have to remove the directory given by the
- `directory` attribute later on your own.
+ C{directory} attribute later on your own.
- Instead of using the method directly, you can also use a with-statement:
+ Instead of using the method directly, you can also use a with-statement::
with ArchiveUpload(...) as upload:
...
"""Path to unpacked source
Get path to the unpacked source. This method does unpack the source
- into a temporary directory under `self.directory` if it has not
+ into a temporary directory under C{self.directory} if it has not
been done so already.
- Returns:
- String giving the path to the unpacked source directory
- or None if no source was included in the upload.
+ @rtype: str or C{None}
+ @return: string giving the path to the unpacked source directory
+ or C{None} if no source was included in the upload.
"""
assert self.directory is not None
def _mapped_suites(self):
"""Get target suites after mappings
- Returns:
- list of daklib.dbconn.Suite giving the mapped target suites of this upload
+ @rtype: list of L{daklib.dbconn.Suite}
+ @return: list giving the mapped target suites of this upload
"""
session = self.session
Evaluate component mappings from ComponentMappings in dak.conf for the
given component name.
- NOTE: ansgar wants to get rid of this. It's currently only used for
- the security archive
+ @todo: ansgar wants to get rid of this. It's currently only used for
+ the security archive
- Args:
- component_name (str): component name
+ @type component_name: str
+ @param component_name: component name
- Returns:
- `daklib.dbconn.Component` object
+ @rtype: L{daklib.dbconn.Component}
+ @return: component after applying maps
"""
cnf = Config()
for m in cnf.value_list("ComponentMappings"):
"""Check if upload is NEW
An upload is NEW if it has binary or source packages that do not have
- an override in `suite` OR if it references files ONLY in a tainted
+ an override in C{suite} OR if it references files ONLY in a tainted
archive (eg. when it references files in NEW).
- Returns:
- True if the upload is NEW, False otherwise
+ @rtype: bool
+ @return: C{True} if the upload is NEW, C{False} otherwise
"""
session = self.session
def _binary_override(self, suite, binary):
"""Get override entry for a binary
- Args:
- suite (daklib.dbconn.Suite)
- binary (daklib.upload.Binary)
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: suite to get override for
- Returns:
- daklib.dbconn.Override or None
+ @type binary: L{daklib.upload.Binary}
+ @param binary: binary to get override for
+
+ @rtype: L{daklib.dbconn.Override} or C{None}
+ @return: override for the given binary or C{None}
"""
if suite.overridesuite is not None:
suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
def _source_override(self, suite, source):
"""Get override entry for a source
- Args:
- suite (daklib.dbconn.Suite)
- source (daklib.upload.Source)
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: suite to get override for
+
+ @type source: L{daklib.upload.Source}
+ @param source: source to get override for
- Returns:
- daklib.dbconn.Override or None
+ @rtype: L{daklib.dbconn.Override} or C{None}
+ @return: override for the given source or C{None}
"""
if suite.overridesuite is not None:
suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
"""get component for a binary
By default this will only look at overrides to get the right component;
- if `only_overrides` is False this method will also look at the Section field.
+ if C{only_overrides} is C{False} this method will also look at the
+ Section field.
- Args:
- suite (daklib.dbconn.Suite)
- binary (daklib.upload.Binary)
+ @type suite: L{daklib.dbconn.Suite}
- Kwargs:
- only_overrides (bool): only use overrides to get the right component.
- defaults to True.
+ @type binary: L{daklib.upload.Binary}
- Returns:
- `daklib.dbconn.Component` object or None
+ @type only_overrides: bool
+ @param only_overrides: only use overrides to get the right component
+
+ @rtype: L{daklib.dbconn.Component} or C{None}
"""
override = self._binary_override(suite, binary)
if override is not None:
def check(self, force=False):
"""run checks against the upload
- Args:
- force (bool): ignore failing forcable checks
+ @type force: bool
+ @param force: ignore failing forcible checks
- Returns:
- True if all checks passed, False otherwise
+ @rtype: bool
+ @return: C{True} if all checks passed, C{False} otherwise
"""
# XXX: needs to be better structured.
assert self.changes.valid_signature
def _install_to_suite(self, suite, source_component_func, binary_component_func, source_suites=None, extra_source_archives=None):
"""Install upload to the given suite
- Args:
- suite (daklib.dbconn.Suite): suite to install the package into.
- This is the real suite, ie. after any redirection to NEW or a policy queue
- source_component_func: function to get the `daklib.dbconn.Component`
- for a `daklib.upload.Source` object
- binary_component_func: function to get the `daklib.dbconn.Component`
- for a `daklib.upload.Binary` object
-
- Kwargs:
- source_suites: see `daklib.archive.ArchiveTransaction.install_binary`
- extra_source_archives: see `daklib.archive.ArchiveTransaction.install_binary`
-
- Returns:
- tuple with two elements. The first is a `daklib.dbconn.DBSource`
- object for the install source or None if no source was included.
- The second is a list of `daklib.dbconn.DBBinary` objects for the
- installed binary packages.
+ @type suite: L{daklib.dbconn.Suite}
+ @param suite: suite to install the package into. This is the real suite,
+ ie. after any redirection to NEW or a policy queue
+
+ @param source_component_func: function to get the L{daklib.dbconn.Component}
+ for a L{daklib.upload.Source} object
+
+ @param binary_component_func: function to get the L{daklib.dbconn.Component}
+ for a L{daklib.upload.Binary} object
+
+ @param source_suites: see L{daklib.archive.ArchiveTransaction.install_binary}
+
+ @param extra_source_archives: see L{daklib.archive.ArchiveTransaction.install_binary}
+
+ @return: tuple with two elements. The first is a L{daklib.dbconn.DBSource}
+ object for the installed source or C{None} if no source was
+ included. The second is a list of L{daklib.dbconn.DBBinary}
+ objects for the installed binary packages.
"""
# XXX: move this function to ArchiveTransaction?
Try to handle byhand packages automatically.
- Returns:
- list of `daklib.upload.hashed_file` for the remaining byhand packages
+ @rtype: list of L{daklib.upload.HashedFile}
+ @return: list of remaining byhand files
"""
assert len(self.reject_reasons) == 0
assert self.changes.valid_signature
return len(remaining) == 0
def _install_byhand(self, policy_queue_upload, hashed_file):
- """
- Args:
- policy_queue_upload (daklib.dbconn.PolicyQueueUpload): XXX
- hashed_file (daklib.upload.HashedFile): XXX
+ """install byhand file
+
+ @type policy_queue_upload: L{daklib.dbconn.PolicyQueueUpload}
+
+ @type hashed_file: L{daklib.upload.HashedFile}
"""
fs = self.transaction.fs
session = self.transaction.session
def install(self):
"""install upload
- Install upload to a suite or policy queue. This method does *not*
+ Install upload to a suite or policy queue. This method does B{not}
handle uploads to NEW.
- You need to have called the `check` method before calling this method.
+ You need to have called the C{check} method before calling this method.
"""
assert len(self.reject_reasons) == 0
assert self.changes.valid_signature
def install_to_new(self):
"""install upload to NEW
- Install upload to NEW. This method does *not* handle regular uploads
+ Install upload to NEW. This method does B{not} handle regular uploads
to suites or policy queues.
- You need to have called the `check` method before calling this method.
+ You need to have called the C{check} method before calling this method.
"""
# Uploads to NEW are special as we don't have overrides.
assert len(self.reject_reasons) == 0
"""module to handle uploads not yet installed to the archive
This module provides classes to handle uploads not yet installed to the
-archive. Central is the `Changes` class which represents a changes file.
+archive. Central is the L{Changes} class which represents a changes file.
It provides methods to access the included binary and source packages.
"""
import apt_pkg
import os
import re
-from .gpg import SignedFile
-from .regexes import *
+
+from daklib.gpg import SignedFile
+from daklib.regexes import *
class InvalidChangesException(Exception):
pass
class HashedFile(object):
"""file with checksums
-
- Attributes:
- filename (str): name of the file
- size (long): size in bytes
- md5sum (str): MD5 hash in hexdigits
- sha1sum (str): SHA1 hash in hexdigits
- sha256sum (str): SHA256 hash in hexdigits
- section (str): section or None
- priority (str): priority or None
"""
def __init__(self, filename, size, md5sum, sha1sum, sha256sum, section=None, priority=None):
self.filename = filename
+ """name of the file
+ @type: str
+ """
+
self.size = size
+ """size in bytes
+ @type: long
+ """
+
self.md5sum = md5sum
+ """MD5 hash in hexdigits
+ @type: str
+ """
+
self.sha1sum = sha1sum
+ """SHA1 hash in hexdigits
+ @type: str
+ """
+
self.sha256sum = sha256sum
+ """SHA256 hash in hexdigits
+ @type: str
+ """
+
self.section = section
+ """section or C{None}
+ @type: str or C{None}
+ """
+
self.priority = priority
+ """priority or C{None}
+ @type: str or C{None}
+ """
def check(self, directory):
"""Validate hashes
Check if size and hashes match the expected value.
- Args:
- directory (str): directory the file is located in
+ @type directory: str
+ @param directory: directory the file is located in
- Raises:
- InvalidHashException: hash mismatch
+ @raise InvalidHashException: hash mismatch
"""
path = os.path.join(directory, self.filename)
fh = open(path, 'r')
def parse_file_list(control, has_priority_and_section):
"""Parse Files and Checksums-* fields
- Args:
- control (dict-like): control file to take fields from
- has_priority_and_section (bool): Files include section and priority (as in .changes)
+ @type control: dict-like
+ @param control: control file to take fields from
+
+ @type has_priority_and_section: bool
+ @param has_priority_and_section: Files field includes section and priority
+ (as in .changes)
- Raises:
- InvalidChangesException: missing fields or other grave errors
+ @raise InvalidChangesException: missing fields or other grave errors
- Returns:
- dictonary mapping filenames to `daklib.upload.HashedFile` objects
+ @rtype: dict
+ @return: dict mapping filenames to L{daklib.upload.HashedFile} objects
"""
entries = {}
class Changes(object):
"""Representation of a .changes file
-
- Attributes:
- architectures (list of str): list of architectures included in the upload
- binaries (list of daklib.upload.Binary): included binary packages
- binary_names (list of str): names of included binary packages
- byhand_files (list of daklib.upload.HashedFile): included byhand files
- bytes (int): total size of files included in this upload in bytes
- changes (dict-like): dict to access fields of the .changes file
- closed_bugs (list of str): list of bugs closed by this upload
- directory (str): directory the .changes is located in
- distributions (list of str): list of target distributions for the upload
- filename (str): name of the .changes file
- files (dict): dict mapping filenames to daklib.upload.HashedFile objects
- path (str): path to the .changes files
- primary_fingerprint (str): fingerprint of the PGP key used for the signature
- source (daklib.upload.Source or None): included source
- valid_signature (bool): True if the changes has a valid signature
"""
def __init__(self, directory, filename, keyrings, require_signature=True):
if not re_file_safe.match(filename):
raise InvalidChangesException('{0}: unsafe filename'.format(filename))
+
self.directory = directory
+ """directory the .changes is located in
+ @type: str
+ """
+
self.filename = filename
+ """name of the .changes file
+ @type: str
+ """
+
data = open(self.path).read()
self._signed_file = SignedFile(data, keyrings, require_signature)
self.changes = apt_pkg.TagSection(self._signed_file.contents)
+ """dict to access fields of the .changes file
+ @type: dict-like
+ """
+
self._binaries = None
self._source = None
self._files = None
@property
def path(self):
+ """path to the .changes file
+ @type: str
+ """
return os.path.join(self.directory, self.filename)
@property
def primary_fingerprint(self):
+ """fingerprint of the key used for signing the .changes file
+ @type: str
+ """
return self._signed_file.primary_fingerprint
@property
def valid_signature(self):
+ """C{True} if the .changes has a valid signature
+ @type: bool
+ """
return self._signed_file.valid
@property
def architectures(self):
+ """list of architectures included in the upload
+ @type: list of str
+ """
return self.changes['Architecture'].split()
@property
def distributions(self):
+ """list of target distributions for the upload
+ @type: list of str
+ """
return self.changes['Distribution'].split()
@property
def source(self):
+ """included source or C{None}
+ @type: L{daklib.upload.Source} or C{None}
+ """
if self._source is None:
source_files = []
for f in self.files.itervalues():
@property
def binaries(self):
+ """included binary packages
+ @type: list of L{daklib.upload.Binary}
+ """
if self._binaries is None:
binaries = []
for f in self.files.itervalues():
@property
def byhand_files(self):
+ """included byhand files
+ @type: list of L{daklib.upload.HashedFile}
+ """
byhand = []
for f in self.files.itervalues():
@property
def binary_names(self):
+ """names of included binary packages
+ @type: list of str
+ """
return self.changes['Binary'].split()
@property
def closed_bugs(self):
+ """bugs closed by this upload
+ @type: list of str
+ """
return self.changes.get('Closes', '').split()
@property
def files(self):
+ """dict mapping filenames to L{daklib.upload.HashedFile} objects
+ @type: dict
+ """
if self._files is None:
self._files = parse_file_list(self.changes, True)
return self._files
@property
def bytes(self):
+ """total size of files included in this upload in bytes
+ @type: number
+ """
count = 0
for f in self.files.itervalues():
count += f.size
return count
def __cmp__(self, other):
- """Compare two changes packages
+ """compare two changes files
We sort by source name and version first. If these are identical,
we sort changes that include source before those without source (so
that sourceful uploads get processed first), and finally fall back
to the filename (this should really never happen).
- Returns:
- -1 if self < other, 0 if self == other, 1 if self > other
+ @rtype: number
+ @return: n where n < 0 if self < other, n = 0 if self == other, n > 0 if self > other
"""
ret = cmp(self.changes.get('Source'), other.changes.get('Source'))
class Binary(object):
"""Representation of a binary package
-
- Attributes:
- component (str): component name
- control (dict-like): dict to access fields in DEBIAN/control
- hashed_file (HashedFile): HashedFile object for the .deb
"""
def __init__(self, directory, hashed_file):
self.hashed_file = hashed_file
+ """file object for the .deb
+ @type: HashedFile
+ """
path = os.path.join(directory, hashed_file.filename)
data = apt_inst.DebFile(path).control.extractdata("control")
+
self.control = apt_pkg.TagSection(data)
+ """dict to access fields in DEBIAN/control
+ @type: dict-like
+ """
@property
def source(self):
- """Get source package name and version
-
- Returns:
- tuple containing source package name and version
+ """get tuple with source package name and version
+ @type: tuple of str
"""
source = self.control.get("Source", None)
if source is None:
@property
def type(self):
- """Get package type
-
- Returns:
- String with the package type ('deb' or 'udeb')
+ """package type ('deb' or 'udeb')
+ @type: str
"""
match = re_file_binary.match(self.hashed_file.filename)
if not match:
@property
def component(self):
+ """component name
+ @type: str
+ """
fields = self.control['Section'].split('/')
if len(fields) > 1:
return fields[0]
class Source(object):
"""Representation of a source package
-
- Attributes:
- component (str): guessed component name. Might be wrong!
- dsc (dict-like): dict to access fields in the .dsc file
- hashed_files (list of daklib.upload.HashedFile): list of source files (including .dsc)
- files (dict): dictonary mapping filenames to HashedFile objects for
- additional source files (not including .dsc)
- primary_fingerprint (str): fingerprint of the PGP key used for the signature
- valid_signature (bool): True if the dsc has a valid signature
"""
def __init__(self, directory, hashed_files, keyrings, require_signature=True):
self.hashed_files = hashed_files
+ """list of source files (including the .dsc itself)
+ @type: list of L{HashedFile}
+ """
+
self._dsc_file = None
for f in hashed_files:
if re_file_dsc.match(f.filename):
data = open(dsc_file_path, 'r').read()
self._signed_file = SignedFile(data, keyrings, require_signature)
self.dsc = apt_pkg.TagSection(self._signed_file.contents)
+ """dict to access fields in the .dsc file
+ @type: dict-like
+ """
+
self._files = None
@property
def files(self):
+ """dict mapping filenames to L{HashedFile} objects for additional source files
+
+ This list does not include the .dsc itself.
+
+ @type: dict
+ """
if self._files is None:
self._files = parse_file_list(self.dsc, False)
return self._files
@property
def primary_fingerprint(self):
+ """fingerprint of the key used to sign the .dsc
+ @type: str
+ """
return self._signed_file.primary_fingerprint
@property
def valid_signature(self):
+ """C{True} if the .dsc has a valid signature
+ @type: bool
+ """
return self._signed_file.valid
@property
def component(self):
+ """guessed component name
+
+ Might be wrong. Don't rely on this.
+
+ @type: str
+ """
if 'Section' not in self.dsc:
return 'main'
fields = self.dsc['Section'].split('/')