From c47d45b655bbf9e6d13d21a1ebe4b5248f5d50df Mon Sep 17 00:00:00 2001 From: Ansgar Burchardt Date: Wed, 1 Aug 2012 17:35:24 +0200 Subject: [PATCH] change documentation style --- daklib/archive.py | 389 +++++++++++++++++++++++---------------- daklib/checks.py | 34 ++-- daklib/fstransactions.py | 55 +++--- daklib/policy.py | 65 ++++--- daklib/upload.py | 200 +++++++++++++------- daklib/utils.py | 34 ++-- 6 files changed, 469 insertions(+), 308 deletions(-) diff --git a/daklib/archive.py b/daklib/archive.py index 7f6895ad..29a5621d 100644 --- a/daklib/archive.py +++ b/daklib/archive.py @@ -19,13 +19,13 @@ This module provides classes to manipulate the archive. """ -from .dbconn import * +from daklib.dbconn import * import daklib.checks as checks from daklib.config import Config import daklib.upload as upload import daklib.utils as utils -from .fstransactions import FilesystemTransaction -from .regexes import re_changelog_versions, re_bin_only_nmu +from daklib.fstransactions import FilesystemTransaction +from daklib.regexes import re_changelog_versions, re_bin_only_nmu import apt_pkg from datetime import datetime @@ -50,17 +50,16 @@ class ArchiveTransaction(object): self.session = DBConn().session() def get_file(self, hashed_file, source_name): - """Look for file `hashed_file` in database + """Look for file C{hashed_file} in database - Args: - hashed_file (daklib.upload.HashedFile): file to look for in the database + @type hashed_file: L{daklib.upload.HashedFile} + @param hashed_file: file to look for in the database - Raises: - KeyError: file was not found in the database - HashMismatchException: hash mismatch + @raise KeyError: file was not found in the database + @raise HashMismatchException: hash mismatch - Returns: - `daklib.dbconn.PoolFile` object for the database + @rtype: L{daklib.dbconn.PoolFile} + @return: database entry for the file """ poolname = os.path.join(utils.poolify(source_name), hashed_file.filename) try: @@ -76,8 +75,8 @@ class 
ArchiveTransaction(object): Will not give an error when the file is already present. - Returns: - `daklib.dbconn.PoolFile` object for the new file + @rtype: L{daklib.dbconn.PoolFile} + @return: database object for the new file """ session = self.session @@ -108,23 +107,35 @@ class ArchiveTransaction(object): def install_binary(self, directory, binary, suite, component, allow_tainted=False, fingerprint=None, source_suites=None, extra_source_archives=None): """Install a binary package - Args: - directory (str): directory the binary package is located in - binary (daklib.upload.Binary): binary package to install - suite (daklib.dbconn.Suite): target suite - component (daklib.dbconn.Component): target component - - Kwargs: - allow_tainted (bool): allow to copy additional files from tainted archives - fingerprint (daklib.dbconn.Fingerprint): optional fingerprint - source_suites (list of daklib.dbconn.Suite or True): suites to copy - the source from if they are not in `suite` or True to allow - copying from any suite. - This can also be a SQLAlchemy (sub)query object. - extra_source_archives (list of daklib.dbconn.Archive): extra archives to copy Built-Using sources from - - Returns: - `daklib.dbconn.DBBinary` object for the new package + @type directory: str + @param directory: directory the binary package is located in + + @type binary: L{daklib.upload.Binary} + @param binary: binary package to install + + @type suite: L{daklib.dbconn.Suite} + @param suite: target suite + + @type component: L{daklib.dbconn.Component} + @param component: target component + + @type allow_tainted: bool + @param allow_tainted: allow to copy additional files from tainted archives + + @type fingerprint: L{daklib.dbconn.Fingerprint} + @param fingerprint: optional fingerprint + + @type source_suites: list of L{daklib.dbconn.Suite} or C{True} + @param source_suites: suites to copy the source from if they are not + in C{suite} or C{True} to allow copying from any + suite. 
+ This can also be a SQLAlchemy (sub)query object. + + @type extra_source_archives: list of L{daklib.dbconn.Archive} + @param extra_source_archives: extra archives to copy Built-Using sources from + + @rtype: L{daklib.dbconn.DBBinary} + @return: database object for the new package """ session = self.session control = binary.control @@ -185,14 +196,18 @@ class ArchiveTransaction(object): This is intended to be used to check that Built-Using sources exist. - Args: - filename (str): filename to use in error messages - source (daklib.dbconn.DBSource): source to look for - archive (daklib.dbconn.Archive): archive to look in + @type filename: str + @param filename: filename to use in error messages + + @type source: L{daklib.dbconn.DBSource} + @param source: source to look for + + @type archive: L{daklib.dbconn.Archive} + @param archive: archive to look in - Kwargs: - extra_archives (list of daklib.dbconn.Archive): list of archives to copy - the source package from if it is not yet present in `archive` + @type extra_archives: list of L{daklib.dbconn.Archive} + @param extra_archives: list of archives to copy the source package from + if it is not yet present in C{archive} """ session = self.session db_file = session.query(ArchiveFile).filter_by(file=source.poolfile, archive=archive).first() @@ -213,7 +228,7 @@ class ArchiveTransaction(object): self._copy_file(af.file, archive, db_file.component, allow_tainted=True) def _add_built_using(self, db_binary, filename, control, suite, extra_archives=None): - """Add Built-Using sources to `db_binary.extra_sources` + """Add Built-Using sources to C{db_binary.extra_sources} """ session = self.session built_using = control.get('Built-Using', None) @@ -235,19 +250,29 @@ class ArchiveTransaction(object): def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None): """Install a source package - Args: - directory (str): directory the source package is located in - source 
(daklib.upload.Source): source package to install - suite (daklib.dbconn.Suite): target suite - component (daklib.dbconn.Component): target component - changed_by (daklib.dbconn.Maintainer): person who prepared this version of the package + @type directory: str + @param directory: directory the source package is located in + + @type source: L{daklib.upload.Source} + @param source: source package to install + + @type suite: L{daklib.dbconn.Suite} + @param suite: target suite + + @type component: L{daklib.dbconn.Component} + @param component: target component - Kwargs: - allow_tainted (bool): allow to copy additional files from tainted archives - fingerprint (daklib.dbconn.Fingerprint): optional fingerprint + @type changed_by: L{daklib.dbconn.Maintainer} + @param changed_by: person who prepared this version of the package - Returns: - `daklib.dbconn.DBSource` object for the new source + @type allow_tainted: bool + @param allow_tainted: allow to copy additional files from tainted archives + + @type fingerprint: L{daklib.dbconn.Fingerprint} + @param fingerprint: optional fingerprint + + @rtype: L{daklib.dbconn.DBSource} + @return: database object for the new source """ session = self.session archive = suite.archive @@ -342,13 +367,17 @@ class ArchiveTransaction(object): def _copy_file(self, db_file, archive, component, allow_tainted=False): """Copy a file to the given archive and component - Args: - db_file (daklib.dbconn.PoolFile): file to copy - archive (daklib.dbconn.Archive): target archive - component (daklib.dbconn.Component): target component + @type db_file: L{daklib.dbconn.PoolFile} + @param db_file: file to copy + + @type archive: L{daklib.dbconn.Archive} + @param archive: target archive + + @type component: L{daklib.dbconn.Archive} + @param component: target component - Kwargs: - allow_tainted (bool): allow to copy from tainted archives (such as NEW) + @type allow_tainted: bool + @param allow_tainted: allow to copy from tainted archives (such as NEW) """ 
session = self.session @@ -368,14 +397,20 @@ class ArchiveTransaction(object): def copy_binary(self, db_binary, suite, component, allow_tainted=False, extra_archives=None): """Copy a binary package to the given suite and component - Args: - db_binary (daklib.dbconn.DBBinary): binary to copy - suite (daklib.dbconn.Suite): target suite - component (daklib.dbconn.Component): target component + @type db_binary: L{daklib.dbconn.DBBinary} + @param db_binary: binary to copy + + @type suite: L{daklib.dbconn.Suite} + @param suite: target suite + + @type component: L{daklib.dbconn.Component} + @param component: target component + + @type allow_tainted: bool + @param allow_tainted: allow to copy from tainted archives (such as NEW) - Kwargs: - allow_tainted (bool): allow to copy from tainted archives (such as NEW) - extra_archives (list of daklib.dbconn.Archive): extra archives to copy Built-Using sources from + @type extra_archives: list of L{daklib.dbconn.Archive} + @param extra_archives: extra archives to copy Built-Using sources from """ session = self.session archive = suite.archive @@ -403,13 +438,17 @@ class ArchiveTransaction(object): def copy_source(self, db_source, suite, component, allow_tainted=False): """Copy a source package to the given suite and component - Args: - db_source (daklib.dbconn.DBSource): source to copy - suite (daklib.dbconn.Suite): target suite - component (daklib.dbconn.Component): target component + @type db_source: L{daklib.dbconn.DBSource} + @param db_source: source to copy - Kwargs: - allow_tainted (bool): allow to copy from tainted archives (such as NEW) + @type suite: L{daklib.dbconn.Suite} + @param suite: target suite + + @type component: L{daklib.dbconn.Component} + @param component: target component + + @type allow_tainted: bool + @param allow_tainted: allow to copy from tainted archives (such as NEW) """ archive = suite.archive if archive.tainted: @@ -423,10 +462,14 @@ class ArchiveTransaction(object): def remove_file(self, db_file, 
archive, component): """Remove a file from a given archive and component - Args: - db_file (daklib.dbconn.PoolFile): file to remove - archive (daklib.dbconn.Archive): archive to remove the file from - component (daklib.dbconn.Component): component to remove the file from + @type db_file: L{daklib.dbconn.PoolFile} + @param db_file: file to remove + + @type archive: L{daklib.dbconn.Archive} + @param archive: archive to remove the file from + + @type component: L{daklib.dbconn.Component} + @param component: component to remove the file from """ af = self.session.query(ArchiveFile).filter_by(file=db_file, archive=archive, component=component) self.fs.unlink(af.path) @@ -435,9 +478,11 @@ class ArchiveTransaction(object): def remove_binary(self, binary, suite): """Remove a binary from a given suite and component - Args: - binary (daklib.dbconn.DBBinary): binary to remove - suite (daklib.dbconn.Suite): suite to remove the package from + @type binary: L{daklib.dbconn.DBBinary} + @param binary: binary to remove + + @type suite: L{daklib.dbconn.Suite} + @param suite: suite to remove the package from """ binary.suites.remove(suite) self.session.flush() @@ -445,13 +490,14 @@ class ArchiveTransaction(object): def remove_source(self, source, suite): """Remove a source from a given suite and component - Raises: - ArchiveException: source package is still referenced by other - binaries in the suite + @type source: L{daklib.dbconn.DBSource} + @param source: source to remove + + @type suite: L{daklib.dbconn.Suite} + @param suite: suite to remove the package from - Args: - binary (daklib.dbconn.DBSource): source to remove - suite (daklib.dbconn.Suite): suite to remove the package from + @raise ArchiveException: source package is still referenced by other + binaries in the suite """ session = self.session @@ -490,40 +536,60 @@ class ArchiveTransaction(object): class ArchiveUpload(object): """handle an upload - This class can be used in a with-statement: + This class can be used in a 
with-statement:: with ArchiveUpload(...) as upload: ... Doing so will automatically run any required cleanup and also rollback the transaction if it was not committed. - - Attributes: - changes (daklib.upload.Changes): upload to process - directory (str): directory with temporary copy of files. set by `prepare` - fingerprint (daklib.dbconn.Fingerprint): fingerprint used to sign the upload - new (bool): upload is NEW. set by `check` - reject_reasons (list of str): reasons why the upload cannot be accepted - session: database session - transaction (daklib.archive.ArchiveTransaction): transaction used to handle the upload - warnings (list of str): warnings (NOT USED YET) """ def __init__(self, directory, changes, keyrings): self.transaction = ArchiveTransaction() + """transaction used to handle the upload + @type: L{daklib.archive.ArchiveTransaction} + """ + self.session = self.transaction.session + """database session""" self.original_directory = directory self.original_changes = changes + self.changes = None + """upload to process + @type: L{daklib.upload.Changes} + """ + self.directory = None + """directory with temporary copy of files. set by C{prepare} + @type: str + """ + self.keyrings = keyrings self.fingerprint = self.session.query(Fingerprint).filter_by(fingerprint=changes.primary_fingerprint).one() + """fingerprint of the key used to sign the upload + @type: L{daklib.dbconn.Fingerprint} + """ self.reject_reasons = [] + """reasons why the upload cannot be accepted + @type: list of str + """ + self.warnings = [] + """warnings + @note: Not used yet. + @type: list of str + """ + self.final_suites = None + self.new = False + """upload is NEW. set by C{check} + @type: bool + """ self._new_queue = self.session.query(PolicyQueue).filter_by(queue_name='new').one() self._new = self._new_queue.suite @@ -533,9 +599,9 @@ class ArchiveUpload(object): This copies the files involved to a temporary directory. 
If you use this method directly, you have to remove the directory given by the - `directory` attribute later on your own. + C{directory} attribute later on your own. - Instead of using the method directly, you can also use a with-statement: + Instead of using the method directly, you can also use a with-statement:: with ArchiveUpload(...) as upload: ... @@ -575,12 +641,12 @@ class ArchiveUpload(object): """Path to unpacked source Get path to the unpacked source. This method does unpack the source - into a temporary directory under `self.directory` if it has not + into a temporary directory under C{self.directory} if it has not been done so already. - Returns: - String giving the path to the unpacked source directory - or None if no source was included in the upload. + @rtype: str or C{None} + @return: string giving the path to the unpacked source directory + or C{None} if no source was included in the upload. """ assert self.directory is not None @@ -621,8 +687,8 @@ class ArchiveUpload(object): def _mapped_suites(self): """Get target suites after mappings - Returns: - list of daklib.dbconn.Suite giving the mapped target suites of this upload + @rtype: list of L{daklib.dbconn.Suite} + @return: list giving the mapped target suites of this upload """ session = self.session @@ -641,14 +707,14 @@ class ArchiveUpload(object): Evaluate component mappings from ComponentMappings in dak.conf for the given component name. - NOTE: ansgar wants to get rid of this. It's currently only used for - the security archive + @todo: ansgar wants to get rid of this. 
It's currently only used for + the security archive - Args: - component_name (str): component name + @type component_name: str + @param component_name: component name - Returns: - `daklib.dbconn.Component` object + @rtype: L{daklib.dbconn.Component} + @return: component after applying maps """ cnf = Config() for m in cnf.value_list("ComponentMappings"): @@ -662,11 +728,11 @@ class ArchiveUpload(object): """Check if upload is NEW An upload is NEW if it has binary or source packages that do not have - an override in `suite` OR if it references files ONLY in a tainted + an override in C{suite} OR if it references files ONLY in a tainted archive (eg. when it references files in NEW). - Returns: - True if the upload is NEW, False otherwise + @rtype: bool + @return: C{True} if the upload is NEW, C{False} otherwise """ session = self.session @@ -714,12 +780,14 @@ class ArchiveUpload(object): def _binary_override(self, suite, binary): """Get override entry for a binary - Args: - suite (daklib.dbconn.Suite) - binary (daklib.upload.Binary) + @type suite: L{daklib.dbconn.Suite} + @param suite: suite to get override for - Returns: - daklib.dbconn.Override or None + @type binary: L{daklib.upload.Binary} + @param binary: binary to get override for + + @rtype: L{daklib.dbconn.Override} or C{None} + @return: override for the given binary or C{None} """ if suite.overridesuite is not None: suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one() @@ -736,12 +804,14 @@ class ArchiveUpload(object): def _source_override(self, suite, source): """Get override entry for a source - Args: - suite (daklib.dbconn.Suite) - source (daklib.upload.Source) + @type suite: L{daklib.dbconn.Suite} + @param suite: suite to get override for + + @type source: L{daklib.upload.Source} + @param source: source to get override for - Returns: - daklib.dbconn.Override or None + @rtype: L{daklib.dbconn.Override} or C{None} + @return: override for the given source or C{None} """ if 
suite.overridesuite is not None: suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one() @@ -759,18 +829,17 @@ class ArchiveUpload(object): """get component for a binary By default this will only look at overrides to get the right component; - if `only_overrides` is False this method will also look at the Section field. + if C{only_overrides} is C{False} this method will also look at the + Section field. - Args: - suite (daklib.dbconn.Suite) - binary (daklib.upload.Binary) + @type suite: L{daklib.dbconn.Suite} - Kwargs: - only_overrides (bool): only use overrides to get the right component. - defaults to True. + @type binary: L{daklib.upload.Binary} - Returns: - `daklib.dbconn.Component` object or None + @type only_overrides: bool + @param only_overrides: only use overrides to get the right component + + @rtype: L{daklib.dbconn.Component} or C{None} """ override = self._binary_override(suite, binary) if override is not None: @@ -782,11 +851,11 @@ class ArchiveUpload(object): def check(self, force=False): """run checks against the upload - Args: - force (bool): ignore failing forcable checks + @type force: bool + @param force: ignore failing forcable checks - Returns: - True if all checks passed, False otherwise + @rtype: bool + @return: C{True} if all checks passed, C{False} otherwise """ # XXX: needs to be better structured. assert self.changes.valid_signature @@ -835,23 +904,24 @@ class ArchiveUpload(object): def _install_to_suite(self, suite, source_component_func, binary_component_func, source_suites=None, extra_source_archives=None): """Install upload to the given suite - Args: - suite (daklib.dbconn.Suite): suite to install the package into. - This is the real suite, ie. 
after any redirection to NEW or a policy queue - source_component_func: function to get the `daklib.dbconn.Component` - for a `daklib.upload.Source` object - binary_component_func: function to get the `daklib.dbconn.Component` - for a `daklib.upload.Binary` object - - Kwargs: - source_suites: see `daklib.archive.ArchiveTransaction.install_binary` - extra_source_archives: see `daklib.archive.ArchiveTransaction.install_binary` - - Returns: - tuple with two elements. The first is a `daklib.dbconn.DBSource` - object for the install source or None if no source was included. - The second is a list of `daklib.dbconn.DBBinary` objects for the - installed binary packages. + @type suite: L{daklib.dbconn.Suite} + @param suite: suite to install the package into. This is the real suite, + ie. after any redirection to NEW or a policy queue + + @param source_component_func: function to get the L{daklib.dbconn.Component} + for a L{daklib.upload.Source} object + + @param binary_component_func: function to get the L{daklib.dbconn.Component} + for a L{daklib.upload.Binary} object + + @param source_suites: see L{daklib.archive.ArchiveTransaction.install_binary} + + @param extra_source_archives: see L{daklib.archive.ArchiveTransaction.install_binary} + + @return: tuple with two elements. The first is a L{daklib.dbconn.DBSource} + object for the install source or C{None} if no source was + included. The second is a list of L{daklib.dbconn.DBBinary} + objects for the installed binary packages. """ # XXX: move this function to ArchiveTransaction? @@ -934,8 +1004,8 @@ class ArchiveUpload(object): Try to handle byhand packages automatically. 
- Returns: - list of `daklib.upload.hashed_file` for the remaining byhand packages + @rtype: list of L{daklib.upload.HashedFile} + @return: list of remaining byhand files """ assert len(self.reject_reasons) == 0 assert self.changes.valid_signature @@ -982,10 +1052,11 @@ class ArchiveUpload(object): return len(remaining) == 0 def _install_byhand(self, policy_queue_upload, hashed_file): - """ - Args: - policy_queue_upload (daklib.dbconn.PolicyQueueUpload): XXX - hashed_file (daklib.upload.HashedFile): XXX + """install byhand file + + @type policy_queue_upload: L{daklib.dbconn.PolicyQueueUpload} + + @type hashed_file: L{daklib.upload.HashedFile} """ fs = self.transaction.fs session = self.transaction.session @@ -1041,10 +1112,10 @@ class ArchiveUpload(object): def install(self): """install upload - Install upload to a suite or policy queue. This method does *not* + Install upload to a suite or policy queue. This method does B{not} handle uploads to NEW. - You need to have called the `check` method before calling this method. + You need to have called the C{check} method before calling this method. """ assert len(self.reject_reasons) == 0 assert self.changes.valid_signature @@ -1082,10 +1153,10 @@ class ArchiveUpload(object): def install_to_new(self): """install upload to NEW - Install upload to NEW. This method does *not* handle regular uploads + Install upload to NEW. This method does B{not} handle regular uploads to suites or policy queues. - You need to have called the `check` method before calling this method. + You need to have called the C{check} method before calling this method. """ # Uploads to NEW are special as we don't have overrides. assert len(self.reject_reasons) == 0 diff --git a/daklib/checks.py b/daklib/checks.py index 260eb18a..06626fd1 100644 --- a/daklib/checks.py +++ b/daklib/checks.py @@ -20,14 +20,14 @@ """module provided pre-acceptance tests -Please read the documentation for the `Check` class for the interface. 
+Please read the documentation for the L{Check} class for the interface. """ from daklib.config import Config -from .dbconn import * +from daklib.dbconn import * import daklib.dbconn as dbconn -from .regexes import * -from .textutils import fix_maintainer, ParseMaintError +from daklib.regexes import * +from daklib.textutils import fix_maintainer, ParseMaintError import daklib.lintian as lintian import daklib.utils as utils @@ -48,37 +48,37 @@ class Reject(Exception): class Check(object): """base class for checks - checks are called by daklib.archive.ArchiveUpload. Failing tests should - raise a `daklib.checks.Reject` exception including a human-readable + checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should + raise a L{daklib.checks.Reject} exception including a human-readable description why the upload should be rejected. """ def check(self, upload): """do checks - Args: - upload (daklib.archive.ArchiveUpload): upload to check + @type upload: L{daklib.archive.ArchiveUpload} + @param upload: upload to check - Raises: - daklib.checks.Reject + @raise daklib.checks.Reject: upload should be rejected """ raise NotImplemented def per_suite_check(self, upload, suite): """do per-suite checks - Args: - upload (daklib.archive.ArchiveUpload): upload to check - suite (daklib.dbconn.Suite): suite to check + @type upload: L{daklib.archive.ArchiveUpload} + @param upload: upload to check - Raises: - daklib.checks.Reject + @type suite: L{daklib.dbconn.Suite} + @param suite: suite to check + + @raise daklib.checks.Reject: upload should be rejected """ raise NotImplemented @property def forcable(self): """allow to force ignore failing test - True if it is acceptable to force ignoring a failing test, - False otherwise + C{True} if it is acceptable to force ignoring a failing test, + C{False} otherwise """ return False diff --git a/daklib/fstransactions.py b/daklib/fstransactions.py index f76a4bc6..33f59c8c 100644 --- a/daklib/fstransactions.py +++ 
b/daklib/fstransactions.py @@ -123,16 +123,22 @@ class FilesystemTransaction(object): self.actions = [] def copy(self, source, destination, link=True, symlink=False, mode=None): - """copy `source` to `destination` + """copy C{source} to C{destination} - Args: - source (str): source file - destination (str): destination file + @type source: str + @param source: source file - Kwargs: - link (bool): Try hardlinking, falling back to copying. - symlink (bool): Create a symlink instead - mode (int): Permissions to change `destination` to. + @type destination: str + @param destination: destination file + + @type link: bool + @param link: try hardlinking, falling back to copying + + @type symlink: bool + @param symlink: create a symlink instead of copying + + @type mode: int + @param mode: permissions to change C{destination} to """ if isinstance(mode, str) or isinstance(mode, unicode): mode = int(mode, 8) @@ -140,37 +146,38 @@ class FilesystemTransaction(object): self.actions.append(_FilesystemCopyAction(source, destination, link=link, symlink=symlink, mode=mode)) def move(self, source, destination, mode=None): - """move `source` to `destination` + """move C{source} to C{destination} + + @type source: str + @param source: source file - Args: - source (str): source file - destination (str): destination file + @type destination: str + @param destination: destination file - Kwargs: - mode (int): Permissions to change `destination` to. 
+ @type mode: int + @param mode: permissions to change C{destination} to """ self.copy(source, destination, link=True, mode=mode) self.unlink(source) def unlink(self, path): - """unlink `path` + """unlink C{path} - Args: - path (str): file to unlink + @type path: str + @param path: file to unlink """ self.actions.append(_FilesystemUnlinkAction(path)) def create(self, path, mode=None): - """create `filename` and return file handle + """create C{filename} and return file handle - Args: - filename (str): file to create + @type filename: str + @param filename: file to create + @type mode: int + @param mode: permissions for the new file - Returns: - file handle of the new file + @return: file handle of the new file """ if isinstance(mode, str) or isinstance(mode, unicode): mode = int(mode, 8) diff --git a/daklib/policy.py b/daklib/policy.py index c989f1f2..78d58d67 100644 --- a/daklib/policy.py +++ b/daklib/policy.py @@ -29,31 +29,36 @@ import tempfile class UploadCopy(object): """export a policy queue upload - This class can be used in a with-statements: + This class can be used in a with-statement:: with UploadCopy(...) as copy: ... Doing so will provide a temporary copy of the upload in the directory - given by the `directory` attribute. The copy will be removed on leaving + given by the C{directory} attribute. The copy will be removed on leaving the with-block. 
- - Args: - upload (daklib.dbconn.PolicyQueueUpload) """ def __init__(self, upload): + """initializer + + @type upload: L{daklib.dbconn.PolicyQueueUpload} + @param upload: upload to handle + """ + self.directory = None self.upload = upload def export(self, directory, mode=None, symlink=True): """export a copy of the upload - Args: - directory (str) + @type directory: str + @param directory: directory to export to - Kwargs: - mode (int): permissions to use for the copied files - symlink (bool): use symlinks instead of copying the files (default: True) + @type mode: int + @param mode: permissions to use for the copied files + + @type symlink: bool + @param symlink: use symlinks instead of copying the files """ with FilesystemTransaction() as fs: source = self.upload.source @@ -103,9 +108,10 @@ class PolicyQueueUploadHandler(object): def __init__(self, upload, session): """initializer - Args: - upload (daklib.dbconn.PolicyQueueUpload): upload to process - session: database session + @type upload: L{daklib.dbconn.PolicyQueueUpload} + @param upload: upload to process + + @param session: database session """ self.upload = upload self.session = session @@ -169,8 +175,8 @@ class PolicyQueueUploadHandler(object): def reject(self, reason): """mark upload as rejected - Args: - reason (str): reason for the rejection + @type reason: str + @param reason: reason for the rejection """ fn1 = 'REJECT.{0}'.format(self._changes_prefix) assert re_file_safe.match(fn1) @@ -190,8 +196,8 @@ class PolicyQueueUploadHandler(object): def get_action(self): """get current action - Returns: - string giving the current action, one of 'ACCEPT', 'ACCEPTED', 'REJECT' + @rtype: str + @return: string giving the current action, one of 'ACCEPT', 'ACCEPTED', 'REJECT' """ changes_prefix = self._changes_prefix @@ -206,18 +212,19 @@ class PolicyQueueUploadHandler(object): def missing_overrides(self, hints=None): """get missing override entries for the upload - Kwargs: - hints (list of dict): suggested hints 
for new overrides in the same - format as the return value - - Returns: - list of dicts with the following keys: - package: package name - priority: default priority (from upload) - section: default section (from upload) - component: default component (from upload) - type: type of required override ('dsc', 'deb' or 'udeb') - All values are strings. + @type hints: list of dict + @param hints: suggested hints for new overrides in the same format as + the return value + + @return: list of dicts with the following keys: + + - package: package name + - priority: default priority (from upload) + - section: default section (from upload) + - component: default component (from upload) + - type: type of required override ('dsc', 'deb' or 'udeb') + + All values are strings. """ # TODO: use Package-List field missing = [] diff --git a/daklib/upload.py b/daklib/upload.py index 5c19478b..b0baf8cb 100644 --- a/daklib/upload.py +++ b/daklib/upload.py @@ -17,7 +17,7 @@ """module to handle uploads not yet installed to the archive This module provides classes to handle uploads not yet installed to the -archive. Central is the `Changes` class which represents a changes file. +archive. Central is the L{Changes} class which represents a changes file. It provides methods to access the included binary and source packages. 
""" @@ -25,8 +25,9 @@ import apt_inst import apt_pkg import os import re -from .gpg import SignedFile -from .regexes import * + +from daklib.gpg import SignedFile +from daklib.regexes import * class InvalidChangesException(Exception): pass @@ -54,35 +55,52 @@ class InvalidFilenameException(Exception): class HashedFile(object): """file with checksums - - Attributes: - filename (str): name of the file - size (long): size in bytes - md5sum (str): MD5 hash in hexdigits - sha1sum (str): SHA1 hash in hexdigits - sha256sum (str): SHA256 hash in hexdigits - section (str): section or None - priority (str): priority or None """ def __init__(self, filename, size, md5sum, sha1sum, sha256sum, section=None, priority=None): self.filename = filename + """name of the file + @type: str + """ + self.size = size + """size in bytes + @type: long + """ + self.md5sum = md5sum + """MD5 hash in hexdigits + @type: str + """ + self.sha1sum = sha1sum + """SHA1 hash in hexdigits + @type: str + """ + self.sha256sum = sha256sum + """SHA256 hash in hexdigits + @type: str + """ + self.section = section + """section or C{None} + @type: str or C{None} + """ + self.priority = priority + """priority or C{None} + @type: str or C{None} + """ def check(self, directory): """Validate hashes Check if size and hashes match the expected value. 
- Args: - directory (str): directory the file is located in + @type directory: str + @param directory: directory the file is located in - Raises: - InvalidHashException: hash mismatch + @raise InvalidHashException: hash mismatch """ path = os.path.join(directory, self.filename) fh = open(path, 'r') @@ -108,15 +126,17 @@ class HashedFile(object): def parse_file_list(control, has_priority_and_section): """Parse Files and Checksums-* fields - Args: - control (dict-like): control file to take fields from - has_priority_and_section (bool): Files include section and priority (as in .changes) + @type control: dict-like + @param control: control file to take fields from + + @type has_priority_and_section: bool + @param has_priority_and_section: Files field includes section and priority + (as in .changes) - Raises: - InvalidChangesException: missing fields or other grave errors + @raise InvalidChangesException: missing fields or other grave errors - Returns: - dictonary mapping filenames to `daklib.upload.HashedFile` objects + @rtype: dict + @return: dict mapping filenames to L{daklib.upload.HashedFile} objects """ entries = {} @@ -172,32 +192,28 @@ def parse_file_list(control, has_priority_and_section): class Changes(object): """Representation of a .changes file - - Attributes: - architectures (list of str): list of architectures included in the upload - binaries (list of daklib.upload.Binary): included binary packages - binary_names (list of str): names of included binary packages - byhand_files (list of daklib.upload.HashedFile): included byhand files - bytes (int): total size of files included in this upload in bytes - changes (dict-like): dict to access fields of the .changes file - closed_bugs (list of str): list of bugs closed by this upload - directory (str): directory the .changes is located in - distributions (list of str): list of target distributions for the upload - filename (str): name of the .changes file - files (dict): dict mapping filenames to 
daklib.upload.HashedFile objects - path (str): path to the .changes files - primary_fingerprint (str): fingerprint of the PGP key used for the signature - source (daklib.upload.Source or None): included source - valid_signature (bool): True if the changes has a valid signature """ def __init__(self, directory, filename, keyrings, require_signature=True): if not re_file_safe.match(filename): raise InvalidChangesException('{0}: unsafe filename'.format(filename)) + self.directory = directory + """directory the .changes is located in + @type: str + """ + self.filename = filename + """name of the .changes file + @type: str + """ + data = open(self.path).read() self._signed_file = SignedFile(data, keyrings, require_signature) self.changes = apt_pkg.TagSection(self._signed_file.contents) + """dict to access fields of the .changes file + @type: dict-like + """ + self._binaries = None self._source = None self._files = None @@ -206,26 +222,44 @@ class Changes(object): @property def path(self): + """path to the .changes file + @type: str + """ return os.path.join(self.directory, self.filename) @property def primary_fingerprint(self): + """fingerprint of the key used for signing the .changes file + @type: str + """ return self._signed_file.primary_fingerprint @property def valid_signature(self): + """C{True} if the .changes has a valid signature + @type: bool + """ return self._signed_file.valid @property def architectures(self): + """list of architectures included in the upload + @type: list of str + """ return self.changes['Architecture'].split() @property def distributions(self): + """list of target distributions for the upload + @type: list of str + """ return self.changes['Distribution'].split() @property def source(self): + """included source or C{None} + @type: L{daklib.upload.Source} or C{None} + """ if self._source is None: source_files = [] for f in self.files.itervalues(): @@ -237,6 +271,9 @@ class Changes(object): @property def binaries(self): + """included binary 
packages + @type: list of L{daklib.upload.Binary} + """ if self._binaries is None: binaries = [] for f in self.files.itervalues(): @@ -247,6 +284,9 @@ class Changes(object): @property def byhand_files(self): + """included byhand files + @type: list of L{daklib.upload.HashedFile} + """ byhand = [] for f in self.files.itervalues(): @@ -260,35 +300,47 @@ class Changes(object): @property def binary_names(self): + """names of included binary packages + @type: list of str + """ return self.changes['Binary'].split() @property def closed_bugs(self): + """bugs closed by this upload + @type: list of str + """ return self.changes.get('Closes', '').split() @property def files(self): + """dict mapping filenames to L{daklib.upload.HashedFile} objects + @type: dict + """ if self._files is None: self._files = parse_file_list(self.changes, True) return self._files @property def bytes(self): + """total size of files included in this upload in bytes + @type: number + """ count = 0 for f in self.files.itervalues(): count += f.size return count def __cmp__(self, other): - """Compare two changes packages + """compare two changes files We sort by source name and version first. If these are identical, we sort changes that include source before those without source (so that sourceful uploads get processed first), and finally fall back to the filename (this should really never happen). 
- Returns: - -1 if self < other, 0 if self == other, 1 if self > other + @rtype: number + @return: n where n < 0 if self < other, n = 0 if self == other, n > 0 if self > other """ ret = cmp(self.changes.get('Source'), other.changes.get('Source')) @@ -313,25 +365,25 @@ class Changes(object): class Binary(object): """Representation of a binary package - - Attributes: - component (str): component name - control (dict-like): dict to access fields in DEBIAN/control - hashed_file (HashedFile): HashedFile object for the .deb """ def __init__(self, directory, hashed_file): self.hashed_file = hashed_file + """file object for the .deb + @type: HashedFile + """ path = os.path.join(directory, hashed_file.filename) data = apt_inst.DebFile(path).control.extractdata("control") + self.control = apt_pkg.TagSection(data) + """dict to access fields in DEBIAN/control + @type: dict-like + """ @property def source(self): - """Get source package name and version - - Returns: - tuple containing source package name and version + """get tuple with source package name and version + @type: tuple of str """ source = self.control.get("Source", None) if source is None: @@ -346,10 +398,8 @@ class Binary(object): @property def type(self): - """Get package type - - Returns: - String with the package type ('deb' or 'udeb') + """package type ('deb' or 'udeb') + @type: str """ match = re_file_binary.match(self.hashed_file.filename) if not match: @@ -358,6 +408,9 @@ class Binary(object): @property def component(self): + """component name + @type: str + """ fields = self.control['Section'].split('/') if len(fields) > 1: return fields[0] @@ -365,18 +418,13 @@ class Binary(object): class Source(object): """Representation of a source package - - Attributes: - component (str): guessed component name. Might be wrong! 
- dsc (dict-like): dict to access fields in the .dsc file - hashed_files (list of daklib.upload.HashedFile): list of source files (including .dsc) - files (dict): dictonary mapping filenames to HashedFile objects for - additional source files (not including .dsc) - primary_fingerprint (str): fingerprint of the PGP key used for the signature - valid_signature (bool): True if the dsc has a valid signature """ def __init__(self, directory, hashed_files, keyrings, require_signature=True): self.hashed_files = hashed_files + """list of source files (including the .dsc itself) + @type: list of L{HashedFile} + """ + self._dsc_file = None for f in hashed_files: if re_file_dsc.match(f.filename): @@ -388,24 +436,46 @@ class Source(object): data = open(dsc_file_path, 'r').read() self._signed_file = SignedFile(data, keyrings, require_signature) self.dsc = apt_pkg.TagSection(self._signed_file.contents) + """dict to access fields in the .dsc file + @type: dict-like + """ + self._files = None @property def files(self): + """dict mapping filenames to L{HashedFile} objects for additional source files + + This list does not include the .dsc itself. + + @type: dict + """ if self._files is None: self._files = parse_file_list(self.dsc, False) return self._files @property def primary_fingerprint(self): + """fingerprint of the key used to sign the .dsc + @type: str + """ return self._signed_file.primary_fingerprint @property def valid_signature(self): + """C{True} if the .dsc has a valid signature + @type: bool + """ return self._signed_file.valid @property def component(self): + """guessed component name + + Might be wrong. Don't rely on this. 
+ + @type: str + """ if 'Section' not in self.dsc: return 'main' fields = self.dsc['Section'].split('/') diff --git a/daklib/utils.py b/daklib/utils.py index cd5eb791..4ee00a1c 100644 --- a/daklib/utils.py +++ b/daklib/utils.py @@ -1550,7 +1550,6 @@ def get_packages_from_ftp(root, suite, component, architecture): @rtype: TagFile @return: apt_pkg class containing package data - """ filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture) (fd, temp_file) = temp_filename() @@ -1576,15 +1575,20 @@ def deb_extract_control(fh): ################################################################################ def mail_addresses_for_upload(maintainer, changed_by, fingerprint): - """Mail addresses to contact for an upload + """mail addresses to contact for an upload + + @type maintainer: str + @param maintainer: Maintainer field of the .changes file - Args: - maintainer (str): Maintainer field of the changes file - changed_by (str): Changed-By field of the changes file - fingerprint (str): Fingerprint of the PGP key used to sign the upload + @type changed_by: str + @param changed_by: Changed-By field of the .changes file - Returns: - List of RFC 2047-encoded mail addresses to contact regarding this upload + @type fingerprint: str + @param fingerprint: fingerprint of the key used to sign the upload + + @rtype: list of str + @return: list of RFC 2047-encoded mail addresses to contact regarding + this upload """ addresses = [maintainer] if changed_by != maintainer: @@ -1600,14 +1604,16 @@ def mail_addresses_for_upload(maintainer, changed_by, fingerprint): ################################################################################ def call_editor(text="", suffix=".txt"): - """Run editor and return the result as a string + """run editor and return the result as a string + + @type text: str + @param text: initial text - Kwargs: - text (str): initial text - suffix (str): extension for temporary file + @type suffix: str + @param suffix: 
extension for temporary file - Returns: - string with the edited text + @rtype: str + @return: string with the edited text """ editor = os.environ.get('VISUAL', os.environ.get('EDITOR', 'vi')) tmp = tempfile.NamedTemporaryFile(suffix=suffix, delete=False) -- 2.39.2