git.decadent.org.uk Git - dak.git/commitdiff
Merge remote-tracking branch 'origin/master' into version-checks
author    Ansgar Burchardt <ansgar@debian.org>
          Thu, 24 Mar 2011 19:33:06 +0000 (20:33 +0100)
committer Ansgar Burchardt <ansgar@debian.org>
          Thu, 24 Mar 2011 19:33:06 +0000 (20:33 +0100)
Conflicts:
dak/update_db.py

daklib/dbconn.py
daklib/queue.py

diff --combined daklib/dbconn.py
index bd2356176e49e3d75fce13f67e6bed005dff71ec,47e2b130645fd582df973432bf33cb4c6d7bb04d..fe04ebc3df4c90e1f1bf13a4c0ddb3966bab3d1c
@@@ -2195,6 -2195,18 +2195,18 @@@ __all__.append('get_sections'
  
  ################################################################################
  
+ class SrcContents(ORMObject):
+     def __init__(self, file = None, source = None):
+         self.file = file
+         self.source = source
+ 
+     def properties(self):
+         return ['file', 'source']
+ 
+ __all__.append('SrcContents')
+ 
+ ################################################################################
+ 
  from debian.debfile import Deb822
  
  # Temporary Deb822 subclass to fix bugs with : handling; see #597249
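A note on the new table: each SrcContents row pairs one normalized path name with one source package, and the mapper added further down in this diff gives DBSource a dynamic 'contents' backref. A minimal usage sketch, assuming an open DBConn session and an illustrative package name:

    # Hypothetical sketch: list a few of the paths shipped by a source package.
    from daklib.dbconn import DBConn, DBSource

    session = DBConn().session()
    source = session.query(DBSource).filter_by(source='hello').first()
    if source is not None:
        # 'contents' is the dynamic backref from the SrcContents mapper,
        # so it behaves like a query and can be limited before iterating.
        for sc in source.contents.limit(10):
            print sc.file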
@@@ -2266,7 -2278,7 +2278,7 @@@ class DBSource(ORMObject)
      def properties(self):
          return ['source', 'source_id', 'maintainer', 'changedby', \
              'fingerprint', 'poolfile', 'version', 'suites_count', \
-             'install_date', 'binaries_count']
+             'install_date', 'binaries_count', 'uploaders_count']
  
      def not_null_constraints(self):
          return ['source', 'version', 'install_date', 'maintainer', \
  
      metadata = association_proxy('key', 'value')
  
+     def scan_contents(self):
+         '''
+         Returns a set of names of non-directories. The path names are
+         normalized after being converted from either utf-8 or iso8859-1
+         encoding.
+         '''
+         fullpath = self.poolfile.fullpath
+         from daklib.contents import UnpackedSource
+         unpacked = UnpackedSource(fullpath)
+         fileset = set()
+         for name in unpacked.get_all_filenames():
+             # enforce proper utf-8 encoding
+             try:
+                 name.decode('utf-8')
+             except UnicodeDecodeError:
+                 name = name.decode('iso8859-1').encode('utf-8')
+             fileset.add(name)
+         return fileset
+ 
  __all__.append('DBSource')
  
  @session_wrapper
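The encoding fallback in scan_contents() can be exercised on its own. A minimal sketch (Python 2, matching the codebase; the sample byte strings are illustrative):

    def enforce_utf8(name):
        # Keep names that already decode as utf-8; anything else is
        # reinterpreted as iso8859-1, which never fails to decode.
        try:
            name.decode('utf-8')
            return name
        except UnicodeDecodeError:
            return name.decode('iso8859-1').encode('utf-8')

    assert enforce_utf8('caf\xc3\xa9') == 'caf\xc3\xa9'  # already valid utf-8
    assert enforce_utf8('caf\xe9') == 'caf\xc3\xa9'      # iso8859-1 re-encoded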
@@@ -2540,25 -2571,11 +2571,11 @@@ def add_dsc_to_db(u, filename, session=
          session.add(df)
  
      # Add the src_uploaders to the DB
-     uploader_ids = [source.maintainer_id]
+     source.uploaders = [source.maintainer]
      if u.pkg.dsc.has_key("uploaders"):
          for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
              up = up.strip()
-             uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
-     added_ids = {}
-     for up_id in uploader_ids:
-         if added_ids.has_key(up_id):
-             import utils
-             utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
-             continue
-         added_ids[up_id]=1
-         su = SrcUploader()
-         su.maintainer_id = up_id
-         su.source_id = source.source_id
-         session.add(su)
+             source.uploaders.append(get_or_set_maintainer(up, session))
  
      session.flush()
  
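The rewrite above leans on the new many-to-many 'uploaders' relation (declared later in this diff with secondary=self.tbl_src_uploaders), so SQLAlchemy maintains the link table instead of hand-built SrcUploader rows. A sketch of the resulting calling pattern, with an illustrative uploader address:

    # Assigning or appending mapped Maintainer objects is enough; on flush,
    # SQLAlchemy emits one src_uploaders row per maintainer in the list.
    source.uploaders = [source.maintainer]
    source.uploaders.append(
        get_or_set_maintainer('Jane Doe <jane@example.org>', session))
    session.flush()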
@@@ -2660,17 -2677,6 +2677,6 @@@ __all__.append('SrcFormat'
  
  ################################################################################
  
- class SrcUploader(object):
-     def __init__(self, *args, **kwargs):
-         pass
- 
-     def __repr__(self):
-         return '<SrcUploader %s>' % self.uploader_id
- 
- __all__.append('SrcUploader')
- 
- ################################################################################
- 
  SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                   ('SuiteID', 'suite_id'),
                   ('Version', 'version'),
@@@ -3021,33 -3027,6 +3027,33 @@@ __all__.append('SourceMetadata'
  
  ################################################################################
  
 +class VersionCheck(ORMObject):
 +    def __init__(self, *args, **kwargs):
 +        pass
 +
 +    def properties(self):
 +        #return ['suite_id', 'check', 'reference_id']
 +        return ['check']
 +
 +    def not_null_constraints(self):
 +        return ['suite', 'check', 'reference']
 +
 +__all__.append('VersionCheck')
 +
 +@session_wrapper
 +def get_version_checks(suite_name, check = None, session = None):
 +    suite = get_suite(suite_name, session)
 +    if not suite:
 +        return None
 +    q = session.query(VersionCheck).filter_by(suite=suite)
 +    if check:
 +        q = q.filter_by(check=check)
 +    return q.all()
 +
 +__all__.append('get_version_checks')
 +
 +################################################################################
 +
  class DBConn(object):
      """
      database module init.
              'source_acl',
              'source_metadata',
              'src_associations',
+             'src_contents',
              'src_format',
              'src_uploaders',
              'suite',
              'suite_src_formats',
              'uid',
              'upload_blocks',
 +            'version_check',
          )
  
          views = (
                                                       primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                   suites = relation(Suite, secondary=self.tbl_src_associations,
                                       backref=backref('sources', lazy='dynamic')),
-                                  srcuploaders = relation(SrcUploader),
+                                  uploaders = relation(Maintainer,
+                                      secondary=self.tbl_src_uploaders),
                                   key = relation(SourceMetadata, cascade='all',
                                       collection_class=attribute_mapped_collection('key'))),
                 extension = validator)
                 properties = dict(src_format_id = self.tbl_src_format.c.id,
                                   format_name = self.tbl_src_format.c.format_name))
  
-         mapper(SrcUploader, self.tbl_src_uploaders,
-                properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
-                                  source_id = self.tbl_src_uploaders.c.source,
-                                  source = relation(DBSource,
-                                                    primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
-                                  maintainer_id = self.tbl_src_uploaders.c.maintainer,
-                                  maintainer = relation(Maintainer,
-                                                        primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
          mapper(Suite, self.tbl_suite,
                 properties = dict(suite_id = self.tbl_suite.c.id,
                                   policy_queue = relation(PolicyQueue),
                      backref=backref('contents', lazy='dynamic', cascade='all')),
                  file = self.tbl_bin_contents.c.file))
  
+         mapper(SrcContents, self.tbl_src_contents,
+             properties = dict(
+                 source = relation(DBSource,
+                     backref=backref('contents', lazy='dynamic', cascade='all')),
+                 file = self.tbl_src_contents.c.file))
+ 
          mapper(MetadataKey, self.tbl_metadata_keys,
              properties = dict(
                  key_id = self.tbl_metadata_keys.c.key_id,
                  key = relation(MetadataKey),
                  value = self.tbl_source_metadata.c.value))
  
 +        mapper(VersionCheck, self.tbl_version_check,
 +            properties = dict(
 +                suite_id = self.tbl_version_check.c.suite,
 +                suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
 +                reference_id = self.tbl_version_check.c.reference,
 +                reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
 +
      ## Connection functions
      def __createconn(self):
          from config import Config
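Taken together, the VersionCheck mapper and get_version_checks() replace the old Suite::*::VersionChecks configuration lookups. A usage sketch with an illustrative suite name (the check names 'Enhances', 'MustBeNewerThan' and 'MustBeOlderThan' are the ones used in daklib/queue.py below):

    from daklib.dbconn import get_version_checks

    # get_version_checks() returns None for an unknown suite, otherwise a
    # list of VersionCheck rows whose 'reference' relation is a Suite.
    for vc in get_version_checks('unstable', 'MustBeNewerThan') or []:
        print 'unstable must be newer than %s' % vc.reference.suite_name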
diff --combined daklib/queue.py
index a1ab43ef531774e2f697ad360db264b3b765d508,213dbd58cc6f3a1b1c66d3b6adff814d865d9a62..b7eba9537aba8fbabb8883cb0e9c4a9726af47b2
@@@ -51,7 -51,7 +51,7 @@@ from holding import Holdin
  from urgencylog import UrgencyLog
  from dbconn import *
  from summarystats import SummaryStats
- from utils import parse_changes, check_dsc_files
+ from utils import parse_changes, check_dsc_files, build_package_set
  from textutils import fix_maintainer
  from lintian import parse_lintian_output, generate_reject_messages
  from contents import UnpackedSource
@@@ -102,7 -102,7 +102,7 @@@ def get_type(f, session)
  
  # Determine what parts in a .changes are NEW
  
- def determine_new(filename, changes, files, warn=1, session = None):
+ def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = {}):
      """
      Determine what parts in a C{changes} file are NEW.
  
      @type warn: bool
      @param warn: Warn if overrides are added for (old)stable
  
+     @type dsc: Upload.Pkg.dsc dict
+     @param dsc: (optional) Dsc dictionary
+ 
+     @type new: dict
+     @param new: new packages as returned by a previous call to this
+         function, but override information may have changed
+ 
      @rtype: dict
      @return: dictionary of NEW components.
  
      """
      # TODO: This should all use the database instead of parsing the changes
      # file again
-     new = {}
      byhand = {}
  
      dbchg = get_dbchange(filename, session)
      if dbchg is None:
          print "Warning: cannot find changes file in database; won't check byhand"
  
+     # Try to get the Package-Set field from an included .dsc file (if possible).
+     if dsc:
+         for package, entry in build_package_set(dsc, session).items():
+             if not new.has_key(package):
+                 new[package] = entry
+ 
      # Build up a list of potentially new things
      for name, f in files.items():
          # Keep a record of byhand elements
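The new 'dsc' and 'new' parameters let a caller merge the Package-Set field of an included .dsc into the result of an earlier pass; packages already present keep their entries, whose override information may have been refreshed in between. A hedged sketch of the calling convention (file and object names are illustrative):

    # First pass: no .dsc parsed yet.
    new = determine_new('foo.changes', changes, files, session=session)
    # Second pass: feed the previous result back in together with the .dsc,
    # so Package-Set entries are added only for packages not seen before.
    new = determine_new('foo.changes', changes, files, session=session,
                        dsc=upload.pkg.dsc, new=new)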
@@@ -1105,40 -1116,76 +1116,76 @@@ class Upload(object)
                  self.rejects.append("source only uploads are not supported.")
  
      ###########################################################################
-     def check_dsc(self, action=True, session=None):
-         """Returns bool indicating whether or not the source changes are valid"""
-         # Ensure there is source to check
-         if not self.pkg.changes["architecture"].has_key("source"):
-             return True
  
-         # Find the .dsc
+     def __dsc_filename(self):
+         """
+         Returns: (Status, Dsc_Filename)
+         where
+           Status: Boolean; True when there was no error, False otherwise
+           Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
+         """
          dsc_filename = None
-         for f, entry in self.pkg.files.items():
-             if entry["type"] == "dsc":
+         # find the dsc
+         for name, entry in self.pkg.files.items():
+             if entry.has_key("type") and entry["type"] == "dsc":
                  if dsc_filename:
-                     self.rejects.append("can not process a .changes file with multiple .dsc's.")
-                     return False
+                     return False, "cannot process a .changes file with multiple .dsc's."
                  else:
-                     dsc_filename = f
+                     dsc_filename = name
  
-         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
          if not dsc_filename:
-             self.rejects.append("source uploads must contain a dsc file")
-             return False
+             return False, "source uploads must contain a dsc file"
+         return True, dsc_filename
+ 
+     def load_dsc(self, action=True, signing_rules=1):
+         """
+         Find and load the dsc from self.pkg.files into self.dsc
+         Returns: (Status, Reason)
+         where
+           Status: Boolean; True when there was no error, False otherwise
+           Reason: String; When Status is False this describes the error
+         """
+         # find the dsc
+         (status, dsc_filename) = self.__dsc_filename()
+         if not status:
+             # If status is false, dsc_filename has the reason
+             return False, dsc_filename
  
-         # Parse the .dsc file
          try:
-             self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
+             self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
          except CantOpenError:
-             # if not -n copy_to_holding() will have done this for us...
              if not action:
-                 self.rejects.append("%s: can't read file." % (dsc_filename))
+                 return False, "%s: can't read file." % (dsc_filename)
          except ParseChangesError, line:
-             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
+             return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
          except InvalidDscError, line:
-             self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
+             return False, "%s: syntax error on line %s." % (dsc_filename, line)
          except ChangesUnicodeError:
-             self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
+             return False, "%s: dsc file not proper utf-8." % (dsc_filename)
+         return True, None
+ 
+     ###########################################################################
+     def check_dsc(self, action=True, session=None):
+         """Returns bool indicating whether or not the source changes are valid"""
+         # Ensure there is source to check
+         if not self.pkg.changes["architecture"].has_key("source"):
+             return True
+         (status, reason) = self.load_dsc(action=action)
+         if not status:
+             self.rejects.append(reason)
+             return False
+         (status, dsc_filename) = self.__dsc_filename()
+         if not status:
+             # If status is false, dsc_filename has the reason
+             self.rejects.append(dsc_filename)
+             return False
  
          # Build up the file list of files mentioned by the .dsc
          try:
  
          # If we do not have a tagfile, don't do anything
          tagfile = cnf.get("Dinstall::LintianTags")
-         if tagfile is None:
+         if not tagfile:
              return
  
          # Parse the yaml file
          # Check any one-off upload blocks
          self.check_upload_blocks(fpr, session)
  
-         # Start with DM as a special case
+         # If the source_acl is None, source is never allowed
+         if fpr.source_acl is None:
+             if self.pkg.changes["architecture"].has_key("source"):
+                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
+                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
+                 self.rejects.append(rej)
+                 return
+ 
+         # Do DM as a special case
          # DM is a special case unfortunately, so we check it first
          # (keys with no source access get more access than DMs in one
          #  way; DMs can only upload for their packages whether source
          #  or binary, whereas keys with no access might be able to
          #  upload some binaries)
-         if fpr.source_acl.access_level == 'dm':
+         elif fpr.source_acl.access_level == 'dm':
              self.check_dm_upload(fpr, session)
          else:
-             # Check source-based permissions for other types
-             if self.pkg.changes["architecture"].has_key("source") and \
-                 fpr.source_acl.access_level is None:
-                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
-                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
-                 self.rejects.append(rej)
-                 return
              # If not a DM, we allow full upload rights
              uid_email = "%s@debian.org" % (fpr.uid.uid)
              self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
  
          if len(tmparches.keys()) > 0:
              if fpr.binary_reject:
-                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
-                 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
+                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
+                 if len(tmparches.keys()) == 1:
+                     rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
+                 else:
+                     rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
                  self.rejects.append(rej)
              else:
                  # TODO: This is where we'll implement reject vs throw away binaries later
          ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
          ## non-developer maintainers cannot NMU or hijack packages)
  
-         # srcuploaders includes the maintainer
+         # uploaders includes the maintainer
          accept = False
-         for sup in r.srcuploaders:
-             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
+         for uploader in r.uploaders:
+             (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
              # Eww - I hope we never have two people with the same name in Debian
              if email == fpr.uid.uid or name == fpr.uid.name:
                  accept = True
@@@ -2467,7 -2517,7 +2517,7 @@@ distribution.""
          """
          Cnf = Config()
          anyversion = None
 -        anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
 +        anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
          for (s, v) in sv_list:
              if s in [ x.lower() for x in anysuite ]:
                  if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
  
          # Check versions for each target suite
          for target_suite in self.pkg.changes["distribution"].keys():
 -            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
 -            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
 +            must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
 +            must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]
  
              # Enforce "must be newer than target suite" even if conffile omits it
              if target_suite not in must_be_newer_than:
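For reference, the anyversion loop above keeps the highest version seen across a suite and everything its 'Enhances' version checks pull in; apt_pkg.VersionCompare() returns a negative, zero or positive value like cmp(). A small standalone illustration (version strings are made up):

    import apt_pkg
    apt_pkg.init()

    best = None
    for v in ['1.0-1', '1.0-2', '0.9-1']:
        # VersionCompare(a, b) > 0 when a is newer than b in Debian ordering
        if best is None or apt_pkg.VersionCompare(best, v) <= 0:
            best = v
    print best  # -> 1.0-2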