X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fqueue.py;h=c7785adeb0ac81ec1bf133c5e322e3e1d2ada758;hb=7ba4ac6dfc9ec4473e99b814a35add978cc00264;hp=bbbf1dfa1fe7949e94491361a30c573d19a692d9;hpb=af4d6b256683e0e102b91a05649f97a81988f5b3;p=dak.git diff --git a/daklib/queue.py b/daklib/queue.py index bbbf1dfa..c7785ade 100755 --- a/daklib/queue.py +++ b/daklib/queue.py @@ -51,10 +51,10 @@ from holding import Holding from urgencylog import UrgencyLog from dbconn import * from summarystats import SummaryStats -from utils import parse_changes, check_dsc_files +from utils import parse_changes, check_dsc_files, build_package_set from textutils import fix_maintainer -from binary import Binary from lintian import parse_lintian_output, generate_reject_messages +from contents import UnpackedSource ############################################################################### @@ -77,6 +77,9 @@ def get_type(f, session): file_type = f["dbtype"] elif re_source_ext.match(f["type"]): file_type = "dsc" + elif f['architecture'] == 'source' and f["type"] == 'unreadable': + utils.warn('unreadable source file (will continue and hope for the best)') + return f["type"] else: file_type = f["type"] utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type)) @@ -92,7 +95,7 @@ def get_type(f, session): # Determine what parts in a .changes are NEW -def determine_new(filename, changes, files, warn=1, session = None): +def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None): """ Determine what parts in a C{changes} file are NEW. @@ -108,19 +111,32 @@ def determine_new(filename, changes, files, warn=1, session = None): @type warn: bool @param warn: Warn if overrides are added for (old)stable + @type dsc: Upload.Pkg.dsc dict + @param dsc: (optional); Dsc dictionary + + @type new: dict + @param new: new packages as returned by a previous call to this function, but override information may have changed + @rtype: dict @return: dictionary of NEW components. """ # TODO: This should all use the database instead of parsing the changes # file again - new = {} byhand = {} + if new is None: + new = {} dbchg = get_dbchange(filename, session) if dbchg is None: print "Warning: cannot find changes file in database; won't check byhand" + # Try to get the Package-Set field from an included .dsc file (if possible). 
+    if dsc:
+        for package, entry in build_package_set(dsc, session).items():
+            if not new.has_key(package):
+                new[package] = entry
+
     # Build up a list of potentially new things
     for name, f in files.items():
         # Keep a record of byhand elements
@@ -272,10 +288,10 @@ class TarTime(object):
         self.future_files = {}
         self.ancient_files = {}
 
-    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
-        if MTime > self.future_cutoff:
-            self.future_files[Name] = MTime
-        if MTime < self.past_cutoff:
-            self.ancient_files[Name] = MTime
+    def callback(self, member, data):
+        if member.mtime > self.future_cutoff:
+            self.future_files[member.name] = member.mtime
+        if member.mtime < self.past_cutoff:
+            self.ancient_files[member.name] = member.mtime
 
 ###############################################################################
 
@@ -427,7 +443,8 @@ class Upload(object):
         cnf = Config()
         self.Subst = {}
         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
-        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
+        if cnf.has_key("Dinstall::BugServer"):
+            self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
 
@@ -734,7 +751,7 @@ class Upload(object):
             self.rejects.append("%s: invalid version number '%s'." % (f, version))
 
         # Ensure the architecture of the .deb is one we know about.
-        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
+        default_suite = cnf.get("Dinstall::DefaultSuite", "unstable")
         architecture = control.Find("Architecture")
         upload_suite = self.pkg.changes["distribution"].keys()[0]
 
@@ -763,6 +780,30 @@ class Upload(object):
             if not re_valid_pkg_name.match(prov):
                 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
 
+        # If there is a Built-Using field, we need to check we can find the
+        # exact source version
+        built_using = control.Find("Built-Using")
+        if built_using:
+            try:
+                entry["built-using"] = []
+                for dep in apt_pkg.parse_depends(built_using):
+                    bu_s, bu_v, bu_e = dep[0]
+                    # Check that it's an exact match dependency and we have
+                    # some form of version
+                    if bu_e != "=" or len(bu_v) < 1:
+                        self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
+                    else:
+                        # Find the source id for this version
+                        bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
+                        if len(bu_so) != 1:
+                            self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
+                        else:
+                            entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
+
+            except ValueError, e:
+                self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
+
+
         # Check the section & priority match those given in the .changes (non-fatal)
         if control.Find("Section") and entry["section"] != "" \
            and entry["section"] != control.Find("Section"):
@@ -856,13 +897,6 @@ class Upload(object):
             # Check the version and for file overwrites
             self.check_binary_against_db(f, session)
 
-        # Temporarily disable contents generation until we change the table storage layout
-        #b = Binary(f)
-        #b.scan_package()
-        #if len(b.rejects) > 0:
-        #    for j in b.rejects:
-        #        self.rejects.append(j)
-
     def source_file_checks(self, f, session):
         entry = self.pkg.files[f]
 
@@ -1074,44 +1108,80 @@ class Upload(object):
         if not has_source:
             self.rejects.append("no source found and Architecture line in changes mention source.")
 
-        if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
+        if (not has_binaries) and (not cnf.FindB("Dinstall::AllowSourceOnlyUploads")):
             self.rejects.append("source only uploads are not supported.")
 
     ###########################################################################
 
-    def check_dsc(self, action=True, session=None):
-        """Returns bool indicating whether or not the source changes are valid"""
-        # Ensure there is source to check
-        if not self.pkg.changes["architecture"].has_key("source"):
-            return True
 
-        # Find the .dsc
+    def __dsc_filename(self):
+        """
+        Returns: (Status, Dsc_Filename)
+        where
+          Status: Boolean; True when there was no error, False otherwise
+          Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
+        """
         dsc_filename = None
-        for f, entry in self.pkg.files.items():
-            if entry["type"] == "dsc":
+
+        # find the dsc
+        for name, entry in self.pkg.files.items():
+            if entry.has_key("type") and entry["type"] == "dsc":
                 if dsc_filename:
-                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
-                    return False
+                    return False, "cannot process a .changes file with multiple .dsc's."
                 else:
-                    dsc_filename = f
+                    dsc_filename = name
 
-        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
         if not dsc_filename:
-            self.rejects.append("source uploads must contain a dsc file")
-            return False
+            return False, "source uploads must contain a dsc file"
+
+        return True, dsc_filename
+
+    def load_dsc(self, action=True, signing_rules=1):
+        """
+        Find and load the dsc from self.pkg.files into self.pkg.dsc
+
+        Returns: (Status, Reason)
+        where
+          Status: Boolean; True when there was no error, False otherwise
+          Reason: String; When Status is False this describes the error
+        """
+
+        # find the dsc
+        (status, dsc_filename) = self.__dsc_filename()
+        if not status:
+            # If status is false, dsc_filename has the reason
+            return False, dsc_filename
 
-        # Parse the .dsc file
         try:
-            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
+            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
         except CantOpenError:
-            # if not -n copy_to_holding() will have done this for us...
             if not action:
-                self.rejects.append("%s: can't read file." % (dsc_filename))
+                return False, "%s: can't read file." % (dsc_filename)
         except ParseChangesError, line:
-            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
+            return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
         except InvalidDscError, line:
-            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
+            return False, "%s: syntax error on line %s." % (dsc_filename, line)
         except ChangesUnicodeError:
-            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
+            return False, "%s: dsc file not proper utf-8." % (dsc_filename)
+
+        return True, None
+
+    ###########################################################################
+
+    def check_dsc(self, action=True, session=None):
+        """Returns bool indicating whether or not the source changes are valid"""
+        # Ensure there is source to check
+        if not self.pkg.changes["architecture"].has_key("source"):
+            return True
+
+        (status, reason) = self.load_dsc(action=action)
+        if not status:
+            self.rejects.append(reason)
+            return False
+        (status, dsc_filename) = self.__dsc_filename()
+        if not status:
+            # If status is false, dsc_filename has the reason
+            self.rejects.append(dsc_filename)
+            return False
 
         # Build up the file list of files mentioned by the .dsc
         try:
@@ -1195,7 +1265,7 @@ class Upload(object):
 
             session.close()
 
-        return True
+        return (len(self.rejects) == 0)
 
     ###########################################################################
 
@@ -1239,14 +1309,13 @@ class Upload(object):
             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
 
         # Extract the source
-        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
-        (result, output) = commands.getstatusoutput(cmd)
-        if (result != 0):
-            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
-            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
+        try:
+            unpacked = UnpackedSource(dsc_filename)
+        except:
+            self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
             return
 
-        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
+        if not cnf.Find("Dir::BTSVersionTrack"):
             return
 
         # Get the upstream version
@@ -1255,19 +1324,19 @@ class Upload(object):
             upstr_version = re_strip_revision.sub('', upstr_version)
 
         # Ensure the changelog file exists
-        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
-        if not os.path.exists(changelog_filename):
+        changelog_file = unpacked.get_changelog_file()
+        if changelog_file is None:
             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
             return
 
         # Parse the changelog
         self.pkg.dsc["bts changelog"] = ""
-        changelog_file = utils.open_file(changelog_filename)
         for line in changelog_file.readlines():
             m = re_changelog_versions.match(line)
             if m:
                 self.pkg.dsc["bts changelog"] += line
         changelog_file.close()
+        unpacked.cleanup()
 
         # Check we found at least one revision in the changelog
         if not self.pkg.dsc["bts changelog"]:
@@ -1469,7 +1538,7 @@ class Upload(object):
 
         # If we do not have a tagfile, don't do anything
         tagfile = cnf.get("Dinstall::LintianTags")
-        if tagfile is None:
+        if not tagfile:
             return
 
         # Parse the yaml file
@@ -1552,19 +1621,8 @@ class Upload(object):
             if entry["type"] == "deb":
                 tar.reset()
                 try:
-                    deb_file = utils.open_file(filename)
-                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
-                    deb_file.seek(0)
-                    try:
-                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
-                    except SystemError, e:
-                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
-                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
-                            raise
-                        deb_file.seek(0)
-                        apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
-
-                    deb_file.close()
+                    deb = apt_inst.DebFile(filename)
+                    deb.control.go(tar.callback)
 
                     future_files = tar.future_files.keys()
                     if future_files:
@@ -1636,22 +1694,22 @@ class Upload(object):
         # Check any one-off upload blocks
        self.check_upload_blocks(fpr, session)
 
-        # Start with DM as a special case
+        # If the source_acl is None, source is never allowed
+        if fpr.source_acl is None:
+            if self.pkg.changes["architecture"].has_key("source"):
+                rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
+                rej += '\nPlease contact ftpmaster if you think this is incorrect'
+                self.rejects.append(rej)
+                return
+        # Do DM as a special case
         # DM is a special case unfortunately, so we check it first
         # (keys with no source access get more access than DMs in one
         # way; DMs can only upload for their packages whether source
         # or binary, whereas keys with no access might be able to
         # upload some binaries)
-        if fpr.source_acl.access_level == 'dm':
+        elif fpr.source_acl.access_level == 'dm':
             self.check_dm_upload(fpr, session)
         else:
-            # Check source-based permissions for other types
-            if self.pkg.changes["architecture"].has_key("source") and \
-               fpr.source_acl.access_level is None:
-                rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
-                rej += '\nPlease contact ftpmaster if you think this is incorrect'
-                self.rejects.append(rej)
-                return
             # If not a DM, we allow full upload rights
             uid_email = "%s@debian.org" % (fpr.uid.uid)
             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
@@ -1673,8 +1731,11 @@ class Upload(object):
 
         if len(tmparches.keys()) > 0:
             if fpr.binary_reject:
-                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
-                rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
+                rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
+                if len(tmparches.keys()) == 1:
+                    rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
+                else:
+                    rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
                 self.rejects.append(rej)
             else:
                 # TODO: This is where we'll implement reject vs throw away binaries later
@@ -1743,10 +1804,10 @@ class Upload(object):
         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
         ## non-developer maintainers cannot NMU or hijack packages)
 
-        # srcuploaders includes the maintainer
+        # uploader includes the maintainer
         accept = False
-        for sup in r.srcuploaders:
-            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
+        for uploader in r.uploaders:
+            (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
             # Eww - I hope we never have two people with the same name in Debian
             if email == fpr.uid.uid or name == fpr.uid.name:
                 accept = True
@@ -1779,7 +1840,7 @@ class Upload(object):
 
         # Also only check if there is a file defined (and existant) with
         # checks.
- transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "") + transpath = cnf.get("Dinstall::ReleaseTransitions", "") if transpath == "" or not os.path.exists(transpath): return @@ -1967,7 +2028,7 @@ distribution.""" del self.Subst["__ANNOUNCE_LIST_ADDRESS__"] - if cnf.FindB("Dinstall::CloseBugs"): + if cnf.FindB("Dinstall::CloseBugs") and cnf.has_key("Dinstall::BugServer"): summary = self.close_bugs(summary, action) del self.Subst["__SHORT_SUMMARY__"] @@ -1999,6 +2060,7 @@ distribution.""" print "Installing." self.logger.log(["installing changes", self.pkg.changes_file]) + binaries = [] poolfiles = [] # Add the .dsc file to the DB first @@ -2011,7 +2073,9 @@ distribution.""" # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb) for newfile, entry in self.pkg.files.items(): if entry["type"] == "deb": - poolfiles.append(add_deb_to_db(self, newfile, session)) + b, pf = add_deb_to_db(self, newfile, session) + binaries.append(b) + poolfiles.append(pf) # If this is a sourceful diff only upload that is moving # cross-component we need to copy the .orig files into the new @@ -2096,11 +2160,28 @@ distribution.""" # Our SQL session will automatically start a new transaction after # the last commit + # Now ensure that the metadata has been added + # This has to be done after we copy the files into the pool + # For source if we have it: + if self.pkg.changes["architecture"].has_key("source"): + import_metadata_into_db(source, session) + + # Now for any of our binaries + for b in binaries: + import_metadata_into_db(b, session) + + session.commit() + # Move the .changes into the 'done' directory + ye, mo, da = time.gmtime()[0:3] + donedir = os.path.join(cnf["Dir::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da) + if not os.path.isdir(donedir): + os.makedirs(donedir) + utils.move(self.pkg.changes_file, - os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file))) + os.path.join(donedir, os.path.basename(self.pkg.changes_file))) - if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"): + if self.pkg.changes["architecture"].has_key("source"): UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"]) self.update_subst() @@ -2111,19 +2192,19 @@ distribution.""" self.announce(short_summary, 1) ## Helper stuff for DebBugs Version Tracking - if cnf.Find("Dir::Queue::BTSVersionTrack"): + if cnf.Find("Dir::BTSVersionTrack"): if self.pkg.changes["architecture"].has_key("source"): - (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".") + (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".") version_history = os.fdopen(fd, 'w') version_history.write(self.pkg.dsc["bts changelog"]) version_history.close() - filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"], + filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"], self.pkg.changes_file[:-8]+".versions") os.rename(temp_filename, filename) os.chmod(filename, 0644) # Write out the binary -> source mapping. 
- (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".") + (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".") debinfo = os.fdopen(fd, 'w') for name, entry in sorted(self.pkg.files.items()): if entry["type"] == "deb": @@ -2132,7 +2213,7 @@ distribution.""" entry["source version"]]) debinfo.write(line+"\n") debinfo.close() - filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"], + filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"], self.pkg.changes_file[:-8]+".debinfo") os.rename(temp_filename, filename) os.chmod(filename, 0644) @@ -2360,6 +2441,8 @@ distribution.""" if self.logger: self.logger.log(["rejected", self.pkg.changes_file]) + stats = SummaryStats() + stats.reject_count += 1 return 0 ################################################################################ @@ -2426,7 +2509,7 @@ distribution.""" """ Cnf = Config() anyversion = None - anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite)) + anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ] for (s, v) in sv_list: if s in [ x.lower() for x in anysuite ]: if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0: @@ -2456,8 +2539,14 @@ distribution.""" # Check versions for each target suite for target_suite in self.pkg.changes["distribution"].keys(): - must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ] - must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ] + # Check we can find the target suite + ts = get_suite(target_suite) + if ts is None: + self.rejects.append("Cannot find target suite %s to perform version checks" % target_suite) + continue + + must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ] + must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ] # Enforce "must be newer than target suite" even if conffile omits it if target_suite not in must_be_newer_than:
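
The Built-Using handling added to the binary file checks above relies on apt_pkg.parse_depends(), which returns a list of or-groups, each a list of (package, version, relation) tuples. Below is a minimal standalone sketch of the same validation; the known_sources set is a hypothetical stand-in for the database lookup that get_sources_from_name() performs in dak.

# Sketch only: validates a Built-Using field the way the binary checks
# above do. known_sources is a hypothetical stand-in for the
# database-backed get_sources_from_name() lookup.
import apt_pkg

known_sources = set([("gcc-4.6", "4.6.1-4")])

def check_built_using(field):
    rejects = []
    used = []
    for dep in apt_pkg.parse_depends(field):
        bu_s, bu_v, bu_e = dep[0]
        if bu_e != "=" or len(bu_v) < 1:
            # Only exact "(= version)" relations pin a unique source.
            rejects.append("non strict dependency (%s %s %s)" % (bu_s, bu_e, bu_v))
        elif (bu_s, bu_v) not in known_sources:
            rejects.append("cannot find source %s (= %s)" % (bu_s, bu_v))
        else:
            used.append((bu_s, bu_v))
    return used, rejects

print check_built_using("gcc-4.6 (= 4.6.1-4), linux (>= 3.0)")

Here the gcc-4.6 entry is accepted while the ">=" relation on linux is rejected, mirroring the strict-equality rule the diff enforces.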
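
check_dsc() is now split into __dsc_filename() and load_dsc(), both of which return a (status, value) pair: on success the second element is the payload, on failure it is the reason that ends up in self.rejects. A self-contained sketch of that convention, with find_dsc() as a simplified stand-in for the real methods:

# Sketch of the (status, value) protocol used by __dsc_filename() and
# load_dsc() above; find_dsc() is a simplified stand-in.
def find_dsc(files):
    dsc_filename = None
    for name, entry in files.items():
        if entry.get("type") == "dsc":
            if dsc_filename:
                return False, "cannot process a .changes file with multiple .dsc's."
            dsc_filename = name
    if not dsc_filename:
        return False, "source uploads must contain a dsc file"
    return True, dsc_filename

(status, value) = find_dsc({"hello_2.8-1.dsc": {"type": "dsc"}})
if status:
    print "dsc file: %s" % value      # success: value is the payload
else:
    print "reject: %s" % value        # failure: value is the reason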
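
TarTime.callback() changes signature because the deprecated apt_inst.debExtract() calls in the timestamp check are replaced with the python-apt 0.8 object API: apt_inst.DebFile.control is a TarFile whose go() walks the archive and invokes the callback with a member object and that member's data. A sketch of the traversal, assuming python-apt 0.8 is available; "foo.deb" is a placeholder path:

# Sketch of the python-apt 0.8 traversal used by the timestamp check
# above. "foo.deb" is a placeholder; member follows apt_inst.TarMember.
import apt_inst

def callback(member, data):
    # member carries the tar header fields that the old flat argument
    # list (Kind, Name, ..., MTime, ...) used to pass individually.
    print member.name, member.mtime

deb = apt_inst.DebFile("foo.deb")
deb.control.go(callback)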
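
The install step above now files processed .changes under Dir::Done in dated subdirectories rather than a flat queue directory. The layout it produces is YYYY/MM/DD; a sketch of the path construction, with /srv/dak/queue/done standing in for cnf["Dir::Done"]:

# Sketch of the dated 'done' layout built by install() above;
# /srv/dak/queue/done is a placeholder for cnf["Dir::Done"].
import os, time

ye, mo, da = time.gmtime()[0:3]
donedir = os.path.join("/srv/dak/queue/done", str(ye), "%0.2d" % mo, "%0.2d" % da)
if not os.path.isdir(donedir):
    os.makedirs(donedir)
print donedir    # e.g. /srv/dak/queue/done/2011/03/05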
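
Finally, the version checks now come from the database via get_version_checks() rather than from Suite::*::VersionChecks configuration lists, but the comparison itself is unchanged: keep the highest version seen across the target suite and any suite that Enhances it. A sketch of that selection using the apt_pkg.VersionCompare spelling used elsewhere in this file; the sv_list sample data is made up:

# Sketch of the "highest version in suite + enhancing suites" selection
# performed above; sv_list contents are made-up sample data.
import apt_pkg
apt_pkg.init()

sv_list = [("unstable", "1.0-1"), ("unstable", "1.2-1"), ("experimental", "2.0-1")]
anysuite = ["unstable"]    # target suite plus its Enhances references

anyversion = None
for (s, v) in sv_list:
    if s in [x.lower() for x in anysuite]:
        if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
            anyversion = v

print anyversion    # -> 1.2-1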