X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fqueue.py;h=52483cca4bd8dc0d53b2f6b95670f250ef3d2b9e;hb=4f29e33412c9bf42023483ef6f6e8dcf2fbe0dc5;hp=7a49242284026ec39f12e5fe3d13928d46094819;hpb=46ed481d97efd2ca4d0914de1dd1393300cf21a5;p=dak.git

diff --git a/daklib/queue.py b/daklib/queue.py
index 7a492422..52483cca 100755
--- a/daklib/queue.py
+++ b/daklib/queue.py
@@ -53,8 +53,18 @@ from dbconn import *
 from summarystats import SummaryStats
 from utils import parse_changes, check_dsc_files
 from textutils import fix_maintainer
-from binary import Binary
 from lintian import parse_lintian_output, generate_reject_messages
+from contents import UnpackedSource
+
+# suppress some deprecation warnings in squeeze related to apt_pkg
+# module
+import warnings
+warnings.filterwarnings('ignore', \
+    "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
+    DeprecationWarning)
+warnings.filterwarnings('ignore', \
+    "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
+    DeprecationWarning)
 
 ###############################################################################
 
@@ -370,6 +380,44 @@ def edit_note(note, upload, session, trainee=False):
 
 ###############################################################################
 
+# suite names DMs can upload to
+dm_suites = ['unstable', 'experimental']
+
+def get_newest_source(source, session):
+    'returns the newest DBSource object in dm_suites'
+    ## the most recent version of the package uploaded to unstable or
+    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
+    ## section of its control file
+    q = session.query(DBSource).filter_by(source = source). \
+        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
+        order_by(desc('source.version'))
+    return q.first()
+
+def get_suite_version_by_source(source, session):
+    'returns a list of tuples (suite_name, version) for source package'
+    q = session.query(Suite.suite_name, DBSource.version). \
+        join(Suite.sources).filter_by(source = source)
+    return q.all()
+
+def get_source_by_package_and_suite(package, suite_name, session):
+    '''
+    returns a DBSource query filtered by DBBinary.package and this package's
+    suite_name
+    '''
+    return session.query(DBSource). \
+        join(DBSource.binaries).filter_by(package = package). \
+        join(DBBinary.suites).filter_by(suite_name = suite_name)
+
+def get_suite_version_by_package(package, arch_string, session):
+    '''
+    returns a list of tuples (suite_name, version) for binary package and
+    arch_string
+    '''
+    return session.query(Suite.suite_name, DBBinary.version). \
+        join(Suite.binaries).filter_by(package = package). \
+        join(DBBinary.architecture). \
+        filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
+
 class Upload(object):
     """
     Everything that has to do with an upload processed.
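
The four helpers added above are later used to replace inline ORM queries in the Upload methods further down in this diff. A minimal usage sketch follows (illustrative only, not part of the patch; 'hello' and 'amd64' are hypothetical values and session is assumed to be an open daklib.dbconn SQLAlchemy session):

# Illustrative sketch, not part of the patch.
src = get_newest_source('hello', session)
if src is not None and src.dm_upload_allowed:
    pass  # newest unstable/experimental upload permits DM uploads

# Both helpers return plain lists of (suite_name, version) tuples.
source_versions = get_suite_version_by_source('hello', session)
binary_versions = get_suite_version_by_package('hello', 'amd64', session)
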
@@ -725,6 +773,30 @@ class Upload(object):
             if not re_valid_pkg_name.match(prov):
                 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
 
+        # If there is a Built-Using field, we need to check we can find the
+        # exact source version
+        built_using = control.Find("Built-Using")
+        if built_using:
+            try:
+                entry["built-using"] = []
+                for dep in apt_pkg.parse_depends(built_using):
+                    bu_s, bu_v, bu_e = dep[0]
+                    # Check that it's an exact match dependency and we have
+                    # some form of version
+                    if bu_e != "=" or len(bu_v) < 1:
+                        self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
+                    else:
+                        # Find the source id for this version
+                        bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
+                        if len(bu_so) != 1:
+                            self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
+                        else:
+                            entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
+
+            except ValueError, e:
+                self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
+
+
         # Check the section & priority match those given in the .changes (non-fatal)
         if control.Find("Section") and entry["section"] != "" \
            and entry["section"] != control.Find("Section"):
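
For reference, apt_pkg.parse_depends() as used in the Built-Using check above yields one or-group per comma-separated entry, each a list of (package, version, relation) tuples, and only strict '=' relations with a non-empty version are accepted. A small illustration (not part of the patch; the Built-Using value is hypothetical):

# Illustrative sketch, not part of the patch.
import apt_pkg

# "gcc-4.6 (= 4.6.1-4)" parses to [[('gcc-4.6', '4.6.1-4', '=')]]
for dep in apt_pkg.parse_depends("gcc-4.6 (= 4.6.1-4)"):
    bu_s, bu_v, bu_e = dep[0]
    if bu_e != "=" or len(bu_v) < 1:
        pass  # such an entry would be rejected as non-strict above
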
@@ -795,7 +867,8 @@ class Upload(object):
                     (source_version, f, self.pkg.changes["version"]))
             else:
                 # Check in the SQL database
-                if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
+                if not source_exists(source_package, source_version, suites = \
+                    self.pkg.changes["distribution"].keys(), session = session):
                     # Check in one of the other directories
                     source_epochless_version = re_no_epoch.sub('', source_version)
                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
@@ -817,13 +890,6 @@ class Upload(object):
         # Check the version and for file overwrites
         self.check_binary_against_db(f, session)
 
-        # Temporarily disable contents generation until we change the table storage layout
-        #b = Binary(f)
-        #b.scan_package()
-        #if len(b.rejects) > 0:
-        #    for j in b.rejects:
-        #        self.rejects.append(j)
-
     def source_file_checks(self, f, session):
         entry = self.pkg.files[f]
 
@@ -933,9 +999,11 @@ class Upload(object):
 
         # Check for packages that have moved from one component to another
         entry['suite'] = suite
-        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
-        if res.rowcount > 0:
-            entry["othercomponents"] = res.fetchone()[0]
+        arch_list = [entry["architecture"], 'all']
+        component = get_component_by_package_suite(self.pkg.files[f]['package'], \
+            [suite], arch_list = arch_list, session = session)
+        if component is not None:
+            entry["othercomponents"] = component
 
     def check_files(self, action=True):
         file_keys = self.pkg.files.keys()
@@ -1198,11 +1266,10 @@ class Upload(object):
                 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
 
         # Extract the source
-        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
-        (result, output) = commands.getstatusoutput(cmd)
-        if (result != 0):
-            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
-            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
+        try:
+            unpacked = UnpackedSource(dsc_filename)
+        except:
+            self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
             return
 
         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
@@ -1214,19 +1281,19 @@ class Upload(object):
             upstr_version = re_strip_revision.sub('', upstr_version)
 
         # Ensure the changelog file exists
-        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
-        if not os.path.exists(changelog_filename):
+        changelog_file = unpacked.get_changelog_file()
+        if changelog_file is None:
             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
             return
 
         # Parse the changelog
         self.pkg.dsc["bts changelog"] = ""
-        changelog_file = utils.open_file(changelog_filename)
         for line in changelog_file.readlines():
             m = re_changelog_versions.match(line)
             if m:
                 self.pkg.dsc["bts changelog"] += line
         changelog_file.close()
+        unpacked.cleanup()
 
         # Check we found at least one revision in the changelog
         if not self.pkg.dsc["bts changelog"]:
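
The two hunks above switch source extraction from shelling out to 'dpkg-source -sn -x' to the UnpackedSource helper imported from daklib.contents. A rough sketch of the calls involved (illustrative only; it assumes nothing beyond the methods visible in this patch, and the .dsc name is hypothetical):

# Illustrative sketch, not part of the patch.
try:
    unpacked = UnpackedSource('hello_2.8-1.dsc')
except Exception:
    unpacked = None  # treated above as a 'dpkg-source -x' failure

if unpacked is not None:
    changelog_file = unpacked.get_changelog_file()  # file object, or None
    if changelog_file is not None:
        changelog_file.readlines()
        changelog_file.close()
    unpacked.cleanup()  # assumed to remove the unpacked temporary tree
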
@@ -1680,22 +1747,13 @@ class Upload(object):
         if rej:
             return
 
-        ## the most recent version of the package uploaded to unstable or
-        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
-        ## section of its control file
-        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
-        q = q.join(SrcAssociation)
-        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
-        q = q.order_by(desc('source.version')).limit(1)
+        r = get_newest_source(self.pkg.changes["source"], session)
 
-        r = q.all()
-
-        if len(r) != 1:
+        if r is None:
             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
             self.rejects.append(rej)
             return
 
-        r = r[0]
         if not r.dm_upload_allowed:
             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
             self.rejects.append(rej)
@@ -1727,11 +1785,7 @@ class Upload(object):
         ## none of the packages are being taken over from other source packages
         for b in self.pkg.changes["binary"].keys():
             for suite in self.pkg.changes["distribution"].keys():
-                q = session.query(DBSource)
-                q = q.join(DBBinary).filter_by(package=b)
-                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
-
-                for s in q.all():
+                for s in get_source_by_package_and_suite(b, suite, session):
                     if s.source != self.pkg.changes["source"]:
                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
 
@@ -1971,6 +2025,7 @@ distribution."""
         print "Installing."
         self.logger.log(["installing changes", self.pkg.changes_file])
 
+        binaries = []
         poolfiles = []
 
         # Add the .dsc file to the DB first
@@ -1983,7 +2038,9 @@ distribution."""
         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
         for newfile, entry in self.pkg.files.items():
             if entry["type"] == "deb":
-                poolfiles.append(add_deb_to_db(self, newfile, session))
+                b, pf = add_deb_to_db(self, newfile, session)
+                binaries.append(b)
+                poolfiles.append(pf)
 
         # If this is a sourceful diff only upload that is moving
         # cross-component we need to copy the .orig files into the new
@@ -2068,6 +2125,18 @@ distribution."""
         # Our SQL session will automatically start a new transaction after
         # the last commit
 
+        # Now ensure that the metadata has been added
+        # This has to be done after we copy the files into the pool
+        # For source if we have it:
+        if self.pkg.changes["architecture"].has_key("source"):
+            import_metadata_into_db(source, session)
+
+        # Now for any of our binaries
+        for b in binaries:
+            import_metadata_into_db(b, session)
+
+        session.commit()
+
         # Move the .changes into the 'done' directory
         utils.move(self.pkg.changes_file,
                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
@@ -2488,12 +2557,10 @@ distribution."""
     ################################################################################
     def check_binary_against_db(self, filename, session):
         # Ensure version is sane
-        q = session.query(BinAssociation)
-        q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
-        q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
-
-        self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
-                                       filename, self.pkg.files[filename]["version"], sourceful=False)
+        self.cross_suite_version_check( \
+            get_suite_version_by_package(self.pkg.files[filename]["package"], \
+                self.pkg.files[filename]["architecture"], session),
+            filename, self.pkg.files[filename]["version"], sourceful=False)
 
         # Check for any existing copies of the file
         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
@@ -2510,11 +2577,9 @@ distribution."""
         version = self.pkg.dsc.get("version")
 
         # Ensure version is sane
-        q = session.query(SrcAssociation)
-        q = q.join(DBSource).filter(DBSource.source==source)
-
-        self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
-                                       filename, version, sourceful=True)
+        self.cross_suite_version_check( \
+            get_suite_version_by_source(source, session), filename, version,
+            sourceful=True)
 
     ################################################################################
     def check_dsc_against_db(self, filename, session):
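
Both version checks above now hand cross_suite_version_check() the (suite_name, version) tuples returned by the new module-level helpers instead of building pairs from ORM rows. The shape of that data, for illustration (hypothetical values, not part of the patch):

# Illustrative sketch, not part of the patch.
pairs = get_suite_version_by_package('hello', 'amd64', session)
# e.g. [(u'unstable', u'2.8-1'), (u'experimental', u'2.9-1')]
for suite_name, version in pairs:
    pass  # each pair is compared against the uploaded version
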
@@ -2681,7 +2746,8 @@ distribution."""
                 source_version = entry["source version"]
                 source_package = entry["source package"]
                 if not self.pkg.changes["architecture"].has_key("source") \
-                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
+                   and not source_exists(source_package, source_version, \
+                       suites = self.pkg.changes["distribution"].keys(), session = session):
                     source_epochless_version = re_no_epoch.sub('', source_version)
                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                     found = False
@@ -2728,7 +2794,9 @@ distribution."""
             source_version = entry["source version"]
             source_package = entry["source package"]
             if not self.pkg.changes["architecture"].has_key("source") \
-               and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
+               and not source_exists(source_package, source_version, \
+                   suites = self.pkg.changes["distribution"].keys(), \
+                   session = session):
                 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
 
         # Version and file overwrite checks
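
The remaining source_exists() call sites now pass the target suites and the database session as keyword arguments. A minimal call sketch (illustrative only; package, version and suite values are hypothetical):

# Illustrative sketch, not part of the patch.
found = source_exists('hello', '2.8-1',
                      suites = ['unstable'], session = session)
if not found:
    pass  # the corresponding binary upload would be rejected above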