X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Farchive.py;h=c2cc8392777ce9750e501b6a6e01bb308f301f4a;hb=31de75724c3cf43d635c55cd18953329a87032b1;hp=6b4d0dce853b92188b483fe4b18115641f9a61db;hpb=d9e994b6ff8a8d57062aaed322396614112c83ac;p=dak.git

diff --git a/daklib/archive.py b/daklib/archive.py
index 6b4d0dce..c2cc8392 100644
--- a/daklib/archive.py
+++ b/daklib/archive.py
@@ -49,12 +49,18 @@ class ArchiveTransaction(object):
         self.fs = FilesystemTransaction()
         self.session = DBConn().session()
 
-    def get_file(self, hashed_file, source_name):
+    def get_file(self, hashed_file, source_name, check_hashes=True):
         """Look for file C{hashed_file} in database
 
         @type  hashed_file: L{daklib.upload.HashedFile}
         @param hashed_file: file to look for in the database
 
+        @type  source_name: str
+        @param source_name: source package name
+
+        @type  check_hashes: bool
+        @param check_hashes: check size and hashes match
+
         @raise KeyError: file was not found in the database
         @raise HashMismatchException: hash mismatch
 
@@ -64,7 +70,10 @@ class ArchiveTransaction(object):
         poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
         try:
             poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one()
-            if poolfile.filesize != hashed_file.size or poolfile.md5sum != hashed_file.md5sum or poolfile.sha1sum != hashed_file.sha1sum or poolfile.sha256sum != hashed_file.sha256sum:
+            if check_hashes and (poolfile.filesize != hashed_file.size
+                                 or poolfile.md5sum != hashed_file.md5sum
+                                 or poolfile.sha1sum != hashed_file.sha1sum
+                                 or poolfile.sha256sum != hashed_file.sha256sum):
                 raise HashMismatchException('{0}: Does not match file already existing in the pool.'.format(hashed_file.filename))
             return poolfile
         except NoResultFound:
@@ -352,11 +361,7 @@ class ArchiveTransaction(object):
         # Uploaders are the maintainer and co-maintainers from the Uploaders field
         db_source.uploaders.append(maintainer)
         if 'Uploaders' in control:
-            def split_uploaders(field):
-                import re
-                for u in re.sub(">[ ]*,", ">\t", field).split("\t"):
-                    yield u.strip()
-
+            from daklib.textutils import split_uploaders
             for u in split_uploaders(control['Uploaders']):
                 db_source.uploaders.append(get_or_set_maintainer(u, session))
         session.flush()
@@ -590,9 +595,24 @@ class ArchiveUpload(object):
         @type: bool
         """
 
+        self._checked = False
+        """checks passes. set by C{check}
+        @type: bool
+        """
+
         self._new_queue = self.session.query(PolicyQueue).filter_by(queue_name='new').one()
         self._new = self._new_queue.suite
 
+    def warn(self, message):
+        """add a warning message
+
+        Adds a warning message that can later be seen in C{self.warnings}
+
+        @type  message: string
+        @param message: warning message
+        """
+        self.warnings.append(message)
+
     def prepare(self):
         """prepare upload for further processing
 
@@ -613,11 +633,12 @@ class ArchiveUpload(object):
         cnf = Config()
         session = self.transaction.session
 
-        self.directory = tempfile.mkdtemp(dir=cnf.get('Dir::TempPath'))
+        self.directory = utils.temp_dirname(parent=cnf.get('Dir::TempPath'),
+                                            mode=0o2750, group=cnf.unprivgroup)
         with FilesystemTransaction() as fs:
             src = os.path.join(self.original_directory, self.original_changes.filename)
             dst = os.path.join(self.directory, self.original_changes.filename)
-            fs.copy(src, dst)
+            fs.copy(src, dst, mode=0o640)
 
             self.changes = upload.Changes(self.directory, self.original_changes.filename, self.keyrings)
 
@@ -626,7 +647,7 @@ class ArchiveUpload(object):
                 dst = os.path.join(self.directory, f.filename)
                 if not os.path.exists(src):
                     continue
-                fs.copy(src, dst)
+                fs.copy(src, dst, mode=0o640)
 
             source = self.changes.source
             if source is not None:
@@ -635,7 +656,7 @@ class ArchiveUpload(object):
                     dst = os.path.join(self.directory, f.filename)
                     if not os.path.exists(dst):
                         try:
-                            db_file = self.transaction.get_file(f, source.dsc['Source'])
+                            db_file = self.transaction.get_file(f, source.dsc['Source'], check_hashes=False)
                             db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
                             fs.copy(db_archive_file.path, dst, symlink=True)
                         except KeyError:
@@ -663,7 +684,8 @@ class ArchiveUpload(object):
 
         sourcedir = os.path.join(self.directory, 'source')
         if not os.path.exists(sourcedir):
-            subprocess.check_call(["dpkg-source", "--no-copy", "-x", dsc_path, sourcedir], shell=False)
+            devnull = open('/dev/null', 'w')
+            subprocess.check_call(["dpkg-source", "--no-copy", "--no-check", "-x", dsc_path, sourcedir], shell=False, stdout=devnull)
         if not os.path.isdir(sourcedir):
             raise Exception("{0} is not a directory after extracting source package".format(sourcedir))
         return sourcedir
@@ -844,28 +866,36 @@ class ArchiveUpload(object):
         assert self.changes.valid_signature
 
         try:
+            # Validate signatures and hashes before we do any real work:
             for chk in (
                     checks.SignatureCheck,
                     checks.ChangesCheck,
-                    checks.TransitionCheck,
-                    checks.UploadBlockCheck,
                     checks.HashesCheck,
+                    checks.ExternalHashesCheck,
                     checks.SourceCheck,
                     checks.BinaryCheck,
                     checks.BinaryTimestampCheck,
-                    checks.ACLCheck,
                     checks.SingleDistributionCheck,
-                    checks.NoSourceOnlyCheck,
-                    checks.LintianCheck,
                     ):
                 chk().check(self)
 
             final_suites = self._final_suites()
             if len(final_suites) == 0:
-                self.reject_reasons.append('Ended with no suite to install to.')
+                self.reject_reasons.append('No target suite found. Please check your target distribution and that you uploaded to the right archive.')
                 return False
 
+            self.final_suites = final_suites
+
+            for chk in (
+                    checks.TransitionCheck,
+                    checks.ACLCheck,
+                    checks.NoSourceOnlyCheck,
+                    checks.LintianCheck,
+                    ):
+                chk().check(self)
+
             for chk in (
+                    checks.ACLCheck,
                     checks.SourceFormatCheck,
                     checks.SuiteArchitectureCheck,
                     checks.VersionCheck,
@@ -876,7 +906,7 @@ class ArchiveUpload(object):
             if len(self.reject_reasons) != 0:
                 return False
 
-            self.final_suites = final_suites
+            self._checked = True
             return True
         except checks.Reject as e:
             self.reject_reasons.append(unicode(e))
@@ -930,7 +960,7 @@ class ArchiveUpload(object):
         if suite.copychanges:
             src = os.path.join(self.directory, self.changes.filename)
             dst = os.path.join(suite.archive.path, 'dists', suite.suite_name, self.changes.filename)
-            self.transaction.fs.copy(src, dst)
+            self.transaction.fs.copy(src, dst, mode=suite.archive.mode)
 
         return (db_source, db_binaries)
 
@@ -978,7 +1008,7 @@ class ArchiveUpload(object):
         self.transaction.session.flush()
 
         dst = os.path.join(policy_queue.path, self.changes.filename)
-        self.transaction.fs.copy(self.changes.path, dst)
+        self.transaction.fs.copy(self.changes.path, dst, mode=policy_queue.change_perms)
 
         return u
 
@@ -993,6 +1023,7 @@ class ArchiveUpload(object):
         assert len(self.reject_reasons) == 0
         assert self.changes.valid_signature
         assert self.final_suites is not None
+        assert self._checked
 
         byhand = self.changes.byhand_files
         if len(byhand) == 0:
@@ -1017,8 +1048,9 @@ class ArchiveUpload(object):
             package, version, archext = parts
             arch, ext = archext.split('.', 1)
 
-            rule = automatic_byhand_packages.get(package)
-            if rule is None:
+            try:
+                rule = automatic_byhand_packages.subtree(package)
+            except KeyError:
                 remaining.append(f)
                 continue
 
@@ -1053,7 +1085,7 @@ class ArchiveUpload(object):
 
         src = os.path.join(self.directory, hashed_file.filename)
         dst = os.path.join(policy_queue.path, hashed_file.filename)
-        fs.copy(src, dst)
+        fs.copy(src, dst, mode=policy_queue.change_perms)
 
         return byhand_file
 
@@ -1083,7 +1115,7 @@ class ArchiveUpload(object):
         for binary in self.changes.binaries:
             control = binary.control
             source_package, source_version = binary.source
-            line = " ".join([control['Package'], control['Version'], source_package, source_version])
+            line = " ".join([control['Package'], control['Version'], control['Architecture'], source_package, source_version])
             print >>debinfo, line
 
         debinfo.close()
@@ -1103,6 +1135,7 @@ class ArchiveUpload(object):
         assert len(self.reject_reasons) == 0
         assert self.changes.valid_signature
         assert self.final_suites is not None
+        assert self._checked
         assert not self.new
 
         db_changes = self._install_changes()
@@ -1118,10 +1151,12 @@ class ArchiveUpload(object):
             if policy_queue is not None:
                 redirected_suite = policy_queue.suite
 
+            source_suites = self.session.query(Suite).filter(Suite.suite_id.in_([suite.suite_id, redirected_suite.suite_id])).subquery()
+
             source_component_func = lambda source: self._source_override(overridesuite, source).component
             binary_component_func = lambda binary: self._binary_component(overridesuite, binary)
 
-            (db_source, db_binaries) = self._install_to_suite(redirected_suite, source_component_func, binary_component_func, extra_source_archives=[suite.archive])
+            (db_source, db_binaries) = self._install_to_suite(redirected_suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])
 
             if policy_queue is not None:
                 self._install_policy(policy_queue, suite, db_changes, db_source, db_binaries)
@@ -1129,7 +1164,7 @@ class ArchiveUpload(object):
             # copy to build queues
             if policy_queue is None or policy_queue.send_to_build_queues:
                 for build_queue in suite.copy_queues:
-                    self._install_to_suite(build_queue.suite, source_component_func, binary_component_func, extra_source_archives=[suite.archive])
+                    self._install_to_suite(build_queue.suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])
 
         self._do_bts_versiontracking()
 
@@ -1150,16 +1185,22 @@ class ArchiveUpload(object):
         binaries = self.changes.binaries
         byhand = self.changes.byhand_files
 
-        new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one()
-        if len(byhand) > 0:
-            new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one()
-        new_suite = new_queue.suite
-
         # we need a suite to guess components
         suites = list(self.final_suites)
         assert len(suites) == 1, "NEW uploads must be to a single suite"
         suite = suites[0]
 
+        # decide which NEW queue to use
+        if suite.new_queue is None:
+            new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one()
+        else:
+            new_queue = suite.new_queue
+        if len(byhand) > 0:
+            # There is only one global BYHAND queue
+            new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one()
+        new_suite = new_queue.suite
+
+
         def binary_component_func(binary):
             return self._binary_component(suite, binary, only_overrides=False)
 
@@ -1176,8 +1217,9 @@ class ArchiveUpload(object):
                 source_component_name = guess
                 break
         if source_component_name is None:
-            raise Exception('Could not guess source component.')
-        source_component = self.session.query(Component).filter_by(component_name=source_component_name).one()
+            source_component = self.session.query(Component).order_by(Component.component_id).first()
+        else:
+            source_component = self.session.query(Component).filter_by(component_name=source_component_name).one()
         source_component_func = lambda source: source_component
 
         db_changes = self._install_changes()
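
ArchiveTransaction.get_file() now separates "find the PoolFile record" from "verify it matches the upload": prepare() passes check_hashes=False when copying files referenced by the .dsc (typically .orig tarballs) out of the pool, before checks.HashesCheck has run. A rough usage sketch; the object names below are illustrative, not taken from the diff:

    # Strict lookup (default): size, MD5, SHA1 and SHA256 are compared against
    # the existing pool entry; HashMismatchException is raised on any difference.
    poolfile = transaction.get_file(hashed_file, source_name)

    # Lookup only: resolve the PoolFile record without comparing hashes; the
    # copied files are still verified later by checks.HashesCheck in check().
    poolfile = transaction.get_file(hashed_file, source_name, check_hashes=False)

In both cases a KeyError is raised if the file is not in the pool at all.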
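
The inline split_uploaders helper removed from _install_source() is now imported from daklib.textutils. Judging from the removed lines, a minimal standalone sketch of the expected behaviour looks like this (the actual daklib.textutils implementation may differ in detail):

    import re

    def split_uploaders(field):
        # Only commas that directly follow a closing '>' separate entries, so a
        # comma inside a maintainer's name does not split the field incorrectly.
        for uploader in re.sub(r">[ ]*,", ">\t", field).split("\t"):
            yield uploader.strip()

    # Example:
    #   list(split_uploaders('Ann Dev <ann@example.org>, Bob Dev <bob@example.org>'))
    #   -> ['Ann Dev <ann@example.org>', 'Bob Dev <bob@example.org>']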