-
- ###########################################################################
-
def remove(self, dir=None):
    """
    Delete this upload's files from disk.

    Used (for instance) in p-u to remove the package from unchecked.
    Changes into C{dir} (or the package's own directory when C{dir} is
    None), then unlinks every file listed in the upload plus the
    .changes file itself.
    """
    workdir = self.pkg.directory if dir is None else dir
    os.chdir(workdir)

    for filename in self.pkg.files.keys():
        os.unlink(filename)
    os.unlink(self.pkg.changes_file)
-
- ###########################################################################
-
- def move_to_dir (self, dest, perms=0660, changesperms=0664):
- """
- Move files to dest with certain perms/changesperms
- """
- utils.move(self.pkg.changes_file, dest, perms=changesperms)
- for f in self.pkg.files.keys():
- utils.move(f, dest, perms=perms)
-
- ###########################################################################
-
- def force_reject(self, reject_files):
- """
- Forcefully move files from the current directory to the
- reject directory. If any file already exists in the reject
- directory it will be moved to the morgue to make way for
- the new file.
-
- @type files: dict
- @param files: file dictionary
-
- """
-
- cnf = Config()
-
- for file_entry in reject_files:
- # Skip any files which don't exist or which we don't have permission to copy.
- if os.access(file_entry, os.R_OK) == 0:
- continue
-
- dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
-
- try:
- dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
- except OSError, e:
- # File exists? Let's try and move it to the morgue
- if e.errno == errno.EEXIST:
- morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
- try:
- morgue_file = utils.find_next_free(morgue_file)
- except NoFreeFilenameError:
- # Something's either gone badly Pete Tong, or
- # someone is trying to exploit us.
- utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
- return
- utils.move(dest_file, morgue_file, perms=0660)
- try:
- dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
- except OSError, e:
- # Likewise
- utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
- return
- else:
- raise
- # If we got here, we own the destination file, so we can
- # safely overwrite it.
- utils.move(file_entry, dest_file, 1, perms=0660)
- os.close(dest_fd)
-
- ###########################################################################
- def do_reject (self, manual=0, reject_message="", note=""):
- """
- Reject an upload. If called without a reject message or C{manual} is
- true, spawn an editor so the user can write one.
-
- @type manual: bool
- @param manual: manual or automated rejection
-
- @type reject_message: string
- @param reject_message: A reject message
-
- @return: 0
-
- """
- # If we weren't given a manual rejection message, spawn an
- # editor so the user can add one in...
- if manual and not reject_message:
- (fd, temp_filename) = utils.temp_filename()
- temp_file = os.fdopen(fd, 'w')
- if len(note) > 0:
- for line in note:
- temp_file.write(line)
- temp_file.close()
- editor = os.environ.get("EDITOR","vi")
- answer = 'E'
- while answer == 'E':
- os.system("%s %s" % (editor, temp_filename))
- temp_fh = utils.open_file(temp_filename)
- reject_message = "".join(temp_fh.readlines())
- temp_fh.close()
- print "Reject message:"
- print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
- prompt = "[R]eject, Edit, Abandon, Quit ?"
- answer = "XXX"
- while prompt.find(answer) == -1:
- answer = utils.our_raw_input(prompt)
- m = re_default_answer.search(prompt)
- if answer == "":
- answer = m.group(1)
- answer = answer[:1].upper()
- os.unlink(temp_filename)
- if answer == 'A':
- return 1
- elif answer == 'Q':
- sys.exit(0)
-
- print "Rejecting.\n"
-
- cnf = Config()
-
- reason_filename = self.pkg.changes_file[:-8] + ".reason"
- reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
-
- # Move all the files into the reject directory
- reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
- self.force_reject(reject_files)
-
- # If we fail here someone is probably trying to exploit the race
- # so let's just raise an exception ...
- if os.path.exists(reason_filename):
- os.unlink(reason_filename)
- reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
-
- rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
-
- if not manual:
- self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
- self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
- self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
- os.write(reason_fd, reject_message)
- reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
- else:
- # Build up the rejection email
- user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
- self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
- self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
- self.Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
- reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
- # Write the rejection email out as the <foo>.reason file
- os.write(reason_fd, reject_mail_message)
-
- del self.Subst["__REJECTOR_ADDRESS__"]
- del self.Subst["__MANUAL_REJECT_MESSAGE__"]
- del self.Subst["__CC__"]
-
- os.close(reason_fd)
-
- # Send the rejection mail if appropriate
- if not cnf["Dinstall::Options::No-Mail"]:
- utils.send_mail(reject_mail_message)
-
- if self.logger:
- self.logger.log(["rejected", self.pkg.changes_file])
-
- return 0
-
- ################################################################################
def in_override_p(self, package, component, suite, binary_type, file, session):
    """
    Check if a package already has override entries in the DB

    @type package: string
    @param package: package name

    @type component: string
    @param component: database id of the component

    @type suite: int
    @param suite: database id of the suite

    @type binary_type: string
    @param binary_type: type of the package

    @type file: string
    @param file: filename we check

    @return: the database result. But noone cares anyway.

    """

    cnf = Config()

    if binary_type == "":
        # An empty binary type means we are looking at source.
        file_type = "dsc"
    else:
        file_type = binary_type

    # Override suite name; used for example with proposed-updates
    if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
        suite = cnf["Suite::%s::OverrideSuite" % (suite)]

    result = get_override(package, suite, component, file_type, session)

    # If checking for a source package fall back on the binary override type
    if file_type == "dsc" and len(result) < 1:
        result = get_override(package, suite, component, ['deb', 'udeb'], session)

    if len(result) < 1:
        return None

    # Remember the section and priority so we can check them later if appropriate
    override = result[0]
    self.pkg.files[file]["override section"] = override.section.section
    self.pkg.files[file]["override priority"] = override.priority.priority
    return override
-
- ################################################################################
def get_anyversion(self, sv_list, suite):
    """
    Return the highest version seen for C{suite} -- or any suite that
    enhances it -- among the given (suite, version) tuples, or None if
    none of them match.

    @type sv_list: list
    @param sv_list: list of (suite, version) tuples to check

    @type suite: string
    @param suite: suite name
    """
    cnf = Config()
    # The suite itself plus every suite configured as enhancing it,
    # lowercased once up front for the membership tests below.
    candidate_suites = [suite] + cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
    candidate_suites = [x.lower() for x in candidate_suites]

    anyversion = None
    for (seen_suite, seen_version) in sv_list:
        if seen_suite not in candidate_suites:
            continue
        if not anyversion or apt_pkg.VersionCompare(anyversion, seen_version) <= 0:
            anyversion = seen_version

    return anyversion
-
- ################################################################################
-
def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
    """
    Ensure versions are newer than existing packages in target
    suites and that cross-suite version checking rules as
    set out in the conf file are satisfied.

    Appends messages to self.rejects / self.warnings and may mark
    suites in self.pkg.changes["propdistribution"] for propagation.

    @type sv_list: list
    @param sv_list: list of (suite, version) tuples to check

    @type file: string
    @param file: filename the reject/warning messages should refer to

    @type new_version: string
    @param new_version: version of the upload being checked

    @type sourceful: bool
    @param sourceful: whether the upload carries source
    """

    cnf = Config()

    # Check versions for each target suite
    for target_suite in self.pkg.changes["distribution"].keys():
        must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
        must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

        # Enforce "must be newer than target suite" even if conffile omits it
        if target_suite not in must_be_newer_than:
            must_be_newer_than.append(target_suite)

        for (suite, existent_version) in sv_list:
            vercmp = apt_pkg.VersionCompare(new_version, existent_version)

            if suite in must_be_newer_than and sourceful and vercmp < 1:
                self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))

            if suite in must_be_older_than and vercmp > -1:
                cansave = 0

                if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                    # we really use the other suite, ignoring the conflicting one ...
                    addsuite = self.pkg.changes["distribution-version"][suite]

                    add_version = self.get_anyversion(sv_list, addsuite)
                    target_version = self.get_anyversion(sv_list, target_suite)

                    if not add_version:
                        # not add_version can only happen if we map to a suite
                        # that doesn't enhance the suite we're propup'ing from.
                        # so "propup-ver x a b c; map a d" is a problem only if
                        # d doesn't enhance a.
                        #
                        # i think we could always propagate in this case, rather
                        # than complaining. either way, this isn't a REJECT issue
                        #
                        # And - we really should complain to the dorks who configured dak
                        self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                        self.pkg.changes.setdefault("propdistribution", {})
                        self.pkg.changes["propdistribution"][addsuite] = 1
                        cansave = 1
                    elif not target_version:
                        # not targets_version is true when the package is NEW
                        # we could just stick with the "...old version..." REJECT
                        # for this, I think.
                        self.rejects.append("Won't propogate NEW packages.")
                    elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                        # propogation would be redundant. no need to reject though.
                        self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                        cansave = 1
                    elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                         apt_pkg.VersionCompare(add_version, target_version) >= 0:
                        # propogate!!
                        self.warnings.append("Propogating upload to %s" % (addsuite))
                        self.pkg.changes.setdefault("propdistribution", {})
                        self.pkg.changes["propdistribution"][addsuite] = 1
                        cansave = 1

                if not cansave:
                    # BUGFIX: this appended to the non-existent attribute
                    # 'self.reject' (AttributeError); every other check in
                    # this class records rejections in self.rejects.
                    self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
-
- ################################################################################
def check_binary_against_db(self, file, session):
    """
    Run cross-suite version checks for a binary upload and reject it
    outright if an identical package/version/architecture is already
    present in the archive.
    """
    package = self.pkg.files[file]["package"]
    version = self.pkg.files[file]["version"]
    architecture = self.pkg.files[file]["architecture"]

    # Ensure version is sane
    q = session.query(BinAssociation)
    q = q.join(DBBinary).filter(DBBinary.package == package)
    q = q.join(Architecture).filter(Architecture.arch_string.in_([architecture, 'all']))

    known_versions = [(assoc.suite.suite_name, assoc.binary.version) for assoc in q.all()]
    self.cross_suite_version_check(known_versions, file, version, sourceful=False)

    # Check for any existing copies of the file
    q = session.query(DBBinary).filter_by(package=package, version=version)
    q = q.join(Architecture).filter_by(arch_string=architecture)

    if q.count() > 0:
        self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
-
- ################################################################################
-
def check_source_against_db(self, file, session):
    """
    Run cross-suite version checks for the source package named in the
    .dsc against every version of it already known to the database.
    """
    source = self.pkg.dsc.get("source")
    version = self.pkg.dsc.get("version")

    # Ensure version is sane
    q = session.query(SrcAssociation).join(DBSource).filter(DBSource.source == source)
    known_versions = [(assoc.suite.suite_name, assoc.source.version) for assoc in q.all()]

    self.cross_suite_version_check(known_versions, file, version, sourceful=True)
-
- ################################################################################
def check_dsc_against_db(self, file, session):
    """
    Verify every file listed in the .dsc against incoming, the pool
    and the queue directories, checking md5sums and sizes.

    @warning: NB: this function can remove entries from the 'files' index [if
    the .orig.tar.gz is a duplicate of the one in the archive]; if
    you're iterating over 'files' and call this function as part of
    the loop, be sure to add a check to the top of the loop to
    ensure you haven't just tried to dereference the deleted entry.

    """

    Cnf = Config()
    self.pkg.orig_tar_gz = None

    # Try and find all files mentioned in the .dsc. This has
    # to work harder to cope with the multiple possible
    # locations of an .orig.tar.gz.
    # The ordering on the select is needed to pick the newest orig
    # when it exists in multiple places.
    for dsc_name, dsc_entry in self.pkg.dsc_files.items():
        found = None
        if self.pkg.files.has_key(dsc_name):
            actual_md5 = self.pkg.files[dsc_name]["md5sum"]
            actual_size = int(self.pkg.files[dsc_name]["size"])
            found = "%s in incoming" % (dsc_name)

            # Check the file does not already exist in the archive
            ql = get_poolfile_like_name(dsc_name, session)

            # Strip out anything that isn't '%s' or '/%s$'
            # BUGFIX: build a new filtered list rather than calling
            # ql.remove() while iterating ql, which skips the element
            # following each removal.
            ql = [i for i in ql if i.filename.endswith(dsc_name)]

            # "[dak] has not broken them. [dak] has fixed a
            # brokenness. Your crappy hack exploited a bug in
            # the old dinstall.
            #
            # "(Come on! I thought it was always obvious that
            # one just doesn't release different files with
            # the same name and version.)"
            #                        -- ajk@ on d-devel@l.d.o

            if len(ql) > 0:
                # Ignore exact matches for .orig.tar.gz
                match = 0
                if dsc_name.endswith(".orig.tar.gz"):
                    for i in ql:
                        if self.pkg.files.has_key(dsc_name) and \
                           int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                           self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                            self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
                            # TODO: Don't delete the entry, just mark it as not needed
                            # This would fix the stupidity of changing something we often iterate over
                            # whilst we're doing it
                            del self.pkg.files[dsc_name]
                            self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
                            match = 1

                if not match:
                    self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))

        elif dsc_name.endswith(".orig.tar.gz"):
            # Check in the pool
            ql = get_poolfile_like_name(dsc_name, session)

            # Strip out anything that isn't '%s' or '/%s$'
            # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
            # BUGFIX: same iterate-while-removing bug as above; filter
            # into a fresh list instead.
            ql = [i for i in ql if i.filename.endswith(dsc_name)]

            if len(ql) > 0:
                # Unfortunately, we may get more than one match here if,
                # for example, the package was in potato but had an -sa
                # upload in woody. So we need to choose the right one.

                # default to something sane in case we don't match any or have only one
                x = ql[0]

                if len(ql) > 1:
                    for i in ql:
                        old_file = os.path.join(i.location.path, i.filename)
                        old_file_fh = utils.open_file(old_file)
                        actual_md5 = apt_pkg.md5sum(old_file_fh)
                        old_file_fh.close()
                        actual_size = os.stat(old_file)[stat.ST_SIZE]
                        if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                            x = i

                # BUGFIX: use the selected match 'x' here, not the loop
                # variable 'i' — 'i' was the *last* candidate (and is not
                # even bound when len(ql) == 1), so the wrong pool file
                # could be checksummed and recorded.
                old_file = os.path.join(x.location.path, x.filename)
                old_file_fh = utils.open_file(old_file)
                actual_md5 = apt_pkg.md5sum(old_file_fh)
                old_file_fh.close()
                actual_size = os.stat(old_file)[stat.ST_SIZE]
                found = old_file
                suite_type = x.location.archive_type
                # need this for updating dsc_files in install()
                dsc_entry["files id"] = x.file_id
                # See install() in process-accepted...
                self.pkg.orig_tar_id = x.file_id
                self.pkg.orig_tar_gz = old_file
                self.pkg.orig_tar_location = x.location.location_id
            else:
                # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                # Not there? Check the queue directories...
                for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                    if not Cnf.has_key("Dir::Queue::%s" % (directory)):
                        continue
                    in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                    if os.path.exists(in_otherdir):
                        in_otherdir_fh = utils.open_file(in_otherdir)
                        actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                        in_otherdir_fh.close()
                        actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                        found = in_otherdir
                        self.pkg.orig_tar_gz = in_otherdir

                if not found:
                    self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
                    self.pkg.orig_tar_gz = -1
                    continue
        else:
            self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
            continue
        if actual_md5 != dsc_entry["md5sum"]:
            self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
        if actual_size != int(dsc_entry["size"]):
            self.rejects.append("size for %s doesn't match %s." % (found, file))
-
- ################################################################################
def accepted_checks(self, overwrite_checks, session):
    """
    Recheck anything that relies on the database; since that's not
    frozen between accept and our run time when called from p-a.

    @type overwrite_checks: bool
    @param overwrite_checks: set to False when installing to
        stable/oldstable, skipping the version/file overwrite checks
    """

    propogate = {}
    nopropogate = {}

    # Find the .dsc (again)
    dsc_filename = None
    for f in self.pkg.files.keys():
        if self.pkg.files[f]["type"] == "dsc":
            dsc_filename = f

    for checkfile in self.pkg.files.keys():
        # The .orig.tar.gz can disappear out from under us if it's a
        # duplicate of one in the archive.
        if not self.pkg.files.has_key(checkfile):
            continue

        entry = self.pkg.files[checkfile]

        # Check that the source still exists
        if entry["type"] == "deb":
            source_version = entry["source version"]
            source_package = entry["source package"]
            if not self.pkg.changes["architecture"].has_key("source") \
               and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
                self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))

        # Version and file overwrite checks
        if overwrite_checks:
            if entry["type"] == "deb":
                self.check_binary_against_db(checkfile, session)
            elif entry["type"] == "dsc":
                self.check_source_against_db(checkfile, session)
                self.check_dsc_against_db(dsc_filename, session)

        # propogate in the case it is in the override tables:
        for suite in self.pkg.changes.get("propdistribution", {}).keys():
            if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                propogate[suite] = 1
            else:
                nopropogate[suite] = 1

    for suite in propogate.keys():
        if suite in nopropogate:
            continue
        self.pkg.changes["distribution"][suite] = 1

    for checkfile in self.pkg.files.keys():
        # BUGFIX: re-fetch the entry for *this* file; previously the
        # stale 'entry' left over from the last iteration of the loop
        # above was used, so every file was checked against the wrong
        # package/component.
        entry = self.pkg.files[checkfile]

        # Check the package is still in the override tables
        for suite in self.pkg.changes["distribution"].keys():
            if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                self.rejects.append("%s is NEW for %s." % (checkfile, suite))
-
- ################################################################################
- # This is not really a reject, but an unaccept, but since a) the code for
- # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
- # extremely rare, for now we'll go with whining at our admin folks...
-
- def do_unaccept(self):
- cnf = Config()
-
- self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
- self.Subst["__REJECT_MESSAGE__"] = self.package_info()
- self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
- self.Subst["__BCC__"] = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
- if cnf.has_key("Dinstall::Bcc"):
- self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
-
- template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
-
- reject_mail_message = utils.TemplateSubst(self.Subst, template)
-
- # Write the rejection email out as the <foo>.reason file
- reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
- reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
-
- # If we fail here someone is probably trying to exploit the race
- # so let's just raise an exception ...
- if os.path.exists(reject_filename):
- os.unlink(reject_filename)
-
- fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
- os.write(fd, reject_mail_message)
- os.close(fd)
-
- utils.send_mail(reject_mail_message)
-
- del self.Subst["__REJECTOR_ADDRESS__"]
- del self.Subst["__REJECT_MESSAGE__"]
- del self.Subst["__CC__"]
-
- ################################################################################
- # If any file of an upload has a recent mtime then chances are good
- # the file is still being uploaded.
-
def upload_too_new(self):
    """
    Heuristically decide whether the upload is still in progress.

    If any file of the upload has a recent mtime (within
    Dinstall::SkipTime seconds of now) then chances are good the file
    is still being uploaded.

    @return: True if any file is too recent, otherwise False
    """
    cnf = Config()
    too_new = False
    # Move back to the original directory to get accurate time stamps
    cwd = os.getcwd()
    os.chdir(self.pkg.directory)
    try:
        file_list = self.pkg.files.keys()
        file_list.extend(self.pkg.dsc_files.keys())
        file_list.append(self.pkg.changes_file)
        for f in file_list:
            try:
                last_modified = time.time() - os.path.getmtime(f)
                if last_modified < int(cnf["Dinstall::SkipTime"]):
                    too_new = True
                    break
            except OSError:
                # BUGFIX: was a bare 'except: pass', which also hid
                # programming errors.  Only a missing/unstatable file
                # is expected here; treat it as not-too-new.
                pass
    finally:
        # Always restore the previous working directory, even if an
        # unexpected exception propagates.
        os.chdir(cwd)
    return too_new