X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fqueue.py;h=7a49242284026ec39f12e5fe3d13928d46094819;hb=ec257c02a5d62fd27844c70814acd9616b24b4c8;hp=3ab87246d1601f1fc5dd52912293deb155f95b38;hpb=d74b701dca669286bcb68578c584e1de0080289b;p=dak.git

diff --git a/daklib/queue.py b/daklib/queue.py
index 3ab87246..7a492422 100755
--- a/daklib/queue.py
+++ b/daklib/queue.py
@@ -78,6 +78,7 @@ def get_type(f, session):
     elif re_source_ext.match(f["type"]):
         file_type = "dsc"
    else:
+        file_type = f["type"]
         utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
 
     # Validate the override type
@@ -91,10 +92,13 @@ def get_type(f, session):
 
 # Determine what parts in a .changes are NEW
 
-def determine_new(changes, files, warn=1):
+def determine_new(filename, changes, files, warn=1, session = None):
     """
     Determine what parts in a C{changes} file are NEW.
 
+    @type filename: str
+    @param filename: changes filename
+
     @type changes: Upload.Pkg.changes dict
     @param changes: Changes dictionary
 
@@ -108,15 +112,22 @@ def determine_new(changes, files, warn=1):
     @return: dictionary of NEW components.
 
     """
+    # TODO: This should all use the database instead of parsing the changes
+    # file again
     new = {}
+    byhand = {}
 
-    session = DBConn().session()
+    dbchg = get_dbchange(filename, session)
+    if dbchg is None:
+        print "Warning: cannot find changes file in database; won't check byhand"
 
     # Build up a list of potentially new things
     for name, f in files.items():
-        # Skip byhand elements
-#        if f["type"] == "byhand":
-#            continue
+        # Keep a record of byhand elements
+        if f["section"] == "byhand":
+            byhand[name] = 1
+            continue
+
         pkg = f["package"]
         priority = f["priority"]
         section = f["section"]
@@ -151,19 +162,40 @@ def determine_new(changes, files, warn=1):
     # Fix up the list of target suites
     cnf = Config()
     for suite in changes["suite"].keys():
-        override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
-        if override:
-            (olderr, newerr) = (get_suite(suite, session) == None,
-                                get_suite(override, session) == None)
-            if olderr or newerr:
-                (oinv, newinv) = ("", "")
-                if olderr: oinv = "invalid "
-                if newerr: ninv = "invalid "
-                print "warning: overriding %ssuite %s to %ssuite %s" % (
-                    oinv, suite, ninv, override)
-            del changes["suite"][suite]
-            changes["suite"][override] = 1
+        oldsuite = get_suite(suite, session)
+        if not oldsuite:
+            print "WARNING: Invalid suite %s found" % suite
+            continue
+
+        if oldsuite.overridesuite:
+            newsuite = get_suite(oldsuite.overridesuite, session)
+
+            if newsuite:
+                print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
+                    oldsuite.overridesuite, suite)
+                del changes["suite"][suite]
+                changes["suite"][oldsuite.overridesuite] = 1
+            else:
+                print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
+                    oldsuite.overridesuite, suite)
 
+    # Check for unprocessed byhand files
+    if dbchg is not None:
+        for b in byhand.keys():
+            # Find the file entry in the database
+            found = False
+            for f in dbchg.files:
+                if f.filename == b:
+                    found = True
+                    # If it's processed, we can ignore it
+                    if f.processed:
+                        del byhand[b]
+                    break
+
+            if not found:
+                print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % b
+
+    # Check for new stuff
     for suite in changes["suite"].keys():
         for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
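
A rough caller-side sketch (not part of the patch) may help here: the reworked determine_new() now takes the .changes filename plus an explicit database session and returns the BYHAND entries alongside the NEW dictionary, and check_valid() accepts the same session. The summarise_new() helper, the warn=0 choice and the import paths are assumptions for illustration only.

from daklib.dbconn import DBConn
from daklib.queue import determine_new, check_valid

def summarise_new(upload):
    # 'upload' is assumed to be a daklib.queue.Upload with its .pkg already loaded.
    session = DBConn().session()
    try:
        # New signature: filename first, session passed in, (new, byhand) returned.
        new, byhand = determine_new(upload.pkg.changes_file, upload.pkg.changes,
                                    upload.pkg.files, warn=0, session=session)
        # Section/priority ids are resolved against the same session.
        check_valid(new, session=session)
        for name in byhand.keys():
            print "BYHAND: %s" % name
        for pkg in new.keys():
            print "NEW: %s (%s, %s)" % (pkg, new[pkg]["component"], new[pkg]["type"])
        return new, byhand
    finally:
        session.close()

Passing the session in from the caller is what lets determine_new() share one connection with the get_dbchange() lookup above instead of opening its own, as the removed DBConn().session() call used to do.
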
@@ -181,13 +213,11 @@ def determine_new(changes, files, warn=1):
         if new[pkg].has_key("othercomponents"):
             print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
 
-    session.close()
-
-    return new
+    return new, byhand
 
 ################################################################################
 
-def check_valid(new):
+def check_valid(new, session = None):
     """
     Check if section and priority for NEW packages exist in database.
     Additionally does sanity checks:
@@ -204,13 +234,13 @@ def check_valid(new, session = None):
         priority_name = new[pkg]["priority"]
         file_type = new[pkg]["type"]
 
-        section = get_section(section_name)
+        section = get_section(section_name, session)
         if section is None:
             new[pkg]["section id"] = -1
         else:
             new[pkg]["section id"] = section.section_id
 
-        priority = get_priority(priority_name)
+        priority = get_priority(priority_name, session)
         if priority is None:
             new[pkg]["priority id"] = -1
         else:
@@ -250,6 +280,96 @@ class TarTime(object):
 
 ###############################################################################
 
+def prod_maintainer(notes, upload):
+    cnf = Config()
+
+    # Here we prepare an editor and get them ready to prod...
+    (fd, temp_filename) = utils.temp_filename()
+    temp_file = os.fdopen(fd, 'w')
+    for note in notes:
+        temp_file.write(note.comment)
+    temp_file.close()
+    editor = os.environ.get("EDITOR","vi")
+    answer = 'E'
+    while answer == 'E':
+        os.system("%s %s" % (editor, temp_filename))
+        temp_fh = utils.open_file(temp_filename)
+        prod_message = "".join(temp_fh.readlines())
+        temp_fh.close()
+        print "Prod message:"
+        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
+        prompt = "[P]rod, Edit, Abandon, Quit ?"
+        answer = "XXX"
+        while prompt.find(answer) == -1:
+            answer = utils.our_raw_input(prompt)
+            m = re_default_answer.search(prompt)
+            if answer == "":
+                answer = m.group(1)
+            answer = answer[:1].upper()
+    os.unlink(temp_filename)
+    if answer == 'A':
+        return
+    elif answer == 'Q':
+        end()
+        sys.exit(0)
+    # Otherwise, do the prodding...
+    user_email_address = utils.whoami() + " <%s>" % (
+        cnf["Dinstall::MyAdminAddress"])
+
+    Subst = upload.Subst
+
+    Subst["__FROM_ADDRESS__"] = user_email_address
+    Subst["__PROD_MESSAGE__"] = prod_message
+    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
+
+    prod_mail_message = utils.TemplateSubst(
+        Subst,cnf["Dir::Templates"]+"/process-new.prod")
+
+    # Send the prod mail
+    utils.send_mail(prod_mail_message)
+
+    print "Sent prodding message"
+
+################################################################################
+
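
prod_maintainer() above and edit_note() below share the same prompt-and-default-answer loop. Here is a standalone sketch of that pattern, with dak's re_default_answer and utils.our_raw_input swapped for plain re and raw_input so it runs outside dak; the ask() helper is illustrative only, not dak API.

import re

# Stand-in for dak's re_default_answer: captures the bracketed default choice.
re_default_answer = re.compile(r"\[(.*)\]")

def ask(prompt):
    # Loop until the reply is one of the letters offered in the prompt; an empty
    # reply selects the bracketed default, e.g. "P" for "[P]rod, Edit, Abandon, Quit ?".
    answer = "XXX"
    while prompt.find(answer) == -1:
        answer = raw_input(prompt)
        m = re_default_answer.search(prompt)
        if answer == "":
            answer = m.group(1)
        answer = answer[:1].upper()
    return answer

if __name__ == "__main__":
    print ask("[P]rod, Edit, Abandon, Quit ? ")
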
+def edit_note(note, upload, session, trainee=False):
+    # Write the current data to a temporary file
+    (fd, temp_filename) = utils.temp_filename()
+    editor = os.environ.get("EDITOR","vi")
+    answer = 'E'
+    while answer == 'E':
+        os.system("%s %s" % (editor, temp_filename))
+        temp_file = utils.open_file(temp_filename)
+        newnote = temp_file.read().rstrip()
+        temp_file.close()
+        print "New Note:"
+        print utils.prefix_multi_line_string(newnote,"  ")
+        prompt = "[D]one, Edit, Abandon, Quit ?"
+        answer = "XXX"
+        while prompt.find(answer) == -1:
+            answer = utils.our_raw_input(prompt)
+            m = re_default_answer.search(prompt)
+            if answer == "":
+                answer = m.group(1)
+            answer = answer[:1].upper()
+    os.unlink(temp_filename)
+    if answer == 'A':
+        return
+    elif answer == 'Q':
+        end()
+        sys.exit(0)
+
+    comment = NewComment()
+    comment.package = upload.pkg.changes["source"]
+    comment.version = upload.pkg.changes["version"]
+    comment.comment = newnote
+    comment.author = utils.whoami()
+    comment.trainee = trainee
+    session.add(comment)
+    session.commit()
+
+###############################################################################
+
 class Upload(object):
     """
     Everything that has to do with an upload processed.
@@ -332,8 +452,18 @@ class Upload(object):
         self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
         self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
 
-        if "sponsoremail" in self.pkg.changes:
-            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
+        # Process policy doesn't set the fingerprint field and I don't want to make it
+        # do it for now as I don't want to have to deal with the case where we accepted
+        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
+        # the meantime so the package will be remarked as rejectable. Urgh.
+        # TODO: Fix this properly
+        if self.pkg.changes.has_key('fingerprint'):
+            session = DBConn().session()
+            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
+            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
+                if self.pkg.changes.has_key("sponsoremail"):
+                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
+            session.close()
 
         if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
             self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
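
The hunk above changes how __MAINTAINER_TO__ is built: the sponsor address is appended only when check_if_upload_is_sponsored() reports the upload as sponsored, rather than whenever a sponsoremail field is present. Below is a minimal sketch of that addressing rule as a pure function; maintainer_to(), its parameters and the example addresses are assumptions, and the real decision comes from Upload.check_if_upload_is_sponsored(), whose rules are outside this hunk.

def maintainer_to(maintainer2047, sponsoremail=None, is_sponsored=False):
    # Start with the RFC2047-encoded maintainer address from the .changes file.
    to = maintainer2047
    # Previously the sponsor was added whenever "sponsoremail" was present;
    # now it is added only for uploads detected as sponsored.
    if is_sponsored and sponsoremail:
        to += ", %s" % sponsoremail
    return to

# Hypothetical example:
#   maintainer_to("Jane Doe <jane@example.org>", "sponsor@example.org", True)
#   -> "Jane Doe <jane@example.org>, sponsor@example.org"
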
@@ -346,6 +476,7 @@ class Upload(object):
         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
         self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
         self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
+        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
 
     ###########################################################################
     def load_changes(self, filename):
@@ -569,8 +700,8 @@ class Upload(object):
         architecture = control.Find("Architecture")
         upload_suite = self.pkg.changes["distribution"].keys()[0]
 
-        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
-           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
+        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
+           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
             self.rejects.append("Unknown architecture '%s'." % (architecture))
 
         # Ensure the architecture of the .deb is one of the ones
@@ -779,7 +910,7 @@ class Upload(object):
             location = cnf["Dir::Pool"]
             l = get_location(location, entry["component"], session=session)
             if l is None:
-                self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %)" % entry["component"])
+                self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
                 entry["location id"] = -1
             else:
                 entry["location id"] = l.location_id
@@ -1004,11 +1135,24 @@ class Upload(object):
         session = DBConn().session()
         self.check_source_against_db(dsc_filename, session)
         self.check_dsc_against_db(dsc_filename, session)
-        session.close()
+
+        dbchg = get_dbchange(self.pkg.changes_file, session)
 
         # Finally, check if we're missing any files
         for f in self.later_check_files:
-            self.rejects.append("Could not find file %s references in changes" % f)
+            print 'XXX: %s' % f
+            # Check if we've already processed this file if we have a dbchg object
+            ok = False
+            if dbchg:
+                for pf in dbchg.files:
+                    if pf.filename == f and pf.processed:
+                        self.notes.append('%s was already processed so we can go ahead' % f)
+                        ok = True
+                        del self.pkg.files[f]
+            if not ok:
+                self.rejects.append("Could not find file %s references in changes" % f)
+
+        session.close()
 
         return True
 
@@ -1773,7 +1917,9 @@ distribution."""
         self.Subst["__SHORT_SUMMARY__"] = short_summary
 
         for dist in self.pkg.changes["distribution"].keys():
-            announce_list = cnf.Find("Suite::%s::Announce" % (dist))
+            suite = get_suite(dist)
+            if suite is None: continue
+            announce_list = suite.announce
             if announce_list == "" or lists_done.has_key(announce_list):
                 continue
 
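
Both the announce hunk above and the CopyChanges hunk that follows read per-suite settings from columns on the Suite ORM object (suite.announce, suite.copychanges) instead of Suite::<name>::... configuration keys. A sketch of that kind of lookup, assuming Suite and DBConn are importable from daklib.dbconn; collect_suite_metadata() and its return shape are illustrative only.

from daklib.dbconn import DBConn, Suite

def collect_suite_metadata(distributions):
    # 'distributions' is the list of suite names from a .changes Distribution field.
    session = DBConn().session()
    try:
        announce = set()
        copy_changes = set()
        for suite in session.query(Suite).filter(Suite.suite_name.in_(distributions)):
            # Empty or NULL columns mean "nothing to announce / nowhere to copy".
            if suite.announce:
                announce.add(suite.announce)
            if suite.copychanges is not None:
                copy_changes.add(suite.copychanges)
        return announce, copy_changes
    finally:
        session.close()
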
@@ -1910,10 +2056,9 @@ distribution."""
             stats.accept_bytes += float(entry["size"])
 
         # Copy the .changes file across for suites which need it.
-        copy_changes = {}
-        for suite_name in self.pkg.changes["distribution"].keys():
-            if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
-                copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
+        copy_changes = dict([(x.copychanges, '')
+                             for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
+                             if x.copychanges is not None])
 
         for dest in copy_changes.keys():
             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
@@ -1931,7 +2076,6 @@ distribution."""
             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
 
         self.update_subst()
-        self.Subst["__SUITE__"] = ""
         self.Subst["__SUMMARY__"] = summary
         mail_message = utils.TemplateSubst(self.Subst,
                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
@@ -2222,8 +2366,9 @@ distribution."""
             file_type = binary_type
 
         # Override suite name; used for example with proposed-updates
-        if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
-            suite = cnf["Suite::%s::OverrideSuite" % (suite)]
+        oldsuite = get_suite(suite, session)
+        if oldsuite is not None and oldsuite.overridesuite:
+            suite = oldsuite.overridesuite
 
         result = get_override(package, suite, component, file_type, session)
 
@@ -2612,45 +2757,6 @@ distribution."""
             if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
 
-    ################################################################################
-    # This is not really a reject, but an unaccept, but since a) the code for
-    # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
-    # extremely rare, for now we'll go with whining at our admin folks...
-
-    def do_unaccept(self):
-        cnf = Config()
-
-        self.update_subst()
-        self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
-        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
-        self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
-        self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
-        if cnf.has_key("Dinstall::Bcc"):
-            self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
-
-        template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
-
-        reject_mail_message = utils.TemplateSubst(self.Subst, template)
-
-        # Write the rejection email out as the .reason file
-        reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
-        reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
-
-        # If we fail here someone is probably trying to exploit the race
-        # so let's just raise an exception ...
-        if os.path.exists(reject_filename):
-            os.unlink(reject_filename)
-
-        fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
-        os.write(fd, reject_mail_message)
-        os.close(fd)
-
-        utils.send_mail(reject_mail_message)
-
-        del self.Subst["__REJECTOR_ADDRESS__"]
-        del self.Subst["__REJECT_MESSAGE__"]
-        del self.Subst["__CC__"]
-
 ################################################################################
 # If any file of an upload has a recent mtime then chances are good
 # the file is still being uploaded.
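
Both determine_new() and in_override_p() above now resolve the override source through Suite.overridesuite in the database rather than the Suite::<name>::OverrideSuite configuration key. A minimal sketch of that lookup, mirroring the determine_new() variant that also checks the override suite exists; override_suite_for() is a hypothetical helper and the daklib.dbconn import location is an assumption.

from daklib.dbconn import get_suite

def override_suite_for(suite_name, session):
    # Return the name of the suite whose overrides should be consulted for
    # suite_name: its overridesuite if one is configured and exists, else itself.
    suite = get_suite(suite_name, session)
    if suite is None:
        return None
    if suite.overridesuite and get_suite(suite.overridesuite, session) is not None:
        return suite.overridesuite
    return suite_name
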