-
def reject (str, prefix="Rejected: "):
    """Accumulate a rejection reason.

    Appends *prefix* + *str* + a newline to the module-global
    reject_message.  A falsy *str* (empty string or None) is ignored,
    so callers can pass check results through unconditionally.

    NOTE: the parameter name shadows the builtin str(); kept for
    interface compatibility with existing callers.
    """
    global reject_message
    if not str:
        return
    reject_message += prefix + str + "\n"
-
def recheck():
    """Re-run source-presence and version/overwrite checks on the current
    upload (Upload.pkg), accumulating problems in the module-global
    reject_message, then interactively decide what to do with the upload.

    Returns 1 if the upload passed (or nothing was rejected), 0 if it was
    rejected or skipped.  Calls sys.exit(0) if the operator answers 'Q'uit.

    Relies on module globals: Upload, Cnf, Options, utils, re_no_epoch,
    re_default_answer, and the reject() helper.
    """
    global reject_message
    files = Upload.pkg.files
    reject_message = ""

    for f in files.keys():
        # The .orig.tar.gz can disappear out from under us if it's a
        # duplicate of one in the archive.
        if not files.has_key(f):
            continue
        # Check that the source still exists for every binary upload:
        # either the upload itself carries source, or the source is
        # already known to the target distribution(s).
        if files[f]["type"] == "deb":
            source_version = files[f]["source version"]
            source_package = files[f]["source package"]
            if not Upload.pkg.changes["architecture"].has_key("source") \
               and not Upload.source_exists(source_package, source_version, Upload.pkg.changes["distribution"].keys()):
                # Source not in the archive yet — it may still be sitting in
                # one of the holding queues; look for its .dsc there before
                # rejecting.  Strip the epoch: filenames never carry it.
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                found = 0
                for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
                    if Cnf.has_key("Dir::Queue::%s" % (q)):
                        if os.path.exists(Cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
                            found = 1
                if not found:
                    reject("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Version and file overwrite checks against the archive database.
        # Empty prefix: the check_* helpers return pre-formatted messages.
        if files[f]["type"] == "deb":
            reject(Upload.check_binary_against_db(f), "")
        elif files[f]["type"] == "dsc":
            reject(Upload.check_source_against_db(f), "")
            (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(f)
            reject(reject_msg, "")

    # Any "Rejected: " line present means at least one check failed.
    if reject_message.find("Rejected") != -1:
        # "XXX" is a deliberately impossible answer so the prompt loop
        # below runs at least once; non-interactive modes pre-select Skip.
        answer = "XXX"
        if Options["No-Action"] or Options["Automatic"] or Options["Trainee"]:
            answer = 'S'

        print "REJECT\n" + reject_message,
        prompt = "[R]eject, Skip, Quit ?"

        # Keep prompting until the (upper-cased first letter of the)
        # answer appears in the prompt string; empty input takes the
        # default extracted from the prompt by re_default_answer.
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.match(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()

        if answer == 'R':
            Upload.do_reject(0, reject_message)
            # Drop the cached parse of the .changes file ("foo.changes" ->
            # "foo.dak"); [:-8] strips the ".changes" suffix.
            os.unlink(Upload.pkg.changes_file[:-8]+".dak")
            return 0
        elif answer == 'S':
            return 0
        elif answer == 'Q':
            end()
            sys.exit(0)

    return 1
-
-################################################################################
-
def indiv_sg_compare (a, b):
    """Comparator for .changes entries within one source group: newest
    version first, then uploads that include source, then by filename."""
    # Newer versions sort earlier, hence the negation.
    version_order = apt_pkg.VersionCompare(a["version"], b["version"])
    if version_order:
        return -version_order

    # An upload carrying source outranks a binary-only one.
    a_has_source = a["architecture"].get("source")
    b_has_source = b["architecture"].get("source")
    if a_has_source and not b_has_source:
        return -1
    if b_has_source and not a_has_source:
        return 1

    # Final tie-break: lexicographic filename order.
    return cmp(a["filename"], b["filename"])
-
-############################################################
-
def sg_compare (a, b):
    """Comparator for source groups: sort by note state (ascending),
    then by whether the source is already in the database (known
    sources first), then by time of the oldest upload (ascending).

    a and b are (source_name, group_info) tuples as built by
    sort_changes(); only the group_info dict ("note_state",
    "source_in_database", "oldest") is examined.

    Fix: the original placed this docstring AFTER the first two
    statements, making it a dead string literal rather than a
    docstring; it now sits in docstring position.  cmp() calls were
    replaced with equivalent explicit comparisons.
    """
    a = a[1]
    b = b[1]

    # Sort by have note (ascending: un-noted groups first)
    a_note_state = a["note_state"]
    b_note_state = b["note_state"]
    if a_note_state < b_note_state:
        return -1
    elif a_note_state > b_note_state:
        return 1

    # Sort by source already in database (descending: known sources first)
    a_in_db = a["source_in_database"]
    b_in_db = b["source_in_database"]
    if a_in_db != b_in_db:
        return -1 if a_in_db > b_in_db else 1

    # Sort by time of oldest upload (ascending)
    if a["oldest"] < b["oldest"]:
        return -1
    elif a["oldest"] > b["oldest"]:
        return 1
    return 0
-
def sort_changes(changes_files):
    """Sort into source groups, then sort each source group by version,
    have source, filename.  Finally, sort the source groups by have
    note, source-in-database status, and time of oldest upload.

    Takes a list of .changes filenames and returns the same filenames
    in processing order.  Relies on module globals: Upload, projectB,
    database, and the indiv_sg_compare / sg_compare comparators.
    """
    # Nothing to sort.
    if len(changes_files) == 1:
        return changes_files

    sorted_list = []
    cache = {}
    # Read in all the .changes files
    for filename in changes_files:
        try:
            Upload.pkg.changes_file = filename
            Upload.init_vars()
            Upload.update_vars()
            cache[filename] = copy.copy(Upload.pkg.changes)
            cache[filename]["filename"] = filename
        except:
            # Unparseable .changes: emit it first, unsorted.
            # NOTE(review): 'break' aborts the whole read loop, so any
            # files AFTER the failing one are silently dropped from the
            # returned list — looks like 'continue' was intended; confirm.
            sorted_list.append(filename)
            break
    # Divide the .changes into per-source groups
    per_source = {}
    for filename in cache.keys():
        source = cache[filename]["source"]
        if not per_source.has_key(source):
            per_source[source] = {}
            per_source[source]["list"] = []
        per_source[source]["list"].append(cache[filename])
    # Determine oldest time and have note status for each source group
    for source in per_source.keys():
        # NOTE(review): source name is interpolated straight into SQL;
        # names come from .changes files, so this should be parameterized
        # (or source validated upstream) — confirm.
        q = projectB.query("SELECT 1 FROM source WHERE source = '%s'" % source)
        ql = q.getresult()
        per_source[source]["source_in_database"] = len(ql)>0
        source_list = per_source[source]["list"]
        first = source_list[0]
        # Oldest upload = minimum mtime of the group's .changes files.
        oldest = os.stat(first["filename"])[stat.ST_MTIME]
        have_note = 0
        for d in per_source[source]["list"]:
            mtime = os.stat(d["filename"])[stat.ST_MTIME]
            if mtime < oldest:
                oldest = mtime
            # has_new_comment() is truthy per noted upload; summing counts
            # how many uploads in the group carry a note.
            have_note += (database.has_new_comment(d["source"], d["version"]))
        per_source[source]["oldest"] = oldest
        # note_state: 0 = no uploads noted, 1 = some, 2 = all.
        if not have_note:
            per_source[source]["note_state"] = 0; # none
        elif have_note < len(source_list):
            per_source[source]["note_state"] = 1; # some
        else:
            per_source[source]["note_state"] = 2; # all
        # Order uploads within the group (Python 2 comparator sort).
        per_source[source]["list"].sort(indiv_sg_compare)
    # Order the groups themselves, then flatten back to filenames.
    per_source_items = per_source.items()
    per_source_items.sort(sg_compare)
    for i in per_source_items:
        for j in i[1]["list"]:
            sorted_list.append(j["filename"])
    return sorted_list
-