- changes = changesfiles.keys()
- return null_adv_changes
-
def load_adv_changes():
    """Scan the working directory for .changes files belonging to the
    current advisory and record them in the module-level state.

    Side effects: appends newly seen filenames to the global 'changes'
    list and marks each "source version" pair's architectures in the
    global 'srcverarches' map.
    """
    global srcverarches, changes

    for filename in os.listdir("."):
        if not filename.endswith(".changes"):
            continue
        Upload.init_vars()
        Upload.pkg.changes_file = filename
        Upload.update_vars()
        pkg_changes = Upload.pkg.changes
        # Only uploads explicitly tagged with this advisory id count.
        if "adv id" not in pkg_changes:
            continue
        if pkg_changes["adv id"] != advisory:
            continue

        if filename not in changes:
            changes.append(filename)
        srcver = "%s %s" % (pkg_changes["source"], pkg_changes["version"])
        arch_map = srcverarches.setdefault(srcver, {})
        for arch in pkg_changes["architecture"].keys():
            arch_map[arch] = 1
-
-def advisory_info():
- if advisory != None:
- print "Advisory: %s" % (advisory)
- print "Changes:"
- for c in changes:
- print " %s" % (c)
-
- print "Packages:"
- svs = srcverarches.keys()
- svs.sort()
- for sv in svs:
- as = srcverarches[sv].keys()
- as.sort()
- print " %s (%s)" % (sv, ", ".join(as))
-
def prompt(opts, default):
    """Ask the user to pick one of 'opts' by its first letter.

    The option whose first letter equals 'default' is displayed
    bracketed; in --automatic mode (or on bare Return) the default is
    taken.  Returns the full option string that was selected.
    """
    choices = {}
    pieces = []
    for opt in opts:
        choices[opt[0].upper()] = opt
        if opt[0] == default:
            pieces.append("[%s]%s" % (opt[0], opt[1:]))
        else:
            pieces.append(opt)
    question = ", ".join(pieces) + "? "

    answer = None
    if Options["Automatic"]:
        answer = default

    while answer not in choices:
        # Appending the default makes an empty reply select it; only the
        # first character of the reply matters.
        answer = (utils.our_raw_input(question) + default)[:1].upper()

    return choices[answer]
-
def add_changes(extras):
    """Claim the given extra .changes files for the current advisory.

    Each file is appended to the global 'changes' list, its
    source/version architectures are recorded in 'srcverarches', and
    its .dak metadata is rewritten with the advisory id.
    """
    for changes_file in extras:
        changes.append(changes_file)
        Upload.init_vars()
        Upload.pkg.changes_file = changes_file
        Upload.update_vars()
        pkg_changes = Upload.pkg.changes
        srcver = "%s %s" % (pkg_changes["source"], pkg_changes["version"])
        arch_map = srcverarches.setdefault(srcver, {})
        for arch in pkg_changes["architecture"].keys():
            arch_map[arch] = 1
        # Tag the upload with this advisory and persist the .dak file.
        pkg_changes["adv id"] = advisory
        Upload.dump_vars(os.getcwd())
-
-def yes_no(prompt):
- if Options["Automatic"]: return True
- while 1:
- answer = utils.our_raw_input(prompt + " ").lower()
- if answer in "yn":
- return answer == "y"
- print "Invalid answer; please try again."
-
-def do_upload():
- if Options["No-Upload"]:
- print "Not uploading as requested"
- elif Options["Foreground-Upload"]:
- actually_upload(changes)
- else:
- child = os.fork()
- if child == 0:
- actually_upload(changes)
- os._exit(0)
- print "Uploading in the background"
-
def actually_upload(changes_files):
    """Copy the pool files referenced by each .changes file into the
    upload queue directory, grouped by the upload URI derived from the
    files' (original) components.

    Also prunes each copied non-.changes file from the per-suite buildd
    queue directories and, unless No-Action is set, appends the
    source/version pairs of sourceful testing uploads to the
    testing-processed log.
    """
    file_list = ""
    suites = {}
    component_mapping = {}
    # Config maps archive components to upload URIs ("host:path").
    for component in Cnf.SubTree("Security-Install::ComponentMappings").List():
        component_mapping[component] = Cnf["Security-Install::ComponentMappings::%s" % (component)]
    uploads = {}; # uploads[uri] = file_list
    changesfiles = {}; # changesfiles[uri] = file_list
    package_list = {} # package_list[source_name][version]
    changes_files.sort(utils.changes_compare)
    for changes_file in changes_files:
        changes_file = utils.validate_changes_file_arg(changes_file)
        # Reset variables
        components = {}
        upload_uris = {}
        file_list = []
        Upload.init_vars()
        # Parse the .dak file for the .changes file
        Upload.pkg.changes_file = changes_file
        Upload.update_vars()
        files = Upload.pkg.files
        changes = Upload.pkg.changes
        dsc = Upload.pkg.dsc
        # Build the file list for this .changes file
        for file in files.keys():
            poolname = os.path.join(Cnf["Dir::Root"], Cnf["Dir::PoolRoot"],
                                    utils.poolify(changes["source"], files[file]["component"]),
                                    file)
            file_list.append(poolname)
            # "original component" wins when present so the file maps
            # back to the component it originally came from.
            orig_component = files[file].get("original component", files[file]["component"])
            components[orig_component] = ""
        # Determine the upload uri for this .changes file
        for component in components.keys():
            upload_uri = component_mapping.get(component)
            if upload_uri:
                upload_uris[upload_uri] = ""
        # Exactly one upload URI must result for a .changes file;
        # anything else is a fatal configuration/packaging error.
        num_upload_uris = len(upload_uris.keys())
        if num_upload_uris == 0:
            utils.fubar("%s: No valid upload URI found from components (%s)."
                        % (changes_file, ", ".join(components.keys())))
        elif num_upload_uris > 1:
            utils.fubar("%s: more than one upload URI (%s) from components (%s)."
                        % (changes_file, ", ".join(upload_uris.keys()),
                           ", ".join(components.keys())))
        upload_uri = upload_uris.keys()[0]
        # Update the file list for the upload uri
        if not uploads.has_key(upload_uri):
            uploads[upload_uri] = []
        uploads[upload_uri].extend(file_list)
        # Update the changes list for the upload uri
        if not changesfiles.has_key(upload_uri):
            changesfiles[upload_uri] = []
        changesfiles[upload_uri].append(changes_file)
        # Remember the suites and source name/version
        for suite in changes["distribution"].keys():
            suites[suite] = ""
        # Remember the source name and version
        if changes["architecture"].has_key("source") and \
           changes["distribution"].has_key("testing"):
            if not package_list.has_key(dsc["source"]):
                package_list[dsc["source"]] = {}
            package_list[dsc["source"]][dsc["version"]] = ""

    for uri in uploads.keys():
        # The .changes files are uploaded along with the pool files.
        uploads[uri].extend(changesfiles[uri])
        (host, path) = uri.split(":")
        # file_list = " ".join(uploads[uri])
        print "Moving files to UploadQueue"
        for filename in uploads[uri]:
            utils.copy(filename, Cnf["Dir::Upload"])
            # .changes files have already been moved to queue/done by p-a
            if not filename.endswith('.changes'):
                remove_from_buildd(suites, filename)
        #spawn("lftp -c 'open %s; cd %s; put %s'" % (host, path, file_list))

    if not Options["No-Action"]:
        # Record source/version pairs destined for testing so later
        # runs can tell what has already been processed.
        filename = "%s/testing-processed" % (Cnf["Dir::Log"])
        file = utils.open_file(filename, 'a')
        for source in package_list.keys():
            for version in package_list[source].keys():
                file.write(" ".join([source, version])+'\n')
        file.close()
-
-def remove_from_buildd(suites, filename):
- """Check the buildd dir for each suite and remove the file if needed"""
- builddbase = Cnf["Dir::QueueBuild"]
- filebase = os.path.basename(filename)
- for s in suites:
- try:
- os.unlink(os.path.join(builddbase, s, filebase))
- except OSError, e:
- utils.warn("Problem removing %s from buildd queue %s [%s]" % (filebase, s, str(e)))
-
-
def generate_advisory(template):
    """Build the advisory mail text for the current global 'advisory'
    from the collected global 'changes' files and render it through
    utils.TemplateSubst with 'template'.  Returns the rendered text.
    """
    global changes, advisory

    adv_packages = []
    updated_pkgs = {}; # updated_pkgs[distro][arch][file] = {path,md5,size}

    for arg in changes:
        arg = utils.validate_changes_file_arg(arg)
        Upload.pkg.changes_file = arg
        Upload.init_vars()
        Upload.update_vars()

        src = Upload.pkg.changes["source"]
        src_ver = "%s (%s)" % (src, Upload.pkg.changes["version"])
        if src_ver not in adv_packages:
            adv_packages.append(src_ver)

        suites = Upload.pkg.changes["distribution"].keys()
        for suite in suites:
            if not updated_pkgs.has_key(suite):
                updated_pkgs[suite] = {}

        files = Upload.pkg.files
        for file in files.keys():
            arch = files[file]["architecture"]
            md5 = files[file]["md5sum"]
            size = files[file]["size"]
            poolname = Cnf["Dir::PoolRoot"] + \
                utils.poolify(src, files[file]["component"])
            # Remember the .dsc's pool directory; files listed only in
            # the .dsc (below) live alongside it.
            if arch == "source" and file.endswith(".dsc"):
                dscpoolname = poolname
            for suite in suites:
                if not updated_pkgs[suite].has_key(arch):
                    updated_pkgs[suite][arch] = {}
                updated_pkgs[suite][arch][file] = {
                    "md5": md5, "size": size, "poolname": poolname }

        dsc_files = Upload.pkg.dsc_files
        for file in dsc_files.keys():
            arch = "source"
            # Entries without a "files id" are new in this upload and
            # were already covered by Upload.pkg.files above.
            if not dsc_files[file].has_key("files id"):
                continue

            # otherwise, it's already in the pool and needs to be
            # listed specially
            # NOTE(review): this relies on dscpoolname having been set
            # by the .dsc in the files loop above -- presumably every
            # sourceful upload ships its .dsc; confirm.
            md5 = dsc_files[file]["md5sum"]
            size = dsc_files[file]["size"]
            for suite in suites:
                if not updated_pkgs[suite].has_key(arch):
                    updated_pkgs[suite][arch] = {}
                updated_pkgs[suite][arch][file] = {
                    "md5": md5, "size": size, "poolname": dscpoolname }

    # Determine the announcing user: the invoking user even under sudo.
    if os.environ.has_key("SUDO_UID"):
        whoami = long(os.environ["SUDO_UID"])
    else:
        whoami = os.getuid()
    whoamifull = pwd.getpwuid(whoami)
    # GECOS field: take the full name up to the first comma.
    username = whoamifull[4].split(",")[0]

    Subst = {
        "__ADVISORY__": advisory,
        "__WHOAMI__": username,
        "__DATE__": time.strftime("%B %d, %Y", time.gmtime(time.time())),
        "__PACKAGE__": ", ".join(adv_packages),
        "__DAK_ADDRESS__": Cnf["Dinstall::MyEmailAddress"]
    }

    if Cnf.has_key("Dinstall::Bcc"):
        Subst["__BCC__"] = "Bcc: %s" % (Cnf["Dinstall::Bcc"])

    adv = ""
    archive = Cnf["Archive::%s::PrimaryMirror" % (utils.where_am_i())]
    for suite in updated_pkgs.keys():
        # Suite banner, e.g. "Debian 3.1 (sarge)" underlined with '-'.
        ver = Cnf["Suite::%s::Version" % suite]
        if ver != "": ver += " "
        suite_header = "%s %s(%s)" % (Cnf["Dinstall::MyDistribution"],
                                      ver, suite)
        adv += "%s\n%s\n\n" % (suite_header, "-"*len(suite_header))

        arches = database.get_suite_architectures(suite)
        # "source" and "all" get their own dedicated sections below.
        if "source" in arches:
            arches.remove("source")
        if "all" in arches:
            arches.remove("all")
        arches.sort()

        adv += "%s updates are available for %s.\n\n" % (
            suite.capitalize(), utils.join_with_commas_and(arches))

        for a in ["source", "all"] + arches:
            if not updated_pkgs[suite].has_key(a):
                continue

            if a == "source":
                adv += "Source archives:\n\n"
            elif a == "all":
                adv += "Architecture independent packages:\n\n"
            else:
                adv += "%s architecture (%s)\n\n" % (a,
                    Cnf["Architectures::%s" % a])

            # One URL + checksum line per file in this arch section.
            for file in updated_pkgs[suite][a].keys():
                adv += " http://%s/%s%s\n" % (
                    archive, updated_pkgs[suite][a][file]["poolname"], file)
                adv += " Size/MD5 checksum: %8s %s\n" % (
                    updated_pkgs[suite][a][file]["size"],
                    updated_pkgs[suite][a][file]["md5"])
            adv += "\n"
    adv = adv.rstrip()

    Subst["__ADVISORY_TEXT__"] = adv

    adv = utils.TemplateSubst(Subst, template)
    return adv