byname = {}
byid = {}
q = projectB.query("SELECT id, uid, name FROM uid")
- for (id, uid, name) in q.getresult():
- byname[uid] = (id, name)
- byid[id] = (uid, name)
+ for (keyid, uid, name) in q.getresult():
+ byname[uid] = (keyid, name)
+ byid[keyid] = (uid, name)
return (byname, byid)
def get_fingerprint_info():
uid = entry["uid"][0]
name = get_ldap_name(entry)
fingerprints = entry["keyFingerPrint"]
- id = None
+ keyid = None
for f in fingerprints:
key = fpr_lookup.get(f, None)
if key not in keys: continue
keys[key]["uid"] = uid
- if id != None: continue
- id = database.get_or_set_uid_id(uid)
- byuid[id] = (uid, name)
- byname[uid] = (id, name)
+ if keyid != None: continue
+ keyid = database.get_or_set_uid_id(uid)
+ byuid[keyid] = (uid, name)
+ byname[uid] = (keyid, name)
return (byname, byuid)
keys[x]["uid"] = format % "invalid-uid"
else:
uid = format % keys[x]["email"]
- id = database.get_or_set_uid_id(uid)
- byuid[id] = (uid, keys[x]["name"])
- byname[uid] = (id, keys[x]["name"])
+ keyid = database.get_or_set_uid_id(uid)
+ byuid[keyid] = (uid, keys[x]["name"])
+ byname[uid] = (keyid, keys[x]["name"])
keys[x]["uid"] = uid
if any_invalid:
uid = format % "invalid-uid"
- id = database.get_or_set_uid_id(uid)
- byuid[id] = (uid, "ungeneratable user id")
- byname[uid] = (id, "ungeneratable user id")
+ keyid = database.get_or_set_uid_id(uid)
+ byuid[keyid] = (uid, "ungeneratable user id")
+ byname[uid] = (keyid, "ungeneratable user id")
return (byname, byuid)
################################################################################
(db_uid_byname, db_uid_byid) = get_uid_info()
### Update full names of applicable users
- for id in desuid_byid.keys():
- uid = (id, desuid_byid[id][0])
- name = desuid_byid[id][1]
- oname = db_uid_byid[id][1]
+ for keyid in desuid_byid.keys():
+ uid = (keyid, desuid_byid[keyid][0])
+ name = desuid_byid[keyid][1]
+ oname = db_uid_byid[keyid][1]
if name and oname != name:
changes.append((uid[1], "Full name: %s" % (name)))
projectB.query("UPDATE uid SET name = '%s' WHERE id = %s" %
- (pg.escape_string(name), id))
+ (pg.escape_string(name), keyid))
# The fingerprint table (fpr) points to a uid and a keyring.
# If the uid is being decided here (ldap/generate) we set it to it.
fpr = {}
for z in keyring.keys.keys():
- id = db_uid_byname.get(keyring.keys[z].get("uid", None), [None])[0]
- if id == None:
- id = db_fin_info.get(keyring.keys[z]["fingerprints"][0], [None])[0]
+ keyid = db_uid_byname.get(keyring.keys[z].get("uid", None), [None])[0]
+ if keyid == None:
+ keyid = db_fin_info.get(keyring.keys[z]["fingerprints"][0], [None])[0]
for y in keyring.keys[z]["fingerprints"]:
- fpr[y] = (id,keyring_id)
+ fpr[y] = (keyid,keyring_id)
# For any keys that used to be in this keyring, disassociate them.
# We don't change the uid, leaving that for historical info; if
output = utils.open_file(filename, "w")
# Generate the final list of files
files = {}
- for id in list:
- path = packages[id]["path"]
- filename = packages[id]["filename"]
- file_id = packages[id]["file_id"]
+ for fileid in list:
+ path = packages[fileid]["path"]
+ filename = packages[fileid]["filename"]
+ file_id = packages[fileid]["file_id"]
if suite == "stable" and dislocated_files.has_key(file_id):
filename = dislocated_files[file_id]
else:
keys = files.keys()
keys.sort()
# Write the list of files out
- for file in keys:
- output.write(file+'\n')
+ for outfile in keys:
+ output.write(outfile+'\n')
output.close()
############################################################
output = utils.open_file(filename, "w")
# Generate the final list of files
files = {}
- for id in list:
- path = packages[id]["path"]
- filename = packages[id]["filename"]
- file_id = packages[id]["file_id"]
- pkg = packages[id]["pkg"]
+ for fileid in list:
+ path = packages[fileid]["path"]
+ filename = packages[fileid]["filename"]
+ file_id = packages[fileid]["file_id"]
+ pkg = packages[fileid]["pkg"]
if suite == "stable" and dislocated_files.has_key(file_id):
filename = dislocated_files[file_id]
else:
suite = packages[unique_id]["suite"]
component = packages[unique_id]["component"]
arch = packages[unique_id]["arch"]
- type = packages[unique_id]["type"]
+ packagetype = packages[unique_id]["filetype"]
d.setdefault(suite, {})
d[suite].setdefault(component, {})
d[suite][component].setdefault(arch, {})
- d[suite][component][arch].setdefault(type, [])
- d[suite][component][arch][type].append(unique_id)
+ d[suite][component][arch].setdefault(packagetype, [])
+ d[suite][component][arch][packagetype].append(unique_id)
# Flesh out the index
if not Options["Suite"]:
suites = Cnf.SubTree("Suite").List()
else:
components = utils.split_args(Options["Component"])
udeb_components = Cnf.ValueList("Suite::%s::UdebComponents" % (suite))
- udeb_components = udeb_components
for component in components:
d[suite].setdefault(component, {})
if component in udeb_components:
types = [ "dsc" ]
else:
types = binary_types
- for type in types:
- d[suite][component][arch].setdefault(type, [])
+ for packagetype in types:
+ d[suite][component][arch].setdefault(packagetype, [])
# Then walk it
for suite in d.keys():
if Cnf.has_key("Suite::%s::Components" % (suite)):
for arch in d[suite][component].keys():
if arch == "all":
continue
- for type in d[suite][component][arch].keys():
- list = d[suite][component][arch][type]
+ for packagetype in d[suite][component][arch].keys():
+ filelist = d[suite][component][arch][packagetype]
# If it's a binary, we need to add in the arch: all debs too
if arch != "source":
archall_suite = Cnf.get("Make-Suite-File-List::ArchAllMap::%s" % (suite))
if archall_suite:
- list.extend(d[archall_suite][component]["all"][type])
+ filelist.extend(d[archall_suite][component]["all"][packagetype])
elif d[suite][component].has_key("all") and \
- d[suite][component]["all"].has_key(type):
- list.extend(d[suite][component]["all"][type])
- write_filelist(suite, component, arch, type, list,
+ d[suite][component]["all"].has_key(packagetype):
+ filelist.extend(d[suite][component]["all"][packagetype])
+ write_filelist(suite, component, arch, packagetype, filelist,
packages, dislocated_files)
else: # legacy-mixed suite
- list = []
+ filelist = []
for component in d[suite].keys():
for arch in d[suite][component].keys():
- for type in d[suite][component][arch].keys():
- list.extend(d[suite][component][arch][type])
- write_legacy_mixed_filelist(suite, list, packages, dislocated_files)
+ for packagetype in d[suite][component][arch].keys():
+ filelist.extend(d[suite][component][arch][packagetype])
+ write_legacy_mixed_filelist(suite, filelist, packages, dislocated_files)
################################################################################
packages = {}
unique_id = 0
for i in ql:
- (id, pkg, arch, version, path, filename, component, file_id, suite, type) = i
+ (sourceid, pkg, arch, version, path, filename, component, file_id, suite, filetype) = i
# 'id' comes from either 'binaries' or 'source', so it's not unique
unique_id += 1
- packages[unique_id] = Dict(id=id, pkg=pkg, arch=arch, version=version,
+ packages[unique_id] = Dict(sourceid=sourceid, pkg=pkg, arch=arch, version=version,
path=path, filename=filename,
component=component, file_id=file_id,
- suite=suite, type = type)
+ suite=suite, filetype = filetype)
cleanup(packages)
write_filelists(packages, dislocated_files)
def check():
propogate={}
nopropogate={}
- for file in files.keys():
+ for checkfile in files.keys():
# The .orig.tar.gz can disappear out from under us is it's a
# duplicate of one in the archive.
- if not files.has_key(file):
+ if not files.has_key(checkfile):
continue
# Check that the source still exists
- if files[file]["type"] == "deb":
- source_version = files[file]["source version"]
- source_package = files[file]["source package"]
+ if files[checkfile]["type"] == "deb":
+ source_version = files[checkfile]["source version"]
+ source_package = files[checkfile]["source package"]
if not changes["architecture"].has_key("source") \
and not Upload.source_exists(source_package, source_version, changes["distribution"].keys()):
- reject("no source found for %s %s (%s)." % (source_package, source_version, file))
+ reject("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
# Version and file overwrite checks
if not installing_to_stable:
- if files[file]["type"] == "deb":
- reject(Upload.check_binary_against_db(file), "")
- elif files[file]["type"] == "dsc":
- reject(Upload.check_source_against_db(file), "")
- (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(file)
+ if files[checkfile]["type"] == "deb":
+ reject(Upload.check_binary_against_db(checkfile), "")
+ elif files[checkfile]["type"] == "dsc":
+ reject(Upload.check_source_against_db(checkfile), "")
+ (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(checkfile)
reject(reject_msg, "")
# propogate in the case it is in the override tables:
if changes.has_key("propdistribution"):
for suite in changes["propdistribution"].keys():
- if Upload.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file):
+ if Upload.in_override_p(files[checkfile]["package"], files[checkfile]["component"], suite, files[checkfile].get("dbtype",""), checkfile):
propogate[suite] = 1
else:
nopropogate[suite] = 1
continue
changes["distribution"][suite] = 1
- for file in files.keys():
+ for checkfile in files.keys():
# Check the package is still in the override tables
for suite in changes["distribution"].keys():
- if not Upload.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file):
- reject("%s is NEW for %s." % (file, suite))
+ if not Upload.in_override_p(files[checkfile]["package"], files[checkfile]["component"], suite, files[checkfile].get("dbtype",""), checkfile):
+ reject("%s is NEW for %s." % (checkfile, suite))
###############################################################################
return
# Add the .dsc file to the DB
- for file in files.keys():
- if files[file]["type"] == "dsc":
+ for newfile in files.keys():
+ if files[newfile]["type"] == "dsc":
package = dsc["source"]
version = dsc["version"] # NB: not files[file]["version"], that has no epoch
maintainer = dsc["maintainer"]
changedby_id = database.get_or_set_maintainer_id(changedby)
fingerprint_id = database.get_or_set_fingerprint_id(dsc["fingerprint"])
install_date = time.strftime("%Y-%m-%d")
- filename = files[file]["pool name"] + file
- dsc_component = files[file]["component"]
- dsc_location_id = files[file]["location id"]
+ filename = files[newfile]["pool name"] + newfile
+ dsc_component = files[newfile]["component"]
+ dsc_location_id = files[newfile]["location id"]
if dsc.has_key("dm-upload-allowed") and dsc["dm-upload-allowed"] == "yes":
dm_upload_allowed = "true"
else:
dm_upload_allowed = "false"
- if not files[file].has_key("files id") or not files[file]["files id"]:
- files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], dsc_location_id)
+ if not files[newfile].has_key("files id") or not files[newfile]["files id"]:
+ files[newfile]["files id"] = database.set_files_id (filename, files[newfile]["size"], files[newfile]["md5sum"], files[newfile]["sha1sum"], files[newfile]["sha256sum"], dsc_location_id)
projectB.query("INSERT INTO source (source, version, maintainer, changedby, file, install_date, sig_fpr, dm_upload_allowed) VALUES ('%s', '%s', %d, %d, %d, '%s', %s, %s)"
- % (package, version, maintainer_id, changedby_id, files[file]["files id"], install_date, fingerprint_id, dm_upload_allowed))
+ % (package, version, maintainer_id, changedby_id, files[newfile]["files id"], install_date, fingerprint_id, dm_upload_allowed))
for suite in changes["distribution"].keys():
suite_id = database.get_suite_id(suite)
projectB.query("INSERT INTO src_associations (suite, source) VALUES (%d, currval('source_id_seq'))" % (suite_id))
# Add the source files to the DB (files and dsc_files)
- projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files[file]["files id"]))
+ projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files[newfile]["files id"]))
for dsc_file in dsc_files.keys():
- filename = files[file]["pool name"] + dsc_file
+ filename = files[newfile]["pool name"] + dsc_file
# If the .orig.tar.gz is already in the pool, it's
# files id is stored in dsc_files by check_dsc().
files_id = dsc_files[dsc_file].get("files id", None)
# Add the .deb files to the DB
- for file in files.keys():
- if files[file]["type"] == "deb":
- package = files[file]["package"]
- version = files[file]["version"]
- maintainer = files[file]["maintainer"]
+ for newfile in files.keys():
+ if files[newfile]["type"] == "deb":
+ package = files[newfile]["package"]
+ version = files[newfile]["version"]
+ maintainer = files[newfile]["maintainer"]
maintainer = maintainer.replace("'", "\\'")
maintainer_id = database.get_or_set_maintainer_id(maintainer)
fingerprint_id = database.get_or_set_fingerprint_id(changes["fingerprint"])
- architecture = files[file]["architecture"]
+ architecture = files[newfile]["architecture"]
architecture_id = database.get_architecture_id (architecture)
- type = files[file]["dbtype"]
- source = files[file]["source package"]
- source_version = files[file]["source version"]
- filename = files[file]["pool name"] + file
- if not files[file].has_key("location id") or not files[file]["location id"]:
- files[file]["location id"] = database.get_location_id(Cnf["Dir::Pool"],files[file]["component"],utils.where_am_i())
- if not files[file].has_key("files id") or not files[file]["files id"]:
- files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], files[file]["location id"])
+ filetype = files[newfile]["dbtype"]
+ source = files[newfile]["source package"]
+ source_version = files[newfile]["source version"]
+ filename = files[newfile]["pool name"] + newfile
+ if not files[newfile].has_key("location id") or not files[newfile]["location id"]:
+ files[newfile]["location id"] = database.get_location_id(Cnf["Dir::Pool"],files[newfile]["component"],utils.where_am_i())
+ if not files[newfile].has_key("files id") or not files[newfile]["files id"]:
+ files[newfile]["files id"] = database.set_files_id (filename, files[newfile]["size"], files[newfile]["md5sum"], files[newfile]["sha1sum"], files[newfile]["sha256sum"], files[newfile]["location id"])
source_id = database.get_source_id (source, source_version)
if source_id:
projectB.query("INSERT INTO binaries (package, version, maintainer, source, architecture, file, type, sig_fpr) VALUES ('%s', '%s', %d, %d, %d, %d, '%s', %d)"
- % (package, version, maintainer_id, source_id, architecture_id, files[file]["files id"], type, fingerprint_id))
+ % (package, version, maintainer_id, source_id, architecture_id, files[newfile]["files id"], filetype, fingerprint_id))
else:
- raise NoSourceFieldError, "Unable to find a source id for %s (%s), %s, file %s, type %s, signed by %s" % (package, version, architecture, file, type, sig_fpr)
+ raise NoSourceFieldError, "Unable to find a source id for %s (%s), %s, file %s, type %s, signed by %s" % (package, version, architecture, newfile, filetype, changes["fingerprint"])
for suite in changes["distribution"].keys():
suite_id = database.get_suite_id(suite)
projectB.query("INSERT INTO bin_associations (suite, bin) VALUES (%d, currval('binaries_id_seq'))" % (suite_id))
continue
# First move the files to the new location
legacy_filename = qid["path"] + qid["filename"]
- pool_location = utils.poolify (changes["source"], files[file]["component"])
+ pool_location = utils.poolify (changes["source"], files[newfile]["component"])
pool_filename = pool_location + os.path.basename(qid["filename"])
destination = Cnf["Dir::Pool"] + pool_location
utils.move(legacy_filename, destination)
projectB.query("UPDATE dsc_files SET file = %s WHERE source = %s AND file = %s" % (new_files_id, database.get_source_id(changes["source"], changes["version"]), orig_tar_id))
# Install the files into the pool
- for file in files.keys():
- destination = Cnf["Dir::Pool"] + files[file]["pool name"] + file
- utils.move(file, destination)
- Logger.log(["installed", file, files[file]["type"], files[file]["size"], files[file]["architecture"]])
- install_bytes += float(files[file]["size"])
+ for newfile in files.keys():
+ destination = Cnf["Dir::Pool"] + files[newfile]["pool name"] + newfile
+ utils.move(newfile, destination)
+ Logger.log(["installed", newfile, files[newfile]["type"], files[newfile]["size"], files[newfile]["architecture"]])
+ install_bytes += float(files[newfile]["size"])
# Copy the .changes file across for suite which need it.
copy_changes = {}
dest_dir = Cnf["Dir::QueueBuild"]
if Cnf.FindB("Dinstall::SecurityQueueBuild"):
dest_dir = os.path.join(dest_dir, suite)
- for file in files.keys():
- dest = os.path.join(dest_dir, file)
+ for newfile in files.keys():
+ dest = os.path.join(dest_dir, newfile)
# Remove it from the list of packages for later processing by apt-ftparchive
projectB.query("UPDATE queue_build SET in_queue = 'f', last_used = '%s' WHERE filename = '%s' AND suite = %s" % (now_date, dest, suite_id))
if not Cnf.FindB("Dinstall::SecurityQueueBuild"):
# Update the symlink to point to the new location in the pool
- pool_location = utils.poolify (changes["source"], files[file]["component"])
- src = os.path.join(Cnf["Dir::Pool"], pool_location, os.path.basename(file))
+ pool_location = utils.poolify (changes["source"], files[newfile]["component"])
+ src = os.path.join(Cnf["Dir::Pool"], pool_location, os.path.basename(newfile))
if os.path.islink(dest):
os.unlink(dest)
os.symlink(src, dest)
projectB.query("BEGIN WORK")
# Add the source to stable (and remove it from proposed-updates)
- for file in files.keys():
- if files[file]["type"] == "dsc":
+ for newfile in files.keys():
+ if files[newfile]["type"] == "dsc":
package = dsc["source"]
version = dsc["version"]; # NB: not files[file]["version"], that has no epoch
q = projectB.query("SELECT id FROM source WHERE source = '%s' AND version = '%s'" % (package, version))
projectB.query("INSERT INTO src_associations (suite, source) VALUES ('%s', '%s')" % (suite_id, source_id))
# Add the binaries to stable (and remove it/them from proposed-updates)
- for file in files.keys():
- if files[file]["type"] == "deb":
- package = files[file]["package"]
- version = files[file]["version"]
- architecture = files[file]["architecture"]
+ for newfile in files.keys():
+ if files[newfile]["type"] == "deb":
+ package = files[newfile]["package"]
+ version = files[newfile]["version"]
+ architecture = files[newfile]["architecture"]
q = projectB.query("SELECT b.id FROM binaries b, architecture a WHERE b.package = '%s' AND b.version = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all') AND b.architecture = a.id" % (package, version, architecture))
ql = q.getresult()
if not ql:
os.unlink (new_changelog_filename)
new_changelog = utils.open_file(new_changelog_filename, 'w')
- for file in files.keys():
- if files[file]["type"] == "deb":
- new_changelog.write("stable/%s/binary-%s/%s\n" % (files[file]["component"], files[file]["architecture"], file))
- elif utils.re_issource.match(file):
- new_changelog.write("stable/%s/source/%s\n" % (files[file]["component"], file))
+ for newfile in files.keys():
+ if files[newfile]["type"] == "deb":
+ new_changelog.write("stable/%s/binary-%s/%s\n" % (files[newfile]["component"], files[newfile]["architecture"], newfile))
+ elif utils.re_issource.match(newfile):
+ new_changelog.write("stable/%s/source/%s\n" % (files[newfile]["component"], newfile))
else:
- new_changelog.write("%s\n" % (file))
+ new_changelog.write("%s\n" % (newfile))
chop_changes = queue.re_fdnic.sub("\n", changes["changes"])
new_changelog.write(chop_changes + '\n\n')
if os.access(changelog_filename, os.R_OK) != 0: