From: Mark Hymers
Date: Sat, 24 Jan 2009 19:13:26 +0000 (+0000)
Subject: Merge commit 'ftpmaster/master' into regexes
X-Git-Url: https://git.decadent.org.uk/gitweb/?a=commitdiff_plain;h=cdffcfe2a0e6804ed35df56c4c2aa084c2b63677;hp=ba77095c326cc03fc5ab18423b874ff06e6fcdba;p=dak.git

Merge commit 'ftpmaster/master' into regexes

Conflicts:
	dak/make_maintainers.py
	dak/new_security_install.py
	dak/process_accepted.py

Signed-off-by: Mark Hymers
---

diff --git a/dak/dak.py b/dak/dak.py
index e8a7df03..ad717ef3 100755
--- a/dak/dak.py
+++ b/dak/dak.py
@@ -152,8 +152,6 @@ def init():
          "Generates Maintainers file for BTS etc"),
         ("make-overrides",
          "Generates override files"),
-        ("mirror-split",
-         "Split the pool/ by architecture groups"),
         ("poolize",
          "Move packages from dists/ to pool/"),
         ("reject-proposed-updates",
diff --git a/dak/examine_package.py b/dak/examine_package.py
index 347eb52f..70b1f3ca 100755
--- a/dak/examine_package.py
+++ b/dak/examine_package.py
@@ -440,11 +440,11 @@ def check_deb (suite, deb_filename):
 # Read a file, strip the signature and return the modified contents as
 # a string.
 def strip_pgp_signature (filename):
-    file = utils.open_file (filename)
+    inputfile = utils.open_file (filename)
     contents = ""
     inside_signature = 0
     skip_next = 0
-    for line in file.readlines():
+    for line in inputfile.readlines():
         if line[:-1] == "":
             continue
         if inside_signature:
@@ -462,7 +462,7 @@ def strip_pgp_signature (filename):
             inside_signature = 0
             continue
         contents += line
-    file.close()
+    inputfile.close()
     return contents
 
 def display_changes(suite, changes_filename):
diff --git a/dak/generate_releases.py b/dak/generate_releases.py
index 8d0a3d87..a1552451 100755
--- a/dak/generate_releases.py
+++ b/dak/generate_releases.py
@@ -235,8 +235,8 @@ def main ():
         for arch in AptCnf["tree::%s::Architectures" % (tree)].split():
             if arch == "source":
                 filepath = "%s/%s/Sources" % (sec, arch)
-                for file in compressnames("tree::%s" % (tree), "Sources", filepath):
-                    files.append(file)
+                for cfile in compressnames("tree::%s" % (tree), "Sources", filepath):
+                    files.append(cfile)
                 add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
             else:
                 disks = "%s/disks-%s" % (sec, arch)
@@ -247,8 +247,8 @@ def main ():
                         files.append("%s/%s/md5sum.txt" % (disks, dir))
 
                 filepath = "%s/binary-%s/Packages" % (sec, arch)
-                for file in compressnames("tree::%s" % (tree), "Packages", filepath):
-                    files.append(file)
+                for cfile in compressnames("tree::%s" % (tree), "Packages", filepath):
+                    files.append(cfile)
                 add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
 
             if arch == "source":
@@ -290,10 +290,10 @@ def main ():
             for arch in AptCnf["tree::%s/%s::Architectures" % (tree,dis)].split():
                 if arch != "source":  # always true
-                    for file in compressnames("tree::%s/%s" % (tree,dis),
+                    for cfile in compressnames("tree::%s/%s" % (tree,dis),
                                               "Packages",
                                               "%s/%s/binary-%s/Packages" % (dis, sec, arch)):
-                        files.append(file)
+                        files.append(cfile)
 
     elif AptCnf.has_key("tree::%s::FakeDI" % (tree)):
         usetree = AptCnf["tree::%s::FakeDI" % (tree)]
         sec = AptCnf["tree::%s/main::Sections" % (usetree)].split()[0]
@@ -302,14 +302,14 @@ def main ():
         for arch in AptCnf["tree::%s/main::Architectures" % (usetree)].split():
             if arch != "source":  # always true
-                for file in compressnames("tree::%s/main" % (usetree), "Packages", "main/%s/binary-%s/Packages" % (sec, arch)):
-                    files.append(file)
+                for cfile in compressnames("tree::%s/main" % (usetree), "Packages", "main/%s/binary-%s/Packages" % (sec, arch)):
+                    files.append(cfile)
 
     elif AptCnf.has_key("bindirectory::%s" % (tree)):
-        for file in compressnames("bindirectory::%s" % (tree), "Packages", AptCnf["bindirectory::%s::Packages" % (tree)]):
-            files.append(file.replace(tree+"/","",1))
-        for file in compressnames("bindirectory::%s" % (tree), "Sources", AptCnf["bindirectory::%s::Sources" % (tree)]):
-            files.append(file.replace(tree+"/","",1))
+        for cfile in compressnames("bindirectory::%s" % (tree), "Packages", AptCnf["bindirectory::%s::Packages" % (tree)]):
+            files.append(cfile.replace(tree+"/","",1))
+        for cfile in compressnames("bindirectory::%s" % (tree), "Sources", AptCnf["bindirectory::%s::Sources" % (tree)]):
+            files.append(cfile.replace(tree+"/","",1))
     else:
         print "ALERT: no tree/bindirectory for %s" % (tree)
diff --git a/dak/import_keyring.py b/dak/import_keyring.py
index 602eb374..c8013d6e 100755
--- a/dak/import_keyring.py
+++ b/dak/import_keyring.py
@@ -35,9 +35,9 @@ def get_uid_info():
     byname = {}
     byid = {}
     q = projectB.query("SELECT id, uid, name FROM uid")
-    for (id, uid, name) in q.getresult():
-        byname[uid] = (id, name)
-        byid[id] = (uid, name)
+    for (keyid, uid, name) in q.getresult():
+        byname[uid] = (keyid, name)
+        byid[keyid] = (uid, name)
     return (byname, byid)
 
 def get_fingerprint_info():
@@ -131,16 +131,16 @@ class Keyring:
             uid = entry["uid"][0]
             name = get_ldap_name(entry)
             fingerprints = entry["keyFingerPrint"]
-            id = None
+            keyid = None
             for f in fingerprints:
                 key = fpr_lookup.get(f, None)
                 if key not in keys: continue
                 keys[key]["uid"] = uid
-                if id != None: continue
-                id = database.get_or_set_uid_id(uid)
-                byuid[id] = (uid, name)
-                byname[uid] = (id, name)
+                if keyid != None: continue
+                keyid = database.get_or_set_uid_id(uid)
+                byuid[keyid] = (uid, name)
+                byname[uid] = (keyid, name)
 
         return (byname, byuid)
 
@@ -155,15 +155,15 @@ class Keyring:
                 keys[x]["uid"] = format % "invalid-uid"
             else:
                 uid = format % keys[x]["email"]
-                id = database.get_or_set_uid_id(uid)
-                byuid[id] = (uid, keys[x]["name"])
-                byname[uid] = (id, keys[x]["name"])
+                keyid = database.get_or_set_uid_id(uid)
+                byuid[keyid] = (uid, keys[x]["name"])
+                byname[uid] = (keyid, keys[x]["name"])
                 keys[x]["uid"] = uid
         if any_invalid:
             uid = format % "invalid-uid"
-            id = database.get_or_set_uid_id(uid)
-            byuid[id] = (uid, "ungeneratable user id")
-            byname[uid] = (id, "ungeneratable user id")
+            keyid = database.get_or_set_uid_id(uid)
+            byuid[keyid] = (uid, "ungeneratable user id")
+            byname[uid] = (keyid, "ungeneratable user id")
         return (byname, byuid)
 
 ################################################################################
@@ -237,14 +237,14 @@ def main():
     (db_uid_byname, db_uid_byid) = get_uid_info()
 
     ### Update full names of applicable users
-    for id in desuid_byid.keys():
-        uid = (id, desuid_byid[id][0])
-        name = desuid_byid[id][1]
-        oname = db_uid_byid[id][1]
+    for keyid in desuid_byid.keys():
+        uid = (keyid, desuid_byid[keyid][0])
+        name = desuid_byid[keyid][1]
+        oname = db_uid_byid[keyid][1]
         if name and oname != name:
             changes.append((uid[1], "Full name: %s" % (name)))
             projectB.query("UPDATE uid SET name = '%s' WHERE id = %s" %
-                           (pg.escape_string(name), id))
+                           (pg.escape_string(name), keyid))
 
     # The fingerprint table (fpr) points to a uid and a keyring.
     #   If the uid is being decided here (ldap/generate) we set it to it.
@@ -254,11 +254,11 @@ def main():
 
     fpr = {}
     for z in keyring.keys.keys():
-        id = db_uid_byname.get(keyring.keys[z].get("uid", None), [None])[0]
-        if id == None:
-            id = db_fin_info.get(keyring.keys[z]["fingerprints"][0], [None])[0]
+        keyid = db_uid_byname.get(keyring.keys[z].get("uid", None), [None])[0]
+        if keyid == None:
+            keyid = db_fin_info.get(keyring.keys[z]["fingerprints"][0], [None])[0]
         for y in keyring.keys[z]["fingerprints"]:
-            fpr[y] = (id,keyring_id)
+            fpr[y] = (keyid,keyring_id)
 
     # For any keys that used to be in this keyring, disassociate them.
     # We don't change the uid, leaving that for historical info; if
diff --git a/dak/import_ldap_fingerprints.py b/dak/import_ldap_fingerprints.py
index 610e4a82..1fac652c 100755
--- a/dak/import_ldap_fingerprints.py
+++ b/dak/import_ldap_fingerprints.py
@@ -149,7 +149,7 @@ SELECT f.fingerprint, f.id, u.uid FROM fingerprint f, uid u WHERE f.uid = u.id
                     print "Assigning %s to 0x%s." % (uid, fingerprint)
                 elif existing_uid == uid:
                     pass
-                elif '@' not in existing_ui:
+                elif '@' not in existing_uid:
                     q = projectB.query("UPDATE fingerprint SET uid = %s WHERE id = %s" % (uid_id, fingerprint_id))
                     print "Promoting DM %s to DD %s with keyid 0x%s." % (existing_uid, uid, fingerprint)
                 else:
diff --git a/dak/make_maintainers.py b/dak/make_maintainers.py
index 3f6fd4c5..55cc4a9b 100755
--- a/dak/make_maintainers.py
+++ b/dak/make_maintainers.py
@@ -131,8 +131,8 @@ def main():
 
     # Process any additional Maintainer files (e.g. from pseudo packages)
     for filename in extra_files:
-        file = utils.open_file(filename)
-        for line in file.readlines():
+        extrafile = utils.open_file(filename)
+        for line in extrafile.readlines():
             line = re_comments.sub('', line).strip()
             if line == "":
                 continue
@@ -148,7 +148,7 @@ def main():
             if not packages.has_key(package) or version == '*' \
                or apt_pkg.VersionCompare(packages[package]["version"], version) < 0:
                 packages[package] = { "maintainer": maintainer, "version": version }
-    file.close()
+    extrafile.close()
 
     package_keys = packages.keys()
     package_keys.sort()
diff --git a/dak/make_overrides.py b/dak/make_overrides.py
index 1087ce28..2ed4f4ba 100755
--- a/dak/make_overrides.py
+++ b/dak/make_overrides.py
@@ -105,13 +105,13 @@ def main ():
         override_suite = Cnf["Suite::%s::OverrideCodeName" % (suite)]
         for component in Cnf.SubTree("Component").List():
             if component == "mixed":
-                continue; # Ick
+                continue # Ick
             for otype in Cnf.ValueList("OverrideType"):
                 if otype == "deb":
                     suffix = ""
                 elif otype == "udeb":
                     if component == "contrib":
-                        continue; # Ick2
+                        continue # Ick2
                     suffix = ".debian-installer"
                 elif otype == "dsc":
                     suffix = ".src"
diff --git a/dak/make_suite_file_list.py b/dak/make_suite_file_list.py
index 41e6cb33..dbbab7ec 100755
--- a/dak/make_suite_file_list.py
+++ b/dak/make_suite_file_list.py
@@ -201,10 +201,10 @@ def write_legacy_mixed_filelist(suite, list, packages, dislocated_files):
     output = utils.open_file(filename, "w")
     # Generate the final list of files
     files = {}
-    for id in list:
-        path = packages[id]["path"]
-        filename = packages[id]["filename"]
-        file_id = packages[id]["file_id"]
+    for fileid in list:
+        path = packages[fileid]["path"]
+        filename = packages[fileid]["filename"]
+        file_id = packages[fileid]["file_id"]
         if suite == "stable" and dislocated_files.has_key(file_id):
             filename = dislocated_files[file_id]
         else:
@@ -217,8 +217,8 @@ def write_legacy_mixed_filelist(suite, list, packages, dislocated_files):
     keys = files.keys()
     keys.sort()
     # Write the list of files out
-    for file in keys:
-        output.write(file+'\n')
+    for outfile in keys:
+        output.write(outfile+'\n')
     output.close()
 
 ############################################################
 
@@ -234,11 +234,11 @@ def write_filelist(suite, component, arch, type, list, packages, dislocated_file
     output = utils.open_file(filename, "w")
     # Generate the final list of files
     files = {}
-    for id in list:
-        path = packages[id]["path"]
-        filename = packages[id]["filename"]
-        file_id = packages[id]["file_id"]
-        pkg = packages[id]["pkg"]
+    for fileid in list:
+        path = packages[fileid]["path"]
+        filename = packages[fileid]["filename"]
+        file_id = packages[fileid]["file_id"]
+        pkg = packages[fileid]["pkg"]
         if suite == "stable" and dislocated_files.has_key(file_id):
             filename = dislocated_files[file_id]
         else:
@@ -264,12 +264,12 @@ def write_filelists(packages, dislocated_files):
         suite = packages[unique_id]["suite"]
         component = packages[unique_id]["component"]
         arch = packages[unique_id]["arch"]
-        type = packages[unique_id]["type"]
+        packagetype = packages[unique_id]["type"]
         d.setdefault(suite, {})
         d[suite].setdefault(component, {})
         d[suite][component].setdefault(arch, {})
-        d[suite][component][arch].setdefault(type, [])
-        d[suite][component][arch][type].append(unique_id)
+        d[suite][component][arch].setdefault(packagetype, [])
+        d[suite][component][arch][packagetype].append(unique_id)
     # Flesh out the index
     if not Options["Suite"]:
         suites = Cnf.SubTree("Suite").List()
@@ -282,7 +282,6 @@ def write_filelists(packages, dislocated_files):
         else:
             components = utils.split_args(Options["Component"])
         udeb_components = Cnf.ValueList("Suite::%s::UdebComponents" % (suite))
-        udeb_components = udeb_components
         for component in components:
             d[suite].setdefault(component, {})
             if component in udeb_components:
@@ -299,8 +298,8 @@ def write_filelists(packages, dislocated_files):
                     types = [ "dsc" ]
                 else:
                     types = binary_types
-                for type in types:
-                    d[suite][component][arch].setdefault(type, [])
+                for packagetype in types:
+                    d[suite][component][arch].setdefault(packagetype, [])
     # Then walk it
     for suite in d.keys():
         if Cnf.has_key("Suite::%s::Components" % (suite)):
@@ -308,25 +307,25 @@ def write_filelists(packages, dislocated_files):
             for arch in d[suite][component].keys():
                 if arch == "all":
                     continue
-                for type in d[suite][component][arch].keys():
-                    list = d[suite][component][arch][type]
+                for packagetype in d[suite][component][arch].keys():
+                    filelist = d[suite][component][arch][packagetype]
                     # If it's a binary, we need to add in the arch: all debs too
                     if arch != "source":
                         archall_suite = Cnf.get("Make-Suite-File-List::ArchAllMap::%s" % (suite))
                         if archall_suite:
-                            list.extend(d[archall_suite][component]["all"][type])
+                            filelist.extend(d[archall_suite][component]["all"][packagetype])
                         elif d[suite][component].has_key("all") and \
-                                 d[suite][component]["all"].has_key(type):
-                            list.extend(d[suite][component]["all"][type])
-                    write_filelist(suite, component, arch, type, list,
+                                 d[suite][component]["all"].has_key(packagetype):
+                            filelist.extend(d[suite][component]["all"][packagetype])
+                    write_filelist(suite, component, arch, packagetype, filelist,
                                    packages, dislocated_files)
         else: # legacy-mixed suite
-            list = []
+            filelist = []
             for component in d[suite].keys():
                 for arch in d[suite][component].keys():
-                    for type in d[suite][component][arch].keys():
-                        list.extend(d[suite][component][arch][type])
-            write_legacy_mixed_filelist(suite, list, packages, dislocated_files)
+                    for packagetype in d[suite][component][arch].keys():
+                        filelist.extend(d[suite][component][arch][packagetype])
+            write_legacy_mixed_filelist(suite, filelist, packages, dislocated_files)
 
 ################################################################################
 
@@ -369,13 +368,13 @@ SELECT s.id, s.source, 'source', s.version, l.path, f.filename, c.name, f.id,
     packages = {}
     unique_id = 0
     for i in ql:
-        (id, pkg, arch, version, path, filename, component, file_id, suite, type) = i
+        (sourceid, pkg, arch, version, path, filename, component, file_id, suite, filetype) = i
         # 'id' comes from either 'binaries' or 'source', so it's not unique
         unique_id += 1
-        packages[unique_id] = Dict(id=id, pkg=pkg, arch=arch, version=version,
+        packages[unique_id] = Dict(sourceid=sourceid, pkg=pkg, arch=arch, version=version,
                                    path=path, filename=filename, component=component, file_id=file_id,
-                                   suite=suite, type = type)
+                                   suite=suite, filetype = filetype)
     cleanup(packages)
     write_filelists(packages, dislocated_files)
diff --git a/dak/mirror_split.py b/dak/mirror_split.py
deleted file mode 100644
index 3f79020f..00000000
--- a/dak/mirror_split.py
+++ /dev/null
@@ -1,391 +0,0 @@
-#!/usr/bin/env python
-
-# Prepare and maintain partial trees by architecture
-# Copyright (C) 2004, 2006  Daniel Silverstone
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
-
-
-###############################################################################
-## So Martin, do you have a quote for me yet?
-## Make something damned stupid up and attribute it to me, that's okay
-###############################################################################
-
-import sys
-import apt_pkg
-
-from stat import S_ISDIR, S_ISLNK, S_ISREG
-import os
-import cPickle
-
-import daklib.utils
-
-## Master path is the main repository
-#MASTER_PATH = "/org/ftp.debian.org/scratch/dsilvers/master"
-
-MASTER_PATH = "***Configure Mirror-Split::FTPPath Please***"
-TREE_ROOT = "***Configure Mirror-Split::TreeRootPath Please***"
-TREE_DB_ROOT = "***Configure Mirror-Split::TreeDatabasePath Please***"
-trees = []
-
-Cnf = None
-
-###############################################################################
-# A MirrorSplitTarget is a representation of a target. It is a set of archs, a path
-# and whether or not the target includes source.
-##################
-
-class MirrorSplitTarget:
-    def __init__(self, name, archs, source):
-        self.name = name
-        self.root = "%s/%s" % (TREE_ROOT,name)
-        self.archs = archs.split(",")
-        self.source = source
-        self.dbpath = "%s/%s.db" % (TREE_DB_ROOT,name)
-        self.db = MirrorSplitDB()
-        if os.path.exists( self.dbpath ):
-            self.db.load_from_file( self.dbpath )
-
-    ## Save the db back to disk
-    def save_db(self):
-        self.db.save_to_file( self.dbpath )
-
-    ## Returns true if it's a poolish match
-    def poolish_match(self, path):
-        for a in self.archs:
-            if path.endswith( "_%s.deb" % (a) ):
-                return 1
-            if path.endswith( "_%s.udeb" % (a) ):
-                return 1
-        if self.source:
-            if (path.endswith( ".tar.gz" ) or
-                path.endswith( ".diff.gz" ) or
-                path.endswith( ".dsc" )):
-                return 1
-        return 0
-
-    ## Returns false if it's a badmatch distswise
-    def distish_match(self,path):
-        for a in self.archs:
-            if path.endswith("/Contents-%s.gz" % (a)):
-                return 1
-            if path.find("/binary-%s/" % (a)) != -1:
-                return 1
-            if path.find("/installer-%s/" % (a)) != -1:
-                return 1
-        if path.find("/source/") != -1:
-            if self.source:
-                return 1
-            else:
-                return 0
-        if path.find("/Contents-") != -1:
-            return 0
-        if path.find("/binary-") != -1:
-            return 0
-        if path.find("/installer-") != -1:
-            return 0
-        return 1
-
-##############################################################################
-# The applicable function is basically a predicate. Given a path and a
-# target object its job is to decide if the path conforms for the
-# target and thus is wanted.
-#
-# 'verbatim' is a list of files which are copied regardless
-# it should be loaded from a config file eventually
-##################
-
-verbatim = [
-    ]
-
-verbprefix = [
-    "/tools/",
-    "/README",
-    "/doc/"
-    ]
-
-def applicable(path, target):
-    if path.startswith("/pool/"):
-        return target.poolish_match(path)
-    if (path.startswith("/dists/") or
-        path.startswith("/project/experimental/")):
-        return target.distish_match(path)
-    if path in verbatim:
-        return 1
-    for prefix in verbprefix:
-        if path.startswith(prefix):
-            return 1
-    return 0
-
-
-##############################################################################
-# A MirrorSplitDir is a representation of a tree.
-#   It distinguishes files dirs and links
-# Dirs are dicts of (name, MirrorSplitDir)
-# Files are dicts of (name, inode)
-# Links are dicts of (name, target)
-##############
-
-class MirrorSplitDir:
-    def __init__(self):
-        self.dirs = {}
-        self.files = {}
-        self.links = {}
-
-##############################################################################
-# A MirrorSplitDB is a container for a MirrorSplitDir...
-##############
-
-class MirrorSplitDB:
-    ## Initialise a MirrorSplitDB as containing nothing
-    def __init__(self):
-        self.root = MirrorSplitDir()
-
-    def _internal_recurse(self, path):
-        bdir = MirrorSplitDir()
-        dl = os.listdir( path )
-        dl.sort()
-        dirs = []
-        for ln in dl:
-            lnl = os.lstat( "%s/%s" % (path, ln) )
-            if S_ISDIR(lnl[0]):
-                dirs.append(ln)
-            elif S_ISLNK(lnl[0]):
-                bdir.links[ln] = os.readlink( "%s/%s" % (path, ln) )
-            elif S_ISREG(lnl[0]):
-                bdir.files[ln] = lnl[1]
-            else:
-                daklib.utils.fubar( "Confused by %s/%s -- not a dir, link or file" %
-                                    ( path, ln ) )
-        for d in dirs:
-            bdir.dirs[d] = self._internal_recurse( "%s/%s" % (path,d) )
-
-        return bdir
-
-    ## Recurse through a given path, setting the sequence accordingly
-    def init_from_dir(self, dirp):
-        self.root = self._internal_recurse( dirp )
-
-    ## Load this MirrorSplitDB from file
-    def load_from_file(self, fname):
-        f = open(fname, "r")
-        self.root = cPickle.load(f)
-        f.close()
-
-    ## Save this MirrorSplitDB to a file
-    def save_to_file(self, fname):
-        f = open(fname, "w")
-        cPickle.dump( self.root, f, 1 )
-        f.close()
-
-
-##############################################################################
-# Helper functions for the tree syncing...
-##################
-
-def _pth(a,b):
-    return "%s/%s" % (a,b)
-
-def do_mkdir(targ,path):
-    if not os.path.exists( _pth(targ.root, path) ):
-        os.makedirs( _pth(targ.root, path) )
-
-def do_mkdir_f(targ,path):
-    do_mkdir(targ, os.path.dirname(path))
-
-def do_link(targ,path):
-    do_mkdir_f(targ,path)
-    os.link( _pth(MASTER_PATH, path),
-             _pth(targ.root, path))
-
-def do_symlink(targ,path,link):
-    do_mkdir_f(targ,path)
-    os.symlink( link, _pth(targ.root, path) )
-
-def do_unlink(targ,path):
-    os.unlink( _pth(targ.root, path) )
-
-def do_unlink_dir(targ,path):
-    os.system( "rm -Rf '%s'" % _pth(targ.root, path) )
-
-##############################################################################
-# Reconciling a target with the sourcedb
-################
-
-def _internal_reconcile( path, srcdir, targdir, targ ):
-    # Remove any links in targdir which aren't in srcdir
-    # Or which aren't applicable
-    rm = []
-    for k in targdir.links.keys():
-        if applicable( _pth(path, k), targ ):
-            if not srcdir.links.has_key(k):
-                rm.append(k)
-        else:
-            rm.append(k)
-    for k in rm:
-        #print "-L-", _pth(path,k)
-        do_unlink(targ, _pth(path,k))
-        del targdir.links[k]
-
-    # Remove any files in targdir which aren't in srcdir
-    # Or which aren't applicable
-    rm = []
-    for k in targdir.files.keys():
-        if applicable( _pth(path, k), targ ):
-            if not srcdir.files.has_key(k):
-                rm.append(k)
-        else:
-            rm.append(k)
-    for k in rm:
-        #print "-F-", _pth(path,k)
-        do_unlink(targ, _pth(path,k))
-        del targdir.files[k]
-
-    # Remove any dirs in targdir which aren't in srcdir
-    rm = []
-    for k in targdir.dirs.keys():
-        if not srcdir.dirs.has_key(k):
-            rm.append(k)
-    for k in rm:
-        #print "-D-", _pth(path,k)
-        do_unlink_dir(targ, _pth(path,k))
-        del targdir.dirs[k]
-
-    # Add/update files
-    for k in srcdir.files.keys():
-        if applicable( _pth(path,k), targ ):
-            if not targdir.files.has_key(k):
-                #print "+F+", _pth(path,k)
-                do_link( targ, _pth(path,k) )
-                targdir.files[k] = srcdir.files[k]
-            else:
-                if targdir.files[k] != srcdir.files[k]:
-                    #print "*F*", _pth(path,k)
-                    do_unlink( targ, _pth(path,k) )
-                    do_link( targ, _pth(path,k) )
-                    targdir.files[k] = srcdir.files[k]
-
-    # Add/update links
-    for k in srcdir.links.keys():
-        if applicable( _pth(path,k), targ ):
-            if not targdir.links.has_key(k):
-                targdir.links[k] = srcdir.links[k];
-                #print "+L+",_pth(path,k), "->", srcdir.links[k]
-                do_symlink( targ, _pth(path,k), targdir.links[k] )
-            else:
-                if targdir.links[k] != srcdir.links[k]:
-                    do_unlink( targ, _pth(path,k) )
-                    targdir.links[k] = srcdir.links[k]
-                    #print "*L*", _pth(path,k), "to ->", srcdir.links[k]
-                    do_symlink( targ, _pth(path,k), targdir.links[k] )
-
-    # Do dirs
-    for k in srcdir.dirs.keys():
-        if not targdir.dirs.has_key(k):
-            targdir.dirs[k] = MirrorSplitDir()
-            #print "+D+", _pth(path,k)
-        _internal_reconcile( _pth(path,k), srcdir.dirs[k],
-                             targdir.dirs[k], targ )
-
-
-def reconcile_target_db( src, targ ):
-    _internal_reconcile( "", src.root, targ.db.root, targ )
-
-###############################################################################
-
-def load_config():
-    global MASTER_PATH
-    global TREE_ROOT
-    global TREE_DB_ROOT
-    global trees
-
-    MASTER_PATH = Cnf["Mirror-Split::FTPPath"]
-    TREE_ROOT = Cnf["Mirror-Split::TreeRootPath"]
-    TREE_DB_ROOT = Cnf["Mirror-Split::TreeDatabasePath"]
-
-    for a in Cnf.ValueList("Mirror-Split::BasicTrees"):
-        trees.append( MirrorSplitTarget( a, "%s,all" % a, 1 ) )
-
-    for n in Cnf.SubTree("Mirror-Split::CombinationTrees").List():
-        archs = Cnf.ValueList("Mirror-Split::CombinationTrees::%s" % n)
-        source = 0
-        if "source" in archs:
-            source = 1
-            archs.remove("source")
-        archs = ",".join(archs)
-        trees.append( MirrorSplitTarget( n, archs, source ) )
-
-def do_list ():
-    print "Master path",MASTER_PATH
-    print "Trees at",TREE_ROOT
-    print "DBs at",TREE_DB_ROOT
-
-    for tree in trees:
-        print tree.name,"contains",", ".join(tree.archs),
-        if tree.source:
-            print " [source]"
-        else:
-            print ""
-
-def do_help ():
-    print """Usage: dak mirror-split [OPTIONS]
-Generate hardlink trees of certain architectures
-
-  -h, --help                 show this help and exit
-  -l, --list                 list the configuration and exit
-"""
-
-
-def main ():
-    global Cnf
-
-    Cnf = daklib.utils.get_conf()
-
-    Arguments = [('h',"help","Mirror-Split::Options::Help"),
-                 ('l',"list","Mirror-Split::Options::List"),
-                 ]
-
-    arguments = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
-    Cnf["Mirror-Split::Options::cake"] = ""
-    Options = Cnf.SubTree("Mirror-Split::Options")
-
-    print "Loading configuration..."
-    load_config()
-    print "Loaded."
-
-    if Options.has_key("Help"):
-        do_help()
-        return
-    if Options.has_key("List"):
-        do_list()
-        return
-
-
-    src = MirrorSplitDB()
-    print "Scanning", MASTER_PATH
-    src.init_from_dir(MASTER_PATH)
-    print "Scanned"
-
-    for tree in trees:
-        print "Reconciling tree:",tree.name
-        reconcile_target_db( src, tree )
-        print "Saving updated DB...",
-        tree.save_db()
-        print "Done"
-
-##############################################################################
-
-if __name__ == '__main__':
-    main()
diff --git a/dak/override.py b/dak/override.py
index f98f03c5..0bda5e76 100755
--- a/dak/override.py
+++ b/dak/override.py
@@ -107,9 +107,9 @@ def main ():
 
     # Retrieve current section/priority...
     oldsection, oldsourcesection, oldpriority = None, None, None
-    for type in ['source', 'binary']:
+    for packagetype in ['source', 'binary']:
         eqdsc = '!='
-        if type == 'source':
+        if packagetype == 'source':
             eqdsc = '='
         q = projectB.query("""
     SELECT priority.priority AS prio, section.section AS sect, override_type.type AS type
@@ -129,7 +129,7 @@ def main ():
             utils.fubar("%s is ambiguous. Matches %d packages" % (package,q.ntuples()))
         r = q.getresult()
 
-        if type == 'binary':
+        if packagetype == 'binary':
             oldsection = r[0][1]
             oldpriority = r[0][0]
         else:
diff --git a/dak/process_accepted.py b/dak/process_accepted.py
index f066614f..a29c8921 100755
--- a/dak/process_accepted.py
+++ b/dak/process_accepted.py
@@ -110,32 +110,32 @@ def reject (str, prefix="Rejected: "):
 def check():
     propogate={}
     nopropogate={}
-    for file in files.keys():
+    for checkfile in files.keys():
         # The .orig.tar.gz can disappear out from under us is it's a
         # duplicate of one in the archive.
-        if not files.has_key(file):
+        if not files.has_key(checkfile):
             continue
 
         # Check that the source still exists
-        if files[file]["type"] == "deb":
-            source_version = files[file]["source version"]
-            source_package = files[file]["source package"]
+        if files[checkfile]["type"] == "deb":
+            source_version = files[checkfile]["source version"]
+            source_package = files[checkfile]["source package"]
             if not changes["architecture"].has_key("source") \
               and not Upload.source_exists(source_package, source_version, changes["distribution"].keys()):
-                reject("no source found for %s %s (%s)." % (source_package, source_version, file))
+                reject("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
 
         # Version and file overwrite checks
         if not installing_to_stable:
-            if files[file]["type"] == "deb":
-                reject(Upload.check_binary_against_db(file), "")
-            elif files[file]["type"] == "dsc":
-                reject(Upload.check_source_against_db(file), "")
-                (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(file)
+            if files[checkfile]["type"] == "deb":
+                reject(Upload.check_binary_against_db(checkfile), "")
+            elif files[checkfile]["type"] == "dsc":
+                reject(Upload.check_source_against_db(checkfile), "")
+                (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(checkfile)
                 reject(reject_msg, "")
 
         # propogate in the case it is in the override tables:
         if changes.has_key("propdistribution"):
             for suite in changes["propdistribution"].keys():
-                if Upload.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file):
+                if Upload.in_override_p(files[checkfile]["package"], files[checkfile]["component"], suite, files[checkfile].get("dbtype",""), checkfile):
                     propogate[suite] = 1
                 else:
                     nopropogate[suite] = 1
@@ -145,11 +145,11 @@ def check():
             continue
         changes["distribution"][suite] = 1
 
-    for file in files.keys():
+    for checkfile in files.keys():
         # Check the package is still in the override tables
         for suite in changes["distribution"].keys():
-            if not Upload.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file):
-                reject("%s is NEW for %s." % (file, suite))
+            if not Upload.in_override_p(files[checkfile]["package"], files[checkfile]["component"], suite, files[checkfile].get("dbtype",""), checkfile):
+                reject("%s is NEW for %s." % (checkfile, suite))
 
 ###############################################################################
 
@@ -284,8 +284,8 @@ def install ():
         return
 
     # Add the .dsc file to the DB
-    for file in files.keys():
-        if files[file]["type"] == "dsc":
+    for newfile in files.keys():
+        if files[newfile]["type"] == "dsc":
             package = dsc["source"]
             version = dsc["version"]  # NB: not files[file]["version"], that has no epoch
             maintainer = dsc["maintainer"]
@@ -296,26 +296,26 @@ def install ():
             changedby_id = database.get_or_set_maintainer_id(changedby)
             fingerprint_id = database.get_or_set_fingerprint_id(dsc["fingerprint"])
             install_date = time.strftime("%Y-%m-%d")
-            filename = files[file]["pool name"] + file
-            dsc_component = files[file]["component"]
-            dsc_location_id = files[file]["location id"]
+            filename = files[newfile]["pool name"] + newfile
+            dsc_component = files[newfile]["component"]
+            dsc_location_id = files[newfile]["location id"]
             if dsc.has_key("dm-upload-allowed") and dsc["dm-upload-allowed"] == "yes":
                 dm_upload_allowed = "true"
             else:
                 dm_upload_allowed = "false"
-            if not files[file].has_key("files id") or not files[file]["files id"]:
-                files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], dsc_location_id)
+            if not files[newfile].has_key("files id") or not files[newfile]["files id"]:
+                files[newfile]["files id"] = database.set_files_id (filename, files[newfile]["size"], files[newfile]["md5sum"], files[newfile]["sha1sum"], files[newfile]["sha256sum"], dsc_location_id)
             projectB.query("INSERT INTO source (source, version, maintainer, changedby, file, install_date, sig_fpr, dm_upload_allowed) VALUES ('%s', '%s', %d, %d, %d, '%s', %s, %s)"
-                           % (package, version, maintainer_id, changedby_id, files[file]["files id"], install_date, fingerprint_id, dm_upload_allowed))
+                           % (package, version, maintainer_id, changedby_id, files[newfile]["files id"], install_date, fingerprint_id, dm_upload_allowed))
 
             for suite in changes["distribution"].keys():
                 suite_id = database.get_suite_id(suite)
                 projectB.query("INSERT INTO src_associations (suite, source) VALUES (%d, currval('source_id_seq'))" % (suite_id))
 
             # Add the source files to the DB (files and dsc_files)
-            projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files[file]["files id"]))
+            projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files[newfile]["files id"]))
             for dsc_file in dsc_files.keys():
-                filename = files[file]["pool name"] + dsc_file
+                filename = files[newfile]["pool name"] + dsc_file
                 # If the .orig.tar.gz is already in the pool, it's
                 # files id is stored in dsc_files by check_dsc().
                 files_id = dsc_files[dsc_file].get("files id", None)
@@ -344,30 +344,30 @@ def install ():
 
     # Add the .deb files to the DB
-    for file in files.keys():
-        if files[file]["type"] == "deb":
-            package = files[file]["package"]
-            version = files[file]["version"]
-            maintainer = files[file]["maintainer"]
+    for newfile in files.keys():
+        if files[newfile]["type"] == "deb":
+            package = files[newfile]["package"]
+            version = files[newfile]["version"]
+            maintainer = files[newfile]["maintainer"]
             maintainer = maintainer.replace("'", "\\'")
             maintainer_id = database.get_or_set_maintainer_id(maintainer)
             fingerprint_id = database.get_or_set_fingerprint_id(changes["fingerprint"])
-            architecture = files[file]["architecture"]
+            architecture = files[newfile]["architecture"]
             architecture_id = database.get_architecture_id (architecture)
-            type = files[file]["dbtype"]
-            source = files[file]["source package"]
-            source_version = files[file]["source version"]
-            filename = files[file]["pool name"] + file
-            if not files[file].has_key("location id") or not files[file]["location id"]:
-                files[file]["location id"] = database.get_location_id(Cnf["Dir::Pool"],files[file]["component"],utils.where_am_i())
-            if not files[file].has_key("files id") or not files[file]["files id"]:
-                files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], files[file]["location id"])
+            filetype = files[newfile]["dbtype"]
+            source = files[newfile]["source package"]
+            source_version = files[newfile]["source version"]
+            filename = files[newfile]["pool name"] + newfile
+            if not files[newfile].has_key("location id") or not files[newfile]["location id"]:
+                files[newfile]["location id"] = database.get_location_id(Cnf["Dir::Pool"],files[newfile]["component"],utils.where_am_i())
+            if not files[newfile].has_key("files id") or not files[newfile]["files id"]:
+                files[newfile]["files id"] = database.set_files_id (filename, files[newfile]["size"], files[newfile]["md5sum"], files[newfile]["sha1sum"], files[newfile]["sha256sum"], files[newfile]["location id"])
             source_id = database.get_source_id (source, source_version)
             if source_id:
                 projectB.query("INSERT INTO binaries (package, version, maintainer, source, architecture, file, type, sig_fpr) VALUES ('%s', '%s', %d, %d, %d, %d, '%s', %d)"
-                               % (package, version, maintainer_id, source_id, architecture_id, files[file]["files id"], type, fingerprint_id))
+                               % (package, version, maintainer_id, source_id, architecture_id, files[newfile]["files id"], filetype, fingerprint_id))
             else:
-                raise NoSourceFieldError, "Unable to find a source id for %s (%s), %s, file %s, type %s, signed by %s" % (package, version, architecture, file, type, sig_fpr)
+                raise NoSourceFieldError, "Unable to find a source id for %s (%s), %s, file %s, type %s, signed by %s" % (package, version, architecture, newfile, filetype, changes["fingerprint"])
             for suite in changes["distribution"].keys():
                 suite_id = database.get_suite_id(suite)
                 projectB.query("INSERT INTO bin_associations (suite, bin) VALUES (%d, currval('binaries_id_seq'))" % (suite_id))
@@ -387,7 +387,7 @@ def install ():
                 continue
             # First move the files to the new location
             legacy_filename = qid["path"] + qid["filename"]
-            pool_location = utils.poolify (changes["source"], files[file]["component"])
+            pool_location = utils.poolify (changes["source"], files[newfile]["component"])
             pool_filename = pool_location + os.path.basename(qid["filename"])
             destination = Cnf["Dir::Pool"] + pool_location
             utils.move(legacy_filename, destination)
@@ -415,11 +415,11 @@ def install ():
             projectB.query("UPDATE dsc_files SET file = %s WHERE source = %s AND file = %s" % (new_files_id, database.get_source_id(changes["source"], changes["version"]), orig_tar_id))
 
     # Install the files into the pool
-    for file in files.keys():
-        destination = Cnf["Dir::Pool"] + files[file]["pool name"] + file
-        utils.move(file, destination)
-        Logger.log(["installed", file, files[file]["type"], files[file]["size"], files[file]["architecture"]])
-        install_bytes += float(files[file]["size"])
+    for newfile in files.keys():
+        destination = Cnf["Dir::Pool"] + files[newfile]["pool name"] + newfile
+        utils.move(newfile, destination)
+        Logger.log(["installed", newfile, files[newfile]["type"], files[newfile]["size"], files[newfile]["architecture"]])
+        install_bytes += float(files[newfile]["size"])
 
     # Copy the .changes file across for suite which need it.
     copy_changes = {}
@@ -458,14 +458,14 @@ def install ():
         dest_dir = Cnf["Dir::QueueBuild"]
         if Cnf.FindB("Dinstall::SecurityQueueBuild"):
             dest_dir = os.path.join(dest_dir, suite)
-        for file in files.keys():
-            dest = os.path.join(dest_dir, file)
+        for newfile in files.keys():
+            dest = os.path.join(dest_dir, newfile)
             # Remove it from the list of packages for later processing by apt-ftparchive
             projectB.query("UPDATE queue_build SET in_queue = 'f', last_used = '%s' WHERE filename = '%s' AND suite = %s" % (now_date, dest, suite_id))
             if not Cnf.FindB("Dinstall::SecurityQueueBuild"):
                 # Update the symlink to point to the new location in the pool
-                pool_location = utils.poolify (changes["source"], files[file]["component"])
-                src = os.path.join(Cnf["Dir::Pool"], pool_location, os.path.basename(file))
+                pool_location = utils.poolify (changes["source"], files[newfile]["component"])
+                src = os.path.join(Cnf["Dir::Pool"], pool_location, os.path.basename(newfile))
                 if os.path.islink(dest):
                     os.unlink(dest)
                 os.symlink(src, dest)
@@ -494,8 +494,8 @@ def stable_install (summary, short_summary):
     projectB.query("BEGIN WORK")
 
    # Add the source to stable (and remove it from proposed-updates)
-    for file in files.keys():
-        if files[file]["type"] == "dsc":
+    for newfile in files.keys():
+        if files[newfile]["type"] == "dsc":
             package = dsc["source"]
             version = dsc["version"];  # NB: not files[file]["version"], that has no epoch
             q = projectB.query("SELECT id FROM source WHERE source = '%s' AND version = '%s'" % (package, version))
@@ -509,11 +509,11 @@ def stable_install (summary, short_summary):
             projectB.query("INSERT INTO src_associations (suite, source) VALUES ('%s', '%s')" % (suite_id, source_id))
 
     # Add the binaries to stable (and remove it/them from proposed-updates)
-    for file in files.keys():
-        if files[file]["type"] == "deb":
-            package = files[file]["package"]
-            version = files[file]["version"]
-            architecture = files[file]["architecture"]
+    for newfile in files.keys():
+        if files[newfile]["type"] == "deb":
+            package = files[newfile]["package"]
+            version = files[newfile]["version"]
+            architecture = files[newfile]["architecture"]
             q = projectB.query("SELECT b.id FROM binaries b, architecture a WHERE b.package = '%s' AND b.version = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all') AND b.architecture = a.id" % (package, version, architecture))
             ql = q.getresult()
             if not ql:
@@ -536,13 +536,13 @@ def stable_install (summary, short_summary):
         os.unlink (new_changelog_filename)
 
     new_changelog = utils.open_file(new_changelog_filename, 'w')
-    for file in files.keys():
-        if files[file]["type"] == "deb":
-            new_changelog.write("stable/%s/binary-%s/%s\n" % (files[file]["component"], files[file]["architecture"], file))
-        elif re_issource.match(file):
-            new_changelog.write("stable/%s/source/%s\n" % (files[file]["component"], file))
+    for newfile in files.keys():
+        if files[newfile]["type"] == "deb":
+            new_changelog.write("stable/%s/binary-%s/%s\n" % (files[newfile]["component"], files[newfile]["architecture"], newfile))
+        elif re_issource.match(newfile):
+            new_changelog.write("stable/%s/source/%s\n" % (files[newfile]["component"], newfile))
         else:
-            new_changelog.write("%s\n" % (file))
+            new_changelog.write("%s\n" % (newfile))
     chop_changes = re_fdnic.sub("\n", changes["changes"])
     new_changelog.write(chop_changes + '\n\n')
     if os.access(changelog_filename, os.R_OK) != 0:
diff --git a/dak/process_new.py b/dak/process_new.py
index 462a2e63..9eea0ec5 100755
--- a/dak/process_new.py
+++ b/dak/process_new.py
@@ -231,6 +231,7 @@ def sort_changes(changes_files):
 class Section_Completer:
     def __init__ (self):
         self.sections = []
+        self.matches = []
         q = projectB.query("SELECT section FROM section")
         for i in q.getresult():
             self.sections.append(i[0])
@@ -252,6 +253,7 @@ class Section_Completer:
 class Priority_Completer:
     def __init__ (self):
         self.priorities = []
+        self.matches = []
         q = projectB.query("SELECT priority FROM priority")
         for i in q.getresult():
             self.priorities.append(i[0])
@@ -457,7 +459,7 @@ def edit_overrides (new):
 def edit_note(note):
     # Write the current data to a temporary file
     (fd, temp_filename) = utils.temp_filename()
-    temp_file = os.fdopen(temp_filename, 'w')
+    temp_file = os.fdopen(fd, 'w')
     temp_file.write(note)
     temp_file.close()
     editor = os.environ.get("EDITOR","vi")
diff --git a/dak/process_unchecked.py b/dak/process_unchecked.py
index 28583c05..c1bdbe83 100755
--- a/dak/process_unchecked.py
+++ b/dak/process_unchecked.py
@@ -1301,14 +1301,14 @@ def is_stableupdate ():
 
 def do_stableupdate (summary, short_summary):
     print "Moving to PROPOSED-UPDATES holding area."
-    Logger.log(["Moving to proposed-updates", pkg.changes_file]);
+    Logger.log(["Moving to proposed-updates", pkg.changes_file])
 
-    Upload.dump_vars(Cnf["Dir::Queue::ProposedUpdates"]);
+    Upload.dump_vars(Cnf["Dir::Queue::ProposedUpdates"])
     move_to_dir(Cnf["Dir::Queue::ProposedUpdates"], perms=0664)
 
     # Check for override disparities
-    Upload.Subst["__SUMMARY__"] = summary;
-    Upload.check_override();
+    Upload.Subst["__SUMMARY__"] = summary
+    Upload.check_override()
 
 ################################################################################
 
@@ -1330,14 +1330,14 @@ def is_oldstableupdate ():
 
 def do_oldstableupdate (summary, short_summary):
     print "Moving to OLDSTABLE-PROPOSED-UPDATES holding area."
- Logger.log(["Moving to oldstable-proposed-updates", pkg.changes_file]); + Logger.log(["Moving to oldstable-proposed-updates", pkg.changes_file]) - Upload.dump_vars(Cnf["Dir::Queue::OldProposedUpdates"]); + Upload.dump_vars(Cnf["Dir::Queue::OldProposedUpdates"]) move_to_dir(Cnf["Dir::Queue::OldProposedUpdates"], perms=0664) # Check for override disparities - Upload.Subst["__SUMMARY__"] = summary; - Upload.check_override(); + Upload.Subst["__SUMMARY__"] = summary + Upload.check_override() ################################################################################ diff --git a/dak/update_db.py b/dak/update_db.py index 6db74d9d..d4aefe24 100755 --- a/dak/update_db.py +++ b/dak/update_db.py @@ -29,6 +29,7 @@ import psycopg2, sys, fcntl, os import apt_pkg import time +import errno from daklib import database from daklib import utils @@ -63,7 +64,7 @@ Updates dak's database schema to the lastest version. You should disable crontab name TEXT UNIQUE NOT NULL, value TEXT );""") - c.execute("INSERT INTO config VALUES ( nextval('config_id_seq'), 'db_revision', '0')"); + c.execute("INSERT INTO config VALUES ( nextval('config_id_seq'), 'db_revision', '0')") self.db.commit() except psycopg2.ProgrammingError: @@ -84,7 +85,7 @@ Updates dak's database schema to the lastest version. You should disable crontab try: c = self.db.cursor() - q = c.execute("SELECT value FROM config WHERE name = 'db_revision';"); + q = c.execute("SELECT value FROM config WHERE name = 'db_revision';") return c.fetchone()[0] except psycopg2.ProgrammingError: diff --git a/daklib/dak_exceptions.py b/daklib/dak_exceptions.py index 4e795461..9404ee9e 100644 --- a/daklib/dak_exceptions.py +++ b/daklib/dak_exceptions.py @@ -29,6 +29,7 @@ class DakError(Exception): """ def __init__(self, message=""): + Exception.__init__(self) self.args = str(message) self.message = str(message) diff --git a/daklib/database.py b/daklib/database.py index 5c7bd838..e2c596a6 100755 --- a/daklib/database.py +++ b/daklib/database.py @@ -19,7 +19,9 @@ ################################################################################ -import sys, time, types +import sys +import time +import types ################################################################################ @@ -46,6 +48,7 @@ suite_version_cache = {} ################################################################################ def init (config, sql): + """ database module init. Just sets two variables""" global Cnf, projectB Cnf = config @@ -53,6 +56,11 @@ def init (config, sql): def do_query(q): + """ + Executes a database query q. Writes statistics to stderr and returns + the result. + + """ sys.stderr.write("query: \"%s\" ... " % (q)) before = time.time() r = projectB.query(q) @@ -69,6 +77,7 @@ def do_query(q): ################################################################################ def get_suite_id (suite): + """ Returns database suite_id for given suite, caches result. """ global suite_id_cache if suite_id_cache.has_key(suite): @@ -85,6 +94,7 @@ def get_suite_id (suite): return suite_id def get_section_id (section): + """ Returns database section_id for given section, caches result. """ global section_id_cache if section_id_cache.has_key(section): @@ -101,6 +111,7 @@ def get_section_id (section): return section_id def get_priority_id (priority): + """ Returns database priority_id for given priority, caches result. 
""" global priority_id_cache if priority_id_cache.has_key(priority): @@ -117,6 +128,7 @@ def get_priority_id (priority): return priority_id def get_override_type_id (type): + """ Returns database override_id for given override_type type, caches result. """ global override_type_id_cache if override_type_id_cache.has_key(type): @@ -133,6 +145,7 @@ def get_override_type_id (type): return override_type_id def get_architecture_id (architecture): + """ Returns database architecture_id for given architecture, caches result. """ global architecture_id_cache if architecture_id_cache.has_key(architecture): @@ -149,6 +162,7 @@ def get_architecture_id (architecture): return architecture_id def get_archive_id (archive): + """ Returns database archive_id for given archive, caches result. """ global archive_id_cache archive = archive.lower() @@ -167,6 +181,7 @@ def get_archive_id (archive): return archive_id def get_component_id (component): + """ Returns database component_id for given component, caches result. """ global component_id_cache component = component.lower() @@ -185,6 +200,18 @@ def get_component_id (component): return component_id def get_location_id (location, component, archive): + """ + Returns database location_id for given combination of + location + component + archive. + + The 3 parameters are the database ids returned by the respective + "get_foo_id" functions. + + The result will be cached. + + """ global location_id_cache cache_key = location + '_' + component + '_' + location @@ -208,6 +235,7 @@ def get_location_id (location, component, archive): return location_id def get_source_id (source, version): + """ Returns database source_id for given combination of source and version, caches result. """ global source_id_cache cache_key = source + '_' + version + '_' @@ -225,6 +253,7 @@ def get_source_id (source, version): return source_id def get_suite_version(source, suite): + """ Returns database version for a given source in a given suite, caches result. """ global suite_version_cache cache_key = "%s_%s" % (source, suite) @@ -250,6 +279,14 @@ def get_suite_version(source, suite): ################################################################################ def get_or_set_maintainer_id (maintainer): + """ + If maintainer does not have an entry in the maintainer table yet, create one + and return its id. + If maintainer already has an entry, simply return its id. + + Result is cached. + + """ global maintainer_id_cache if maintainer_id_cache.has_key(maintainer): @@ -267,6 +304,14 @@ def get_or_set_maintainer_id (maintainer): ################################################################################ def get_or_set_keyring_id (keyring): + """ + If keyring does not have an entry in the keyring table yet, create one + and return its id. + If keyring already has an entry, simply return its id. + + Result is cached. + + """ global keyring_id_cache if keyring_id_cache.has_key(keyring): @@ -284,6 +329,14 @@ def get_or_set_keyring_id (keyring): ################################################################################ def get_or_set_uid_id (uid): + """ + If uid does not have an entry in the uid table yet, create one + and return its id. + If uid already has an entry, simply return its id. + + Result is cached. 
+ + """ global uid_id_cache if uid_id_cache.has_key(uid): @@ -301,6 +354,14 @@ def get_or_set_uid_id (uid): ################################################################################ def get_or_set_fingerprint_id (fingerprint): + """ + If fingerprintd does not have an entry in the fingerprint table yet, create one + and return its id. + If fingerprint already has an entry, simply return its id. + + Result is cached. + + """ global fingerprint_id_cache if fingerprint_id_cache.has_key(fingerprint): @@ -318,6 +379,25 @@ def get_or_set_fingerprint_id (fingerprint): ################################################################################ def get_files_id (filename, size, md5sum, location_id): + """ + Returns -1, -2 or the file_id for a given combination of + filename + size + md5sum + location_id. + + The database is queried using filename and location_id, size and md5sum are for + extra checks. + + Return values: + -1 - The given combination of arguments result in more (or less) than + one result from the database + -2 - The given size and md5sum do not match the values in the database + anything else is a file_id + + Result is cached. + + """ global files_id_cache cache_key = "%s_%d" % (filename, location_id) @@ -344,6 +424,14 @@ def get_files_id (filename, size, md5sum, location_id): ################################################################################ def get_or_set_queue_id (queue): + """ + If queue does not have an entry in the queue_name table yet, create one + and return its id. + If queue already has an entry, simply return its id. + + Result is cached. + + """ global queue_id_cache if queue_id_cache.has_key(queue): @@ -361,6 +449,12 @@ def get_or_set_queue_id (queue): ################################################################################ def set_files_id (filename, size, md5sum, sha1sum, sha256sum, location_id): + """ + Insert a new entry into the files table. + + Returns the new file_id + + """ global files_id_cache projectB.query("INSERT INTO files (filename, size, md5sum, sha1sum, sha256sum, location) VALUES ('%s', %d, '%s', '%s', '%s', %d)" % (filename, long(size), md5sum, sha1sum, sha256sum, location_id)) @@ -380,6 +474,7 @@ def set_files_id (filename, size, md5sum, sha1sum, sha256sum, location_id): ################################################################################ def get_maintainer (maintainer_id): + """ Return the name of the maintainer behind maintainer_id """ global maintainer_cache if not maintainer_cache.has_key(maintainer_id): @@ -391,9 +486,27 @@ def get_maintainer (maintainer_id): ################################################################################ def get_suites(pkgname, src=False): + """ Return the suites in which pkgname is. If src is True, query for source package, else binary. 
""" if src: - sql = "select suite_name from source, src_associations,suite where source.id=src_associations.source and source.source='%s' and src_associations.suite = suite.id"%pkgname + sql = """ + SELECT suite_name + FROM source, + src_associations, + suite + WHERE source.id = src_associations.source + AND source.source = '%s' + AND src_associations.suite = suite.id + """ % (pkgname) else: - sql = "select suite_name from binaries, bin_associations,suite where binaries.id=bin_associations.bin and package='%s' and bin_associations.suite = suite.id"%pkgname + sql = """ + SELECT suite_name + FROM binaries, + bin_associations, + suite + WHERE binaries.id = bin_associations.bin + AND package = '%s' + AND bin_associations.suite = suite.id + """ % (pkgname) + q = projectB.query(sql) return map(lambda x: x[0], q.getresult()) diff --git a/daklib/queue.py b/daklib/queue.py index 4061c05d..5bebe8ef 100755 --- a/daklib/queue.py +++ b/daklib/queue.py @@ -150,6 +150,7 @@ class Upload: self.Cnf = Cnf self.accept_count = 0 self.accept_bytes = 0L + self.reject_message = "" self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {}, legacy_source_untouchable = {}) @@ -338,7 +339,7 @@ class Upload: if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType): changes["distribution"] = {} - override_summary =""; + override_summary ="" file_keys = files.keys() file_keys.sort() for file_entry in file_keys: @@ -494,17 +495,17 @@ distribution.""" dsc.has_key("bts changelog"): (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".") - version_history = os.fdopen(temp_filename, 'w') + version_history = os.fdopen(fd, 'w') version_history.write(dsc["bts changelog"]) version_history.close() filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"], changes_file[:-8]+".versions") os.rename(temp_filename, filename) - os.chmod(filename, "0644") + os.chmod(filename, 0644) # Write out the binary -> source mapping. (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".") - debinfo = os.fdopen(temp_filename, 'w') + debinfo = os.fdopen(fd, 'w') for file_entry in file_keys: f = files[file_entry] if f["type"] == "deb": @@ -516,7 +517,7 @@ distribution.""" filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"], changes_file[:-8]+".debinfo") os.rename(temp_filename, filename) - os.chmod(filename, "0644") + os.chmod(filename, 0644) self.queue_build("accepted", Cnf["Dir::Queue::Accepted"]) @@ -1026,7 +1027,8 @@ SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su # for example, the package was in potato but had an -sa # upload in woody. So we need to choose the right one. - x = ql[0]; # default to something sane in case we don't match any or have only one + # default to something sane in case we don't match any or have only one + x = ql[0] if len(ql) > 1: for i in ql: @@ -1047,7 +1049,8 @@ SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su actual_size = os.stat(old_file)[stat.ST_SIZE] found = old_file suite_type = x[2] - dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install() + # need this for updating dsc_files in install() + dsc_files[dsc_file]["files id"] = x[3] # See install() in process-accepted... 
                         self.pkg.orig_tar_id = x[3]
                         self.pkg.orig_tar_gz = old_file
diff --git a/daklib/utils.py b/daklib/utils.py
index 4531b273..be4bcff7 100755
--- a/daklib/utils.py
+++ b/daklib/utils.py
@@ -665,11 +665,11 @@ def regex_safe (s):
 
 # Perform a substition of template
 def TemplateSubst(map, filename):
-    file = open_file(filename)
-    template = file.read()
+    templatefile = open_file(filename)
+    template = templatefile.read()
     for x in map.keys():
         template = template.replace(x,map[x])
-    file.close()
+    templatefile.close()
     return template
 
 ################################################################################
@@ -765,13 +765,13 @@ def find_next_free (dest, too_many=100):
 
 ################################################################################
 
 def result_join (original, sep = '\t'):
-    list = []
+    resultlist = []
     for i in xrange(len(original)):
         if original[i] == None:
-            list.append("")
+            resultlist.append("")
         else:
-            list.append(original[i])
-    return sep.join(list)
+            resultlist.append(original[i])
+    return sep.join(resultlist)
 
 ################################################################################
 
@@ -1098,7 +1098,7 @@ on error."""
         return "%s: tainted filename" % (filename)
 
     # Invoke gpgv on the file
-    status_read, status_write = os.pipe();
+    status_read, status_write = os.pipe()
     cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
     (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
 
@@ -1170,7 +1170,7 @@ used."""
         return None
 
     # Build the command line
-    status_read, status_write = os.pipe();
+    status_read, status_write = os.pipe()
     cmd = "gpgv --status-fd %s %s %s %s" % (
         status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)