import commands, os, pg, stat, string, sys, time
import apt_pkg, apt_inst
-import dak.lib.database as database
-import dak.lib.utils as utils
+import daklib.database
+import daklib.utils
################################################################################
filename = os.path.abspath(i[0] + i[1])
db_files[filename] = ""
if os.access(filename, os.R_OK) == 0:
- utils.warn("'%s' doesn't exist." % (filename))
+ daklib.utils.warn("'%s' doesn't exist." % (filename))
filename = Cnf["Dir::Override"]+'override.unreferenced'
if os.path.exists(filename):
- file = utils.open_file(filename)
+ file = daklib.utils.open_file(filename)
for filename in file.readlines():
filename = filename[:-1]
excluded[filename] = ""
os.path.walk(Cnf["Dir::Root"]+'pool/', process_dir, None)
print
- print "%s wasted..." % (utils.size_type(waste))
+ print "%s wasted..." % (daklib.utils.size_type(waste))
################################################################################
continue
component = component.lower()
list_filename = '%s%s_%s_source.list' % (Cnf["Dir::Lists"], suite, component)
- list_file = utils.open_file(list_filename)
+ list_file = daklib.utils.open_file(list_filename)
for line in list_file.readlines():
file = line[:-1]
try:
- utils.parse_changes(file, signing_rules=1)
- except utils.invalid_dsc_format_exc, line:
- utils.warn("syntax error in .dsc file '%s', line %s." % (file, line))
+ daklib.utils.parse_changes(file, signing_rules=1)
+ except daklib.utils.invalid_dsc_format_exc, line:
+ daklib.utils.warn("syntax error in .dsc file '%s', line %s." % (file, line))
count += 1
if count:
- utils.warn("Found %s invalid .dsc files." % (count))
+ daklib.utils.warn("Found %s invalid .dsc files." % (count))
################################################################################
print suite
print "-"*len(suite)
print
- suite_id = database.get_suite_id(suite)
+ suite_id = daklib.database.get_suite_id(suite)
q = projectB.query("""
SELECT DISTINCT b.package FROM binaries b, bin_associations ba
WHERE b.id = ba.bin AND ba.suite = %s AND NOT EXISTS
db_md5sum = i[2]
db_size = int(i[3])
try:
- file = utils.open_file(filename)
+ file = daklib.utils.open_file(filename)
except:
- utils.warn("can't open '%s'." % (filename))
+ daklib.utils.warn("can't open '%s'." % (filename))
continue
md5sum = apt_pkg.md5sum(file)
size = os.stat(filename)[stat.ST_SIZE]
if md5sum != db_md5sum:
- utils.warn("**WARNING** md5sum mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, md5sum, db_md5sum))
+ daklib.utils.warn("**WARNING** md5sum mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, md5sum, db_md5sum))
if size != db_size:
- utils.warn("**WARNING** size mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, size, db_size))
+ daklib.utils.warn("**WARNING** size mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, size, db_size))
print "Done."
for i in ql:
filename = os.path.abspath(i[0] + i[1])
if os.access(filename, os.R_OK):
- file = utils.open_file(filename)
+ file = daklib.utils.open_file(filename)
current_file = filename
sys.stderr.write("Processing %s.\n" % (filename))
apt_inst.debExtract(file,Ent,"control.tar.gz")
filename = os.path.abspath(i[0] + i[1])
try:
# NB: don't enforce .dsc syntax
- dsc = utils.parse_changes(filename)
+ dsc = daklib.utils.parse_changes(filename)
except:
- utils.fubar("error parsing .dsc file '%s'." % (filename))
- dsc_files = utils.build_file_list(dsc, is_a_dsc=1)
+ daklib.utils.fubar("error parsing .dsc file '%s'." % (filename))
+ dsc_files = daklib.utils.build_file_list(dsc, is_a_dsc=1)
has_tar = 0
for file in dsc_files.keys():
- m = utils.re_issource.match(file)
+ m = daklib.utils.re_issource.match(file)
if not m:
- utils.fubar("%s not recognised as source." % (file))
+ daklib.utils.fubar("%s not recognised as source." % (file))
type = m.group(3)
if type == "orig.tar.gz" or type == "tar.gz":
has_tar = 1
if not has_tar:
- utils.warn("%s has no .tar.gz in the .dsc file." % (file))
+ daklib.utils.warn("%s has no .tar.gz in the .dsc file." % (file))
count += 1
if count:
- utils.warn("Found %s invalid .dsc files." % (count))
+ daklib.utils.warn("Found %s invalid .dsc files." % (count))
################################################################################
filename = "%s/dists/%s/%s/source/Sources.gz" % (Cnf["Dir::Root"], suite, component)
print "Processing %s..." % (filename)
# apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance...
- temp_filename = utils.temp_filename()
+ temp_filename = daklib.utils.temp_filename()
(result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
if (result != 0):
sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
sys.exit(result)
- sources = utils.open_file(temp_filename)
+ sources = daklib.utils.open_file(temp_filename)
Sources = apt_pkg.ParseTagFile(sources)
while Sources.Step():
source = Sources.Section.Find('Package')
if directory.find("potato") == -1:
print "W: %s missing." % (filename)
else:
- pool_location = utils.poolify (source, component)
+ pool_location = daklib.utils.poolify (source, component)
pool_filename = "%s/%s/%s" % (Cnf["Dir::Pool"], pool_location, name)
if not os.path.exists(pool_filename):
print "E: %s missing (%s)." % (filename, pool_filename)
# Create symlink
pool_filename = os.path.normpath(pool_filename)
filename = os.path.normpath(filename)
- src = utils.clean_symlink(pool_filename, filename, Cnf["Dir::Root"])
+ src = daklib.utils.clean_symlink(pool_filename, filename, Cnf["Dir::Root"])
print "Symlinking: %s -> %s" % (filename, src)
#os.symlink(src, filename)
sources.close()
% (Cnf["Dir::Root"], suite, component, architecture)
print "Processing %s..." % (filename)
# apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance...
- temp_filename = utils.temp_filename()
+ temp_filename = daklib.utils.temp_filename()
(result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
if (result != 0):
sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
sys.exit(result)
- packages = utils.open_file(temp_filename)
+ packages = daklib.utils.open_file(temp_filename)
Packages = apt_pkg.ParseTagFile(packages)
while Packages.Step():
filename = "%s/%s" % (Cnf["Dir::Root"], Packages.Section.Find('Filename'))
filename = os.path.normpath(i[0] + i[1])
# file_id = i[2]
if os.access(filename, os.R_OK) == 0:
- utils.warn("%s: doesn't exist." % (filename))
+ daklib.utils.warn("%s: doesn't exist." % (filename))
else:
if os.path.islink(filename):
- utils.warn("%s: is a symlink." % (filename))
+ daklib.utils.warn("%s: is a symlink." % (filename))
# You probably don't want to use the rest of this...
# print "%s: is a symlink." % (filename)
# dest = os.readlink(filename)
# (location, location_id) = locations[path]
# break
# if not location_id:
-# utils.fubar("Can't find location for %s (%s)." % (dest, filename))
+# daklib.utils.fubar("Can't find location for %s (%s)." % (dest, filename))
# new_filename = dest.replace(location, "")
# q = projectB.query("UPDATE files SET filename = '%s', location = %s WHERE id = %s" % (new_filename, location_id, file_id))
# q = projectB.query("COMMIT WORK")
if not name.endswith(".dsc"):
continue
filename = os.path.abspath(dirname+'/'+name)
- dsc = utils.parse_changes(filename)
+ dsc = daklib.utils.parse_changes(filename)
for field_name in [ "build-depends", "build-depends-indep" ]:
field = dsc.get(field_name)
if field:
def main ():
global Cnf, projectB, db_files, waste, excluded
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Check-Archive::Options::Help")]
for i in [ "help" ]:
if not Cnf.has_key("Check-Archive::Options::%s" % (i)):
usage()
if len(args) < 1:
- utils.warn("dak check-archive requires at least one argument")
+ daklib.utils.warn("dak check-archive requires at least one argument")
usage(1)
elif len(args) > 1:
- utils.warn("dak check-archive accepts only one argument")
+ daklib.utils.warn("dak check-archive accepts only one argument")
usage(1)
mode = args[0].lower()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- database.init(Cnf, projectB)
+ daklib.database.init(Cnf, projectB)
if mode == "md5sums":
check_md5sums()
elif mode == "validate-builddeps":
check_build_depends()
else:
- utils.warn("unknown mode '%s'" % (mode))
+ daklib.utils.warn("unknown mode '%s'" % (mode))
usage(1)
################################################################################
import pg, sys, os
import apt_pkg
-import dak.lib.database as database
-import dak.lib.logging as logging
-import dak.lib.utils as utils
+import daklib.database
+import daklib.logging
+import daklib.utils
################################################################################
def process(osuite, affected_suites, originosuite, component, type):
global Logger, Options, projectB, sections, priorities
- osuite_id = database.get_suite_id(osuite)
+ osuite_id = daklib.database.get_suite_id(osuite)
if osuite_id == -1:
- utils.fubar("Suite '%s' not recognised." % (osuite))
+ daklib.utils.fubar("Suite '%s' not recognised." % (osuite))
originosuite_id = None
if originosuite:
- originosuite_id = database.get_suite_id(originosuite)
+ originosuite_id = daklib.database.get_suite_id(originosuite)
if originosuite_id == -1:
- utils.fubar("Suite '%s' not recognised." % (originosuite))
+ daklib.utils.fubar("Suite '%s' not recognised." % (originosuite))
- component_id = database.get_component_id(component)
+ component_id = daklib.database.get_component_id(component)
if component_id == -1:
- utils.fubar("Component '%s' not recognised." % (component))
+ daklib.utils.fubar("Component '%s' not recognised." % (component))
- type_id = database.get_override_type_id(type)
+ type_id = daklib.database.get_override_type_id(type)
if type_id == -1:
- utils.fubar("Type '%s' not recognised. (Valid types are deb, udeb and dsc)" % (type))
- dsc_type_id = database.get_override_type_id("dsc")
- deb_type_id = database.get_override_type_id("deb")
+ daklib.utils.fubar("Type '%s' not recognised. (Valid types are deb, udeb and dsc)" % (type))
+ dsc_type_id = daklib.database.get_override_type_id("dsc")
+ deb_type_id = daklib.database.get_override_type_id("deb")
- source_priority_id = database.get_priority_id("source")
+ source_priority_id = daklib.database.get_priority_id("source")
if type == "deb" or type == "udeb":
packages = {}
src_packages[package] = 1
else:
if blacklist.has_key(package):
- utils.warn("%s in incoming, not touching" % package)
+ daklib.utils.warn("%s in incoming, not touching" % package)
continue
Logger.log(["removing unused override", osuite, component,
type, package, priorities[i[1]], sections[i[2]], i[3]])
for package, hasoverride in src_packages.items():
if not hasoverride:
- utils.warn("%s has no override!" % package)
+ daklib.utils.warn("%s has no override!" % package)
else: # binary override
for i in q.getresult():
packages[package] = 1
else:
if blacklist.has_key(package):
- utils.warn("%s in incoming, not touching" % package)
+ daklib.utils.warn("%s in incoming, not touching" % package)
continue
Logger.log(["removing unused override", osuite, component,
type, package, priorities[i[1]], sections[i[2]], i[3]])
for package, hasoverride in packages.items():
if not hasoverride:
- utils.warn("%s has no override!" % package)
+ daklib.utils.warn("%s has no override!" % package)
projectB.query("COMMIT WORK")
sys.stdout.flush()
def main ():
global Logger, Options, projectB, sections, priorities
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Check-Overrides::Options::Help"),
('n',"no-action", "Check-Overrides::Options::No-Action")]
usage()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- database.init(Cnf, projectB)
+ daklib.database.init(Cnf, projectB)
# init sections, priorities:
q = projectB.query("SELECT id, section FROM section")
priorities[i[0]] = i[1]
if not Options["No-Action"]:
- Logger = logging.Logger(Cnf, "check-overrides")
+ Logger = daklib.logging.Logger(Cnf, "check-overrides")
else:
- Logger = logging.Logger(Cnf, "check-overrides", 1)
+ Logger = daklib.logging.Logger(Cnf, "check-overrides", 1)
gen_blacklist(Cnf["Dir::Queue::Accepted"])
suiteids.append(i[0])
if len(suiteids) != len(suites) or len(suiteids) < 1:
- utils.fubar("Couldn't find id's of all suites: %s" % suites)
+ daklib.utils.fubar("Couldn't find id's of all suites: %s" % suites)
for component in Cnf.SubTree("Component").List():
if component == "mixed":
import pg, sys, os
import apt_pkg, apt_inst
-import dak.lib.database as database
-import dak.lib.utils as utils
+import daklib.database
+import daklib.utils
################################################################################
if stable[dep].has_key(arch):
if apt_pkg.CheckDep(stable[dep][arch], constraint, version):
if Options["debug"]:
- print "Found %s as a real package." % (utils.pp_deps(parsed_dep))
+ print "Found %s as a real package." % (daklib.utils.pp_deps(parsed_dep))
unsat = 0
break
# As a virtual?
if stable_virtual[dep].has_key(arch):
if not constraint and not version:
if Options["debug"]:
- print "Found %s as a virtual package." % (utils.pp_deps(parsed_dep))
+ print "Found %s as a virtual package." % (daklib.utils.pp_deps(parsed_dep))
unsat = 0
break
# As part of the same .changes?
- epochless_version = utils.re_no_epoch.sub('', version)
+ epochless_version = daklib.utils.re_no_epoch.sub('', version)
dep_filename = "%s_%s_%s.deb" % (dep, epochless_version, arch)
if files.has_key(dep_filename):
if Options["debug"]:
- print "Found %s in the same upload." % (utils.pp_deps(parsed_dep))
+ print "Found %s in the same upload." % (daklib.utils.pp_deps(parsed_dep))
unsat = 0
break
# Not found...
# [FIXME: must be a better way ... ]
- error = "%s not found. [Real: " % (utils.pp_deps(parsed_dep))
+ error = "%s not found. [Real: " % (daklib.utils.pp_deps(parsed_dep))
if stable.has_key(dep):
if stable[dep].has_key(arch):
error += "%s:%s:%s" % (dep, arch, stable[dep][arch])
unsat.append(error)
if unsat:
- sys.stderr.write("MWAAP! %s: '%s' %s can not be satisifed:\n" % (filename, utils.pp_deps(parsed_dep), dep_type))
+ sys.stderr.write("MWAAP! %s: '%s' %s can not be satisifed:\n" % (filename, daklib.utils.pp_deps(parsed_dep), dep_type))
for error in unsat:
sys.stderr.write(" %s\n" % (error))
pkg_unsat = 1
def check_package(filename, files):
try:
- control = apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(filename)))
+ control = apt_pkg.ParseSection(apt_inst.debExtractControl(daklib.utils.open_file(filename)))
except:
- utils.warn("%s: debExtractControl() raised %s." % (filename, sys.exc_type))
+ daklib.utils.warn("%s: debExtractControl() raised %s." % (filename, sys.exc_type))
return 1
Depends = control.Find("Depends")
Pre_Depends = control.Find("Pre-Depends")
def check_changes (filename):
try:
- changes = utils.parse_changes(filename)
- files = utils.build_file_list(changes)
+ changes = daklib.utils.parse_changes(filename)
+ files = daklib.utils.build_file_list(changes)
except:
- utils.warn("Error parsing changes file '%s'" % (filename))
+ daklib.utils.warn("Error parsing changes file '%s'" % (filename))
return
result = 0
# Move to the pool directory
cwd = os.getcwd()
file = files.keys()[0]
- pool_dir = Cnf["Dir::Pool"] + '/' + utils.poolify(changes["source"], files[file]["component"])
+ pool_dir = Cnf["Dir::Pool"] + '/' + daklib.utils.poolify(changes["source"], files[file]["component"])
os.chdir(pool_dir)
changes_result = 0
################################################################################
def check_joey (filename):
- file = utils.open_file(filename)
+ file = daklib.utils.open_file(filename)
cwd = os.getcwd()
os.chdir("%s/dists/proposed-updates" % (Cnf["Dir::Root"]))
if line.find('install') != -1:
split_line = line.split()
if len(split_line) != 2:
- utils.fubar("Parse error (not exactly 2 elements): %s" % (line))
+ daklib.utils.fubar("Parse error (not exactly 2 elements): %s" % (line))
install_type = split_line[0]
if install_type not in [ "install", "install-u", "sync-install" ]:
- utils.fubar("Unknown install type ('%s') from: %s" % (install_type, line))
+ daklib.utils.fubar("Unknown install type ('%s') from: %s" % (install_type, line))
changes_filename = split_line[1]
if Options["debug"]:
print "Processing %s..." % (changes_filename)
suite = "stable"
stable = {}
components = Cnf.ValueList("Suite::%s::Components" % (suite))
- architectures = filter(utils.real_arch, Cnf.ValueList("Suite::%s::Architectures" % (suite)))
+ architectures = filter(daklib.utils.real_arch, Cnf.ValueList("Suite::%s::Architectures" % (suite)))
for component in components:
for architecture in architectures:
filename = "%s/dists/%s/%s/binary-%s/Packages" % (Cnf["Dir::Root"], suite, component, architecture)
- packages = utils.open_file(filename, 'r')
+ packages = daklib.utils.open_file(filename, 'r')
Packages = apt_pkg.ParseTagFile(packages)
while Packages.Step():
package = Packages.Section.Find('Package')
def main ():
global Cnf, projectB, Options
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('d', "debug", "Check-Proposed-Updates::Options::Debug"),
('q',"quiet","Check-Proposed-Updates::Options::Quiet"),
if Options["Help"]:
usage(0)
if not arguments:
- utils.fubar("need at least one package name as an argument.")
+ daklib.utils.fubar("need at least one package name as an argument.")
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- database.init(Cnf, projectB)
+ daklib.database.init(Cnf, projectB)
print "Parsing packages files...",
parse_packages()
elif file.endswith(".joey"):
check_joey(file)
else:
- utils.fubar("Unrecognised file type: '%s'." % (file))
+ daklib.utils.fubar("Unrecognised file type: '%s'." % (file))
#######################################################################################
import os, pg, re, sys
import apt_pkg
-import dak.lib.database as database
-import dak.lib.utils as utils
+import daklib.database
+import daklib.utils
################################################################################
def check_changes (filename):
try:
- changes = utils.parse_changes(filename)
- files = utils.build_file_list(changes)
+ changes = daklib.utils.parse_changes(filename)
+ files = daklib.utils.build_file_list(changes)
except:
- utils.warn("Couldn't read changes file '%s'." % (filename))
+ daklib.utils.warn("Couldn't read changes file '%s'." % (filename))
return
num_files = len(files.keys())
for file in files.keys():
- if utils.re_isadeb.match(file):
+ if daklib.utils.re_isadeb.match(file):
m = re_isdeb.match(file)
pkg = m.group(1)
version = m.group(2)
if Options["debug"]:
print "BINARY: %s ==> %s_%s_%s" % (file, pkg, version, arch)
else:
- m = utils.re_issource.match(file)
+ m = daklib.utils.re_issource.match(file)
if m:
pkg = m.group(1)
version = m.group(2)
if Options["debug"]:
print "SOURCE: %s ==> %s_%s_%s" % (file, pkg, version, arch)
else:
- utils.fubar("unknown type, fix me")
+ daklib.utils.fubar("unknown type, fix me")
if not pu.has_key(pkg):
# FIXME
- utils.warn("%s doesn't seem to exist in p-u?? (from %s [%s])" % (pkg, file, filename))
+ daklib.utils.warn("%s doesn't seem to exist in p-u?? (from %s [%s])" % (pkg, file, filename))
continue
if not pu[pkg].has_key(arch):
# FIXME
- utils.warn("%s doesn't seem to exist for %s in p-u?? (from %s [%s])" % (pkg, arch, file, filename))
+ daklib.utils.warn("%s doesn't seem to exist for %s in p-u?? (from %s [%s])" % (pkg, arch, file, filename))
continue
- pu_version = utils.re_no_epoch.sub('', pu[pkg][arch])
+ pu_version = daklib.utils.re_no_epoch.sub('', pu[pkg][arch])
if pu_version == version:
if Options["verbose"]:
print "%s: ok" % (file)
if new_num_files == 0:
print "%s: no files left, superseded by %s" % (filename, pu_version)
dest = Cnf["Dir::Morgue"] + "/misc/"
- utils.move(filename, dest)
+ daklib.utils.move(filename, dest)
elif new_num_files < num_files:
print "%s: lost files, MWAAP." % (filename)
else:
################################################################################
def check_joey (filename):
- file = utils.open_file(filename)
+ file = daklib.utils.open_file(filename)
cwd = os.getcwd()
os.chdir("%s/dists/proposed-updates" % (Cnf["Dir::Root"]))
if line.find('install') != -1:
split_line = line.split()
if len(split_line) != 2:
- utils.fubar("Parse error (not exactly 2 elements): %s" % (line))
+ daklib.utils.fubar("Parse error (not exactly 2 elements): %s" % (line))
install_type = split_line[0]
if install_type not in [ "install", "install-u", "sync-install" ]:
- utils.fubar("Unknown install type ('%s') from: %s" % (install_type, line))
+ daklib.utils.fubar("Unknown install type ('%s') from: %s" % (install_type, line))
changes_filename = split_line[1]
if Options["debug"]:
print "Processing %s..." % (changes_filename)
def main ():
global Cnf, projectB, Options
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('d', "debug", "Clean-Proposed-Updates::Options::Debug"),
('v',"verbose","Clean-Proposed-Updates::Options::Verbose"),
if Options["Help"]:
usage(0)
if not arguments:
- utils.fubar("need at least one package name as an argument.")
+ daklib.utils.fubar("need at least one package name as an argument.")
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- database.init(Cnf, projectB)
+ daklib.database.init(Cnf, projectB)
init_pu()
elif file.endswith(".joey"):
check_joey(file)
else:
- utils.fubar("Unrecognised file type: '%s'." % (file))
+ daklib.utils.fubar("Unrecognised file type: '%s'." % (file))
#######################################################################################
import os, stat, sys, time
import apt_pkg
-import dak.lib.utils as utils
+import daklib.utils
################################################################################
if not os.path.exists(del_dir):
os.makedirs(del_dir, 02775)
if not os.path.isdir(del_dir):
- utils.fubar("%s must be a directory." % (del_dir))
+ daklib.utils.fubar("%s must be a directory." % (del_dir))
# Move to the directory to clean
incoming = Options["Incoming"]
dest_filename = del_dir + '/' + os.path.basename(file)
# If the destination file exists; try to find another filename to use
if os.path.exists(dest_filename):
- dest_filename = utils.find_next_free(dest_filename, 10)
- utils.move(file, dest_filename, 0660)
+ dest_filename = daklib.utils.find_next_free(dest_filename, 10)
+ daklib.utils.move(file, dest_filename, 0660)
else:
- utils.warn("skipping '%s', permission denied." % (os.path.basename(file)))
+ daklib.utils.warn("skipping '%s', permission denied." % (os.path.basename(file)))
# Removes any old files.
# [Used for Incoming/REJECT]
# Proces all .changes and .dsc files.
for changes_filename in changes_files:
try:
- changes = utils.parse_changes(changes_filename)
- files = utils.build_file_list(changes)
+ changes = daklib.utils.parse_changes(changes_filename)
+ files = daklib.utils.build_file_list(changes)
except:
- utils.warn("error processing '%s'; skipping it. [Got %s]" % (changes_filename, sys.exc_type))
+ daklib.utils.warn("error processing '%s'; skipping it. [Got %s]" % (changes_filename, sys.exc_type))
continue
dsc_files = {}
for file in files.keys():
if file.endswith(".dsc"):
try:
- dsc = utils.parse_changes(file)
- dsc_files = utils.build_file_list(dsc, is_a_dsc=1)
+ dsc = daklib.utils.parse_changes(file)
+ dsc_files = daklib.utils.build_file_list(dsc, is_a_dsc=1)
except:
- utils.warn("error processing '%s'; skipping it. [Got %s]" % (file, sys.exc_type))
+ daklib.utils.warn("error processing '%s'; skipping it. [Got %s]" % (file, sys.exc_type))
continue
# Ensure all the files we've seen aren't deleted
def main ():
global Cnf, Options
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
for i in ["Help", "Incoming", "No-Action", "Verbose" ]:
if not Cnf.has_key("Clean-Queues::Options::%s" % (i)):
import os, pg, stat, sys, time
import apt_pkg
-import dak.lib.utils as utils
+import daklib.utils
################################################################################
for i in q.getresult():
filename = i[0] + i[1]
if not os.path.exists(filename):
- utils.warn("can not find '%s'." % (filename))
+ daklib.utils.warn("can not find '%s'." % (filename))
continue
if os.path.isfile(filename):
if os.path.islink(filename):
dest_filename = dest + '/' + os.path.basename(filename)
# If the destination file exists; try to find another filename to use
if os.path.exists(dest_filename):
- dest_filename = utils.find_next_free(dest_filename)
+ dest_filename = daklib.utils.find_next_free(dest_filename)
if Options["No-Action"]:
print "Cleaning %s -> %s ..." % (filename, dest_filename)
else:
- utils.move(filename, dest_filename)
+ daklib.utils.move(filename, dest_filename)
else:
- utils.fubar("%s is neither symlink nor file?!" % (filename))
+ daklib.utils.fubar("%s is neither symlink nor file?!" % (filename))
# Delete from the 'files' table
if not Options["No-Action"]:
projectB.query("DELETE FROM files WHERE last_used <= '%s'" % (delete_date))
sys.stdout.write("done. (%d seconds)]\n" % (int(time.time()-before)))
if count > 0:
- sys.stderr.write("Cleaned %d files, %s.\n" % (count, utils.size_type(size)))
+ sys.stderr.write("Cleaned %d files, %s.\n" % (count, daklib.utils.size_type(size)))
################################################################################
for i in q.getresult():
filename = i[0]
if not os.path.exists(filename):
- utils.warn("%s (from queue_build) doesn't exist." % (filename))
+ daklib.utils.warn("%s (from queue_build) doesn't exist." % (filename))
continue
if not Cnf.FindB("Dinstall::SecurityQueueBuild") and not os.path.islink(filename):
- utils.fubar("%s (from queue_build) should be a symlink but isn't." % (filename))
+ daklib.utils.fubar("%s (from queue_build) should be a symlink but isn't." % (filename))
os.unlink(filename)
count += 1
projectB.query("DELETE FROM queue_build WHERE last_used <= '%s'" % (our_delete_date))
def main():
global Cnf, Options, projectB, delete_date, now_date
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
for i in ["Help", "No-Action" ]:
if not Cnf.has_key("Clean-Suites::Options::%s" % (i)):
Cnf["Clean-Suites::Options::%s" % (i)] = ""
import pg, sys
import apt_pkg
-import dak.lib.database as database
-import dak.lib.utils as utils
+import daklib.database
+import daklib.utils
################################################################################
def main ():
global Cnf, projectB
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Compare-Suites::Options::Help")]
for i in [ "help" ]:
if not Cnf.has_key("Compare-Suites::Options::%s" % (i)):
usage()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- database.init(Cnf, projectB)
+ daklib.database.init(Cnf, projectB)
src_suite = "stable"
dst_suite = "unstable"
- src_suite_id = database.get_suite_id(src_suite)
- dst_suite_id = database.get_suite_id(dst_suite)
- arch_all_id = database.get_architecture_id("all")
- dsc_type_id = database.get_override_type_id("dsc")
+ src_suite_id = daklib.database.get_suite_id(src_suite)
+ dst_suite_id = daklib.database.get_suite_id(dst_suite)
+ arch_all_id = daklib.database.get_architecture_id("all")
+ dsc_type_id = daklib.database.get_override_type_id("dsc")
for arch in Cnf.ValueList("Suite::%s::Architectures" % (src_suite)):
if arch == "source":
if arch == "all":
continue
- arch_id = database.get_architecture_id(arch)
+ arch_id = daklib.database.get_architecture_id(arch)
q = projectB.query("""
SELECT b_src.package, b_src.version, a.arch_string
FROM binaries b_src, bin_associations ba, override o, architecture a
################################################################################
def process_file (file, suite, component, type, action):
- suite_id = database.get_suite_id(suite)
+ suite_id = daklib.database.get_suite_id(suite)
if suite_id == -1:
- utils.fubar("Suite '%s' not recognised." % (suite))
+ daklib.utils.fubar("Suite '%s' not recognised." % (suite))
- component_id = database.get_component_id(component)
+ component_id = daklib.database.get_component_id(component)
if component_id == -1:
- utils.fubar("Component '%s' not recognised." % (component))
+ daklib.utils.fubar("Component '%s' not recognised." % (component))
- type_id = database.get_override_type_id(type)
+ type_id = daklib.database.get_override_type_id(type)
if type_id == -1:
- utils.fubar("Type '%s' not recognised. (Valid types are deb, udeb and dsc.)" % (type))
+ daklib.utils.fubar("Type '%s' not recognised. (Valid types are deb, udeb and dsc.)" % (type))
# --set is done mostly internal for performance reasons; most
# invocations of --set will be updates and making people wait 2-3
start_time = time.time()
projectB.query("BEGIN WORK")
for line in file.readlines():
- line = utils.re_comments.sub('', line).strip()
+ line = daklib.utils.re_comments.sub('', line).strip()
if line == "":
continue
elif len(split_line) == 3:
(package, section, maintainer_override) = split_line
else:
- utils.warn("'%s' does not break into 'package section [maintainer-override]'." % (line))
+ daklib.utils.warn("'%s' does not break into 'package section [maintainer-override]'." % (line))
c_error += 1
continue
priority = "source"
elif len(split_line) == 4:
(package, priority, section, maintainer_override) = split_line
else:
- utils.warn("'%s' does not break into 'package priority section [maintainer-override]'." % (line))
+ daklib.utils.warn("'%s' does not break into 'package priority section [maintainer-override]'." % (line))
c_error += 1
continue
- section_id = database.get_section_id(section)
+ section_id = daklib.database.get_section_id(section)
if section_id == -1:
- utils.warn("'%s' is not a valid section. ['%s' in suite %s, component %s]." % (section, package, suite, component))
+ daklib.utils.warn("'%s' is not a valid section. ['%s' in suite %s, component %s]." % (section, package, suite, component))
c_error += 1
continue
- priority_id = database.get_priority_id(priority)
+ priority_id = daklib.database.get_priority_id(priority)
if priority_id == -1:
- utils.warn("'%s' is not a valid priority. ['%s' in suite %s, component %s]." % (priority, package, suite, component))
+ daklib.utils.warn("'%s' is not a valid priority. ['%s' in suite %s, component %s]." % (priority, package, suite, component))
c_error += 1
continue
if new.has_key(package):
- utils.warn("Can't insert duplicate entry for '%s'; ignoring all but the first. [suite %s, component %s]" % (package, suite, component))
+ daklib.utils.warn("Can't insert duplicate entry for '%s'; ignoring all but the first. [suite %s, component %s]" % (package, suite, component))
c_error += 1
continue
new[package] = ""
################################################################################
def list(suite, component, type):
- suite_id = database.get_suite_id(suite)
+ suite_id = daklib.database.get_suite_id(suite)
if suite_id == -1:
- utils.fubar("Suite '%s' not recognised." % (suite))
+ daklib.utils.fubar("Suite '%s' not recognised." % (suite))
- component_id = database.get_component_id(component)
+ component_id = daklib.database.get_component_id(component)
if component_id == -1:
- utils.fubar("Component '%s' not recognised." % (component))
+ daklib.utils.fubar("Component '%s' not recognised." % (component))
- type_id = database.get_override_type_id(type)
+ type_id = daklib.database.get_override_type_id(type)
if type_id == -1:
- utils.fubar("Type '%s' not recognised. (Valid types are deb, udeb and dsc)" % (type))
+ daklib.utils.fubar("Type '%s' not recognised. (Valid types are deb, udeb and dsc)" % (type))
if type == "dsc":
q = projectB.query("SELECT o.package, s.section, o.maintainer FROM override o, section s WHERE o.suite = %s AND o.component = %s AND o.type = %s AND o.section = s.id ORDER BY s.section, o.package" % (suite_id, component_id, type_id))
for i in q.getresult():
- print utils.result_join(i)
+ print daklib.utils.result_join(i)
else:
q = projectB.query("SELECT o.package, p.priority, s.section, o.maintainer, p.level FROM override o, priority p, section s WHERE o.suite = %s AND o.component = %s AND o.type = %s AND o.priority = p.id AND o.section = s.id ORDER BY s.section, p.level, o.package" % (suite_id, component_id, type_id))
for i in q.getresult():
- print utils.result_join(i[:-1])
+ print daklib.utils.result_join(i[:-1])
################################################################################
def main ():
global Cnf, projectB, Logger
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('a', "add", "Control-Overrides::Options::Add"),
('c', "component", "Control-Overrides::Options::Component", "HasArg"),
('h', "help", "Control-Overrides::Options::Help"),
usage()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- database.init(Cnf, projectB)
+ daklib.database.init(Cnf, projectB)
action = None
for i in [ "add", "list", "set" ]:
if Cnf["Control-Overrides::Options::%s" % (i)]:
if action:
- utils.fubar("Can not perform more than one action at once.")
+ daklib.utils.fubar("Can not perform more than one action at once.")
action = i
(suite, component, type) = (Cnf["Control-Overrides::Options::Suite"],
if action == "list":
list(suite, component, type)
else:
- Logger = logging.Logger(Cnf, "control-overrides")
+ Logger = daklib.logging.Logger(Cnf, "control-overrides")
if file_list:
for file in file_list:
- process_file(utils.open_file(file), suite, component, type, action)
+ process_file(daklib.utils.open_file(file), suite, component, type, action)
else:
process_file(sys.stdin, suite, component, type, action)
Logger.close()
import pg, sys
import apt_pkg
-import dak.lib.database as database
-import dak.lib.logging as logging
-import dak.lib.utils as utils
+import daklib.database
+import daklib.logging
+import daklib.utils
#######################################################################################
ql = q.getresult()
if not ql:
- utils.warn("Couldn't find '%s~%s~%s'." % (package, version, architecture))
+ daklib.utils.warn("Couldn't find '%s~%s~%s'." % (package, version, architecture))
return None
if len(ql) > 1:
- utils.warn("Found more than one match for '%s~%s~%s'." % (package, version, architecture))
+ daklib.utils.warn("Found more than one match for '%s~%s~%s'." % (package, version, architecture))
return None
id = ql[0][0]
return id
for line in lines:
split_line = line.strip().split()
if len(split_line) != 3:
- utils.warn("'%s' does not break into 'package version architecture'." % (line[:-1]))
+ daklib.utils.warn("'%s' does not break into 'package version architecture'." % (line[:-1]))
continue
key = " ".join(split_line)
desired[key] = ""
def process_file (file, suite, action):
- suite_id = database.get_suite_id(suite)
+ suite_id = daklib.database.get_suite_id(suite)
if action == "set":
set_suite (file, suite_id)
for line in lines:
split_line = line.strip().split()
if len(split_line) != 3:
- utils.warn("'%s' does not break into 'package version architecture'." % (line[:-1]))
+ daklib.utils.warn("'%s' does not break into 'package version architecture'." % (line[:-1]))
continue
(package, version, architecture) = split_line
# Take action
if action == "add":
if assoication_id:
- utils.warn("'%s~%s~%s' already exists in suite %s." % (package, version, architecture, suite))
+ daklib.utils.warn("'%s~%s~%s' already exists in suite %s." % (package, version, architecture, suite))
continue
else:
q = projectB.query("INSERT INTO src_associations (suite, source) VALUES (%s, %s)" % (suite_id, id))
elif action == "remove":
if assoication_id == None:
- utils.warn("'%s~%s~%s' doesn't exist in suite %s." % (package, version, architecture, suite))
+ daklib.utils.warn("'%s~%s~%s' doesn't exist in suite %s." % (package, version, architecture, suite))
continue
else:
q = projectB.query("DELETE FROM src_associations WHERE id = %s" % (assoication_id))
# Take action
if action == "add":
if assoication_id:
- utils.warn("'%s~%s~%s' already exists in suite %s." % (package, version, architecture, suite))
+ daklib.utils.warn("'%s~%s~%s' already exists in suite %s." % (package, version, architecture, suite))
continue
else:
q = projectB.query("INSERT INTO bin_associations (suite, bin) VALUES (%s, %s)" % (suite_id, id))
elif action == "remove":
if assoication_id == None:
- utils.warn("'%s~%s~%s' doesn't exist in suite %s." % (package, version, architecture, suite))
+ daklib.utils.warn("'%s~%s~%s' doesn't exist in suite %s." % (package, version, architecture, suite))
continue
else:
q = projectB.query("DELETE FROM bin_associations WHERE id = %s" % (assoication_id))
#######################################################################################
def get_list (suite):
- suite_id = database.get_suite_id(suite)
+ suite_id = daklib.database.get_suite_id(suite)
# List binaries
q = projectB.query("SELECT b.package, b.version, a.arch_string FROM binaries b, bin_associations ba, architecture a WHERE ba.suite = %s AND ba.bin = b.id AND b.architecture = a.id" % (suite_id))
ql = q.getresult()
def main ():
global Cnf, projectB, Logger
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('a',"add","Control-Suite::Options::Add", "HasArg"),
('h',"help","Control-Suite::Options::Help"),
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"],int(Cnf["DB::Port"]))
- database.init(Cnf, projectB)
+ daklib.database.init(Cnf, projectB)
action = None
for i in ("add", "list", "remove", "set"):
if Cnf["Control-Suite::Options::%s" % (i)] != "":
suite = Cnf["Control-Suite::Options::%s" % (i)]
- if database.get_suite_id(suite) == -1:
- utils.fubar("Unknown suite '%s'." %(suite))
+ if daklib.database.get_suite_id(suite) == -1:
+ daklib.utils.fubar("Unknown suite '%s'." %(suite))
else:
if action:
- utils.fubar("Can only perform one action at a time.")
+ daklib.utils.fubar("Can only perform one action at a time.")
action = i
# Need an action...
if action == None:
- utils.fubar("No action specified.")
+ daklib.utils.fubar("No action specified.")
# Safety/Sanity check
if action == "set" and suite != "testing":
- utils.fubar("Will not reset a suite other than testing.")
+ daklib.utils.fubar("Will not reset a suite other than testing.")
if action == "list":
get_list(suite)
else:
- Logger = logging.Logger(Cnf, "control-suite")
+ Logger = daklib.logging.Logger(Cnf, "control-suite")
if file_list:
for file in file_list:
- process_file(utils.open_file(file), suite, action)
+ process_file(daklib.utils.open_file(file), suite, action)
else:
process_file(sys.stdin, suite, action)
Logger.close()
import commands, pg, os, string, sys, time
import apt_pkg
-import dak.lib.database as database
-import dak.lib.utils as utils
+import daklib.database
+import daklib.utils
################################################################################
################################################################################
def do_nviu():
- experimental_id = database.get_suite_id("experimental")
+ experimental_id = daklib.database.get_suite_id("experimental")
if experimental_id == -1:
return
# Check for packages in experimental obsoleted by versions in unstable
WHERE sa.suite = %s AND sa2.suite = %d AND sa.source = s.id
AND sa2.source = s2.id AND s.source = s2.source
AND versioncmp(s.version, s2.version) < 0""" % (experimental_id,
- database.get_suite_id("unstable")))
+ daklib.database.get_suite_id("unstable")))
ql = q.getresult()
if ql:
nviu_to_remove = []
def main ():
global Cnf, projectB, suite_id, source_binaries, source_versions
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Cruft-Report::Options::Help"),
('m',"mode","Cruft-Report::Options::Mode", "HasArg"),
elif Options["Mode"] == "full":
checks = [ "nbs", "nviu", "obsolete source", "dubious nbs", "bnb", "bms", "anais" ]
else:
- utils.warn("%s is not a recognised mode - only 'full' or 'daily' are understood." % (Options["Mode"]))
+ daklib.utils.warn("%s is not a recognised mode - only 'full' or 'daily' are understood." % (Options["Mode"]))
usage(1)
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- database.init(Cnf, projectB)
+ daklib.database.init(Cnf, projectB)
bin_pkgs = {}
src_pkgs = {}
duplicate_bins = {}
suite = Options["Suite"]
- suite_id = database.get_suite_id(suite)
+ suite_id = daklib.database.get_suite_id(suite)
bin_not_built = {}
for component in components:
filename = "%s/dists/%s/%s/source/Sources.gz" % (Cnf["Dir::Root"], suite, component)
# apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance...
- temp_filename = utils.temp_filename()
+ temp_filename = daklib.utils.temp_filename()
(result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
if (result != 0):
sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
sys.exit(result)
- sources = utils.open_file(temp_filename)
+ sources = daklib.utils.open_file(temp_filename)
Sources = apt_pkg.ParseTagFile(sources)
while Sources.Step():
source = Sources.Section.Find('Package')
# Checks based on the Packages files
for component in components + ['main/debian-installer']:
- architectures = filter(utils.real_arch, Cnf.ValueList("Suite::%s::Architectures" % (suite)))
+ architectures = filter(daklib.utils.real_arch, Cnf.ValueList("Suite::%s::Architectures" % (suite)))
for architecture in architectures:
filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (Cnf["Dir::Root"], suite, component, architecture)
# apt_pkg.ParseTagFile needs a real file handle
- temp_filename = utils.temp_filename()
+ temp_filename = daklib.utils.temp_filename()
(result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
if (result != 0):
sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
sys.exit(result)
- packages = utils.open_file(temp_filename)
+ packages = daklib.utils.open_file(temp_filename)
Packages = apt_pkg.ParseTagFile(packages)
while Packages.Step():
package = Packages.Section.Find('Package')
bin2source[package]["version"] = version
bin2source[package]["source"] = source
if source.find("(") != -1:
- m = utils.re_extract_src_version.match(source)
+ m = daklib.utils.re_extract_src_version.match(source)
source = m.group(1)
version = m.group(2)
if not bin_pkgs.has_key(package):
import sys
import apt_pkg
-import dak.lib.queue as queue
-import dak.lib.utils as utils
+import daklib.queue
+import daklib.utils
################################################################################
################################################################################
def main():
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Decode-Dot-Dak::Options::Help")]
for i in [ "help" ]:
if not Cnf.has_key("Decode-Dot-Dak::Options::%s" % (i)):
if Options["Help"]:
usage()
- k = queue.Upload(Cnf)
+ k = daklib.queue.Upload(Cnf)
for arg in sys.argv[1:]:
- arg = utils.validate_changes_file_arg(arg,require_changes=-1)
+ arg = daklib.utils.validate_changes_file_arg(arg,require_changes=-1)
k.pkg.changes_file = arg
print "%s:" % (arg)
k.init_vars()
del changes[i]
print
if changes:
- utils.warn("changes still has following unrecognised keys: %s" % (changes.keys()))
+ daklib.utils.warn("changes still has following unrecognised keys: %s" % (changes.keys()))
dsc = k.pkg.dsc
print " Dsc:"
del dsc[i]
print
if dsc:
- utils.warn("dsc still has following unrecognised keys: %s" % (dsc.keys()))
+ daklib.utils.warn("dsc still has following unrecognised keys: %s" % (dsc.keys()))
files = k.pkg.files
print " Files:"
print " %s: %s" % (i.capitalize(), files[file][i])
del files[file][i]
if files[file]:
- utils.warn("files[%s] still has following unrecognised keys: %s" % (file, files[file].keys()))
+ daklib.utils.warn("files[%s] still has following unrecognised keys: %s" % (file, files[file].keys()))
print
dsc_files = k.pkg.dsc_files
print " %s: %s" % (i.capitalize(), dsc_files[file][i])
del dsc_files[file][i]
if dsc_files[file]:
- utils.warn("dsc_files[%s] still has following unrecognised keys: %s" % (file, dsc_files[file].keys()))
+ daklib.utils.warn("dsc_files[%s] still has following unrecognised keys: %s" % (file, dsc_files[file].keys()))
################################################################################
Cnf = None
projectB = None
-Cnf = utils.get_conf()
+Cnf = daklib.utils.get_conf()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
-database.init(Cnf, projectB)
+daklib.database.init(Cnf, projectB)
################################################################################
maintainer = ''
arch = ''
- deb_file = utils.open_file(filename)
+ deb_file = daklib.utils.open_file(filename)
try:
extracts = apt_inst.debExtractControl(deb_file)
control = apt_pkg.ParseSection(extracts)
def read_dsc (dsc_filename):
dsc = {}
- dsc_file = utils.open_file(dsc_filename)
+ dsc_file = daklib.utils.open_file(dsc_filename)
try:
- dsc = utils.parse_changes(dsc_filename)
+ dsc = daklib.utils.parse_changes(dsc_filename)
except:
print "can't parse control info"
dsc_file.close()
# Read a file, strip the signature and return the modified contents as
# a string.
def strip_pgp_signature (filename):
- file = utils.open_file (filename)
+ file = daklib.utils.open_file (filename)
contents = ""
inside_signature = 0
skip_next = 0
def check_changes (changes_filename):
display_changes(changes_filename)
- changes = utils.parse_changes (changes_filename)
- files = utils.build_file_list(changes)
+ changes = daklib.utils.parse_changes (changes_filename)
+ files = daklib.utils.build_file_list(changes)
for file in files.keys():
if file.endswith(".deb") or file.endswith(".udeb"):
check_deb(file)
def main ():
global Cnf, projectB, db_files, waste, excluded
-# Cnf = utils.get_conf()
+# Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Examine-Package::Options::Help")]
for i in [ "help" ]:
elif file.endswith(".dsc"):
check_dsc(file)
else:
- utils.fubar("Unrecognised file type: '%s'." % (file))
+ daklib.utils.fubar("Unrecognised file type: '%s'." % (file))
finally:
# Reset stdout here so future less invocations aren't FUBAR
less_fd.close()
sys.stdout = stdout_fd
except IOError, e:
if errno.errorcode[e.errno] == 'EPIPE':
- utils.warn("[examine-package] Caught EPIPE; skipping.")
+ daklib.utils.warn("[examine-package] Caught EPIPE; skipping.")
pass
else:
raise
except KeyboardInterrupt:
- utils.warn("[examine-package] Caught C-c; skipping.")
+ daklib.utils.warn("[examine-package] Caught C-c; skipping.")
pass
#######################################################################################
import ldap, pg, sys, time
import apt_pkg
-import dak.lib.utils as utils
+import daklib.utils
################################################################################
def main():
global Cnf, projectB
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Find-Null-Maintainers::Options::Help")]
for i in [ "help" ]:
if not Cnf.has_key("Find-Null-Maintainers::Options::%s" % (i)):
import sys, os, tempfile
import apt_pkg
-import dak.lib.utils as utils
+import daklib.utils
################################################################################
os.umask(0002)
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [ ('h', "help", "Generate-Index-Diffs::Options::Help"),
('c', None, "Generate-Index-Diffs::Options::CanonicalPath", "hasArg"),
('p', "patchname", "Generate-Index-Diffs::Options::PatchName", "hasArg"),
o.close()
AptCnf = apt_pkg.newConfiguration()
- apt_pkg.ReadConfigFileISC(AptCnf,utils.which_apt_conf_file())
+ apt_pkg.ReadConfigFileISC(AptCnf,daklib.utils.which_apt_conf_file())
if Options.has_key("RootDir"): Cnf["Dir::Root"] = Options["RootDir"]
elif AptCnf.has_key("bindirectory::%s" % (tree)):
sections = AptCnf["bindirectory::%s::Sections" % (tree)].split()
else:
- aptcnf_filename = os.path.basename(utils.which_apt_conf_file())
+ aptcnf_filename = os.path.basename(daklib.utils.which_apt_conf_file())
print "ALERT: suite %s not in %s, nor untouchable!" % (suite, aptcnf_filename)
continue
import sys, os, popen2, tempfile, stat, time
import apt_pkg
-import dak.lib.utils as utils
+import daklib.utils
################################################################################
(cat, path, name, ext))
else:
size = os.stat(path + name)[stat.ST_SIZE]
- file_handle = utils.open_file(path + name)
- except utils.cant_open_exc:
+ file_handle = daklib.utils.open_file(path + name)
+ except daklib.utils.cant_open_exc:
print "ALERT: Couldn't open " + path + name
else:
hash = hashop(file_handle)
global Cnf, AptCnf, projectB, out
out = sys.stdout
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Generate-Releases::Options::Help")]
for i in [ "help" ]:
usage()
AptCnf = apt_pkg.newConfiguration()
- apt_pkg.ReadConfigFileISC(AptCnf,utils.which_apt_conf_file())
+ apt_pkg.ReadConfigFileISC(AptCnf,daklib.utils.which_apt_conf_file())
if not suites:
suites = Cnf.SubTree("Suite").List()
elif AptCnf.has_key("bindirectory::%s" % (tree)):
pass
else:
- aptcnf_filename = os.path.basename(utils.which_apt_conf_file())
+ aptcnf_filename = os.path.basename(daklib.utils.which_apt_conf_file())
print "ALERT: suite %s not in %s, nor untouchable!" % (suite, aptcnf_filename)
continue
out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))
if notautomatic != "":
out.write("NotAutomatic: %s\n" % (notautomatic))
- out.write("Architectures: %s\n" % (" ".join(filter(utils.real_arch, SuiteBlock.ValueList("Architectures")))))
+ out.write("Architectures: %s\n" % (" ".join(filter(daklib.utils.real_arch, SuiteBlock.ValueList("Architectures")))))
if components:
out.write("Components: %s\n" % (" ".join(components)))
release = open(relpath, "w")
#release = open(longsuite.replace("/","_") + "_" + arch + "_" + sec + "_Release", "w")
except IOError:
- utils.fubar("Couldn't write to " + relpath)
+ daklib.utils.fubar("Couldn't write to " + relpath)
release.write("Archive: %s\n" % (suite))
if version != "":
import commands, os, pg, re, sys, time
import apt_pkg
-import dak.lib.database as database
-import dak.lib.utils as utils
+import daklib.database
+import daklib.utils
###############################################################################
###############################################################################
def check_signature (filename):
- if not utils.re_taint_free.match(os.path.basename(filename)):
+ if not daklib.utils.re_taint_free.match(os.path.basename(filename)):
reject("!!WARNING!! tainted filename: '%s'." % (filename))
return None
status_read, status_write = os.pipe()
cmd = "gpgv --status-fd %s --keyring %s --keyring %s %s" \
% (status_write, Cnf["Dinstall::PGPKeyring"], Cnf["Dinstall::GPGKeyring"], filename)
- (output, status, exit_status) = utils.gpgv_get_status_output(cmd, status_read, status_write)
+ (output, status, exit_status) = daklib.utils.gpgv_get_status_output(cmd, status_read, status_write)
# Process the status-fd output
keywords = {}
# Now check for obviously bad things in the processed output
if keywords.has_key("SIGEXPIRED"):
- utils.warn("%s: signing key has expired." % (filename))
+ daklib.utils.warn("%s: signing key has expired." % (filename))
if keywords.has_key("KEYREVOKED"):
reject("key used to sign %s has been revoked." % (filename))
bad = 1
reject("ascii armour of signature was corrupt in %s." % (filename))
bad = 1
if keywords.has_key("NODATA"):
- utils.warn("no signature found for %s." % (filename))
+ daklib.utils.warn("no signature found for %s." % (filename))
return "NOSIG"
#reject("no signature found in %s." % (filename))
#bad = 1
if exit_status and not keywords.has_key("NO_PUBKEY"):
reject("gpgv failed while checking %s." % (filename))
if status.strip():
- reject(utils.prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
+ reject(daklib.utils.prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
else:
- reject(utils.prefix_multi_line_string(output, " [GPG output:] "), "")
+ reject(daklib.utils.prefix_multi_line_string(output, " [GPG output:] "), "")
return None
# Sanity check the good stuff we expect
bad = 1
# Finally ensure there's not something we don't recognise
- known_keywords = utils.Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
+ known_keywords = daklib.utils.Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
NODATA="")
projectB.query("DELETE FROM location")
for location in Cnf.SubTree("Location").List():
SubSec = Cnf.SubTree("Location::%s" % (location))
- archive_id = database.get_archive_id(SubSec["archive"])
+ archive_id = daklib.database.get_archive_id(SubSec["archive"])
type = SubSec.Find("type")
if type == "legacy-mixed":
projectB.query("INSERT INTO location (path, archive, type) VALUES ('%s', %d, '%s')" % (location, archive_id, SubSec["type"]))
else:
for component in Cnf.SubTree("Component").List():
- component_id = database.get_component_id(component)
+ component_id = daklib.database.get_component_id(component)
projectB.query("INSERT INTO location (path, component, archive, type) VALUES ('%s', %d, %d, '%s')" %
(location, component_id, archive_id, SubSec["type"]))
if SubSec.has_key(i):
projectB.query("UPDATE suite SET %s = '%s' WHERE suite_name = '%s'" % (i.lower(), SubSec[i], suite.lower()))
for architecture in Cnf.ValueList("Suite::%s::Architectures" % (suite)):
- architecture_id = database.get_architecture_id (architecture)
+ architecture_id = daklib.database.get_architecture_id (architecture)
projectB.query("INSERT INTO suite_architectures (suite, architecture) VALUES (currval('suite_id_seq'), %d)" % (architecture_id))
def update_override_type():
try:
path = q.getresult()[0][0]
except:
- utils.fubar("[import-archive] get_location_path(): Couldn't get path for %s" % (directory))
+ daklib.utils.fubar("[import-archive] get_location_path(): Couldn't get path for %s" % (directory))
location_path_cache[directory] = path
return path
global source_cache, source_query_cache, src_associations_query_cache, dsc_files_query_cache, source_id_serial, src_associations_id_serial, dsc_files_id_serial, source_cache_for_binaries, orig_tar_gz_cache, reject_message
suite = suite.lower()
- suite_id = database.get_suite_id(suite)
+ suite_id = daklib.database.get_suite_id(suite)
try:
- file = utils.open_file (filename)
- except utils.cant_open_exc:
- utils.warn("can't open '%s'" % (filename))
+ file = daklib.utils.open_file (filename)
+ except daklib.utils.cant_open_exc:
+ daklib.utils.warn("can't open '%s'" % (filename))
return
Scanner = apt_pkg.ParseTagFile(file)
while Scanner.Step() != 0:
package = Scanner.Section["package"]
version = Scanner.Section["version"]
directory = Scanner.Section["directory"]
- dsc_file = os.path.join(Cnf["Dir::Root"], directory, "%s_%s.dsc" % (package, utils.re_no_epoch.sub('', version)))
+ dsc_file = os.path.join(Cnf["Dir::Root"], directory, "%s_%s.dsc" % (package, daklib.utils.re_no_epoch.sub('', version)))
# Sometimes the Directory path is a lie; check in the pool
if not os.path.exists(dsc_file):
if directory.split('/')[0] == "dists":
- directory = Cnf["Dir::PoolRoot"] + utils.poolify(package, component)
- dsc_file = os.path.join(Cnf["Dir::Root"], directory, "%s_%s.dsc" % (package, utils.re_no_epoch.sub('', version)))
+ directory = Cnf["Dir::PoolRoot"] + daklib.utils.poolify(package, component)
+ dsc_file = os.path.join(Cnf["Dir::Root"], directory, "%s_%s.dsc" % (package, daklib.utils.re_no_epoch.sub('', version)))
if not os.path.exists(dsc_file):
- utils.fubar("%s not found." % (dsc_file))
+ daklib.utils.fubar("%s not found." % (dsc_file))
install_date = time.strftime("%Y-%m-%d", time.localtime(os.path.getmtime(dsc_file)))
fingerprint = check_signature(dsc_file)
- fingerprint_id = database.get_or_set_fingerprint_id(fingerprint)
+ fingerprint_id = daklib.database.get_or_set_fingerprint_id(fingerprint)
if reject_message:
- utils.fubar("%s: %s" % (dsc_file, reject_message))
+ daklib.utils.fubar("%s: %s" % (dsc_file, reject_message))
maintainer = Scanner.Section["maintainer"]
maintainer = maintainer.replace("'", "\\'")
- maintainer_id = database.get_or_set_maintainer_id(maintainer)
+ maintainer_id = daklib.database.get_or_set_maintainer_id(maintainer)
location = get_location_path(directory.split('/')[0])
- location_id = database.get_location_id (location, component, archive)
+ location_id = daklib.database.get_location_id (location, component, archive)
if not directory.endswith("/"):
directory += '/'
directory = poolify (directory, location)
if directory != "" and not directory.endswith("/"):
directory += '/'
- no_epoch_version = utils.re_no_epoch.sub('', version)
+ no_epoch_version = daklib.utils.re_no_epoch.sub('', version)
# Add all files referenced by the .dsc to the files table
ids = []
for line in Scanner.Section["files"].split('\n'):
count_total = 0
count_bad = 0
suite = suite.lower()
- suite_id = database.get_suite_id(suite)
+ suite_id = daklib.database.get_suite_id(suite)
try:
- file = utils.open_file (filename)
- except utils.cant_open_exc:
- utils.warn("can't open '%s'" % (filename))
+ file = daklib.utils.open_file (filename)
+ except daklib.utils.cant_open_exc:
+ daklib.utils.warn("can't open '%s'" % (filename))
return
Scanner = apt_pkg.ParseTagFile(file)
while Scanner.Step() != 0:
version = Scanner.Section["version"]
maintainer = Scanner.Section["maintainer"]
maintainer = maintainer.replace("'", "\\'")
- maintainer_id = database.get_or_set_maintainer_id(maintainer)
+ maintainer_id = daklib.database.get_or_set_maintainer_id(maintainer)
architecture = Scanner.Section["architecture"]
- architecture_id = database.get_architecture_id (architecture)
+ architecture_id = daklib.database.get_architecture_id (architecture)
fingerprint = "NOSIG"
- fingerprint_id = database.get_or_set_fingerprint_id(fingerprint)
+ fingerprint_id = daklib.database.get_or_set_fingerprint_id(fingerprint)
if not Scanner.Section.has_key("source"):
source = package
else:
source = Scanner.Section["source"]
source_version = ""
if source.find("(") != -1:
- m = utils.re_extract_src_version.match(source)
+ m = daklib.utils.re_extract_src_version.match(source)
source = m.group(1)
source_version = m.group(2)
if not source_version:
source_version = version
filename = Scanner.Section["filename"]
location = get_location_path(filename.split('/')[0])
- location_id = database.get_location_id (location, component, archive)
+ location_id = daklib.database.get_location_id (location, component, archive)
filename = poolify (filename, location)
if architecture == "all":
filename = re_arch_from_filename.sub("binary-all", filename)
###############################################################################
def do_sources(sources, suite, component, server):
- temp_filename = utils.temp_filename()
+ temp_filename = daklib.utils.temp_filename()
(result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (sources, temp_filename))
if (result != 0):
- utils.fubar("Gunzip invocation failed!\n%s" % (output), result)
+ daklib.utils.fubar("Gunzip invocation failed!\n%s" % (output), result)
print 'Processing '+sources+'...'
process_sources (temp_filename, suite, component, server)
os.unlink(temp_filename)
def do_da_do_da ():
global Cnf, projectB, query_cache, files_query_cache, source_query_cache, src_associations_query_cache, dsc_files_query_cache, bin_associations_query_cache, binaries_query_cache
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('a', "action", "Import-Archive::Options::Action"),
('h', "help", "Import-Archive::Options::Help")]
for i in [ "action", "help" ]:
usage()
if not Options["Action"]:
- utils.warn("""no -a/--action given; not doing anything.
+ daklib.utils.warn("""no -a/--action given; not doing anything.
Please read the documentation before running this script.
""")
usage(1)
print "Re-Creating DB..."
(result, output) = commands.getstatusoutput("psql -f init_pool.sql template1")
if (result != 0):
- utils.fubar("psql invocation failed!\n", result)
+ daklib.utils.fubar("psql invocation failed!\n", result)
print output
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- database.init (Cnf, projectB)
+ daklib.database.init (Cnf, projectB)
print "Adding static tables from conf file..."
projectB.query("BEGIN WORK")
update_section()
projectB.query("COMMIT WORK")
- files_query_cache = utils.open_file(Cnf["Import-Archive::ExportDir"]+"files","w")
- source_query_cache = utils.open_file(Cnf["Import-Archive::ExportDir"]+"source","w")
- src_associations_query_cache = utils.open_file(Cnf["Import-Archive::ExportDir"]+"src_associations","w")
- dsc_files_query_cache = utils.open_file(Cnf["Import-Archive::ExportDir"]+"dsc_files","w")
- binaries_query_cache = utils.open_file(Cnf["Import-Archive::ExportDir"]+"binaries","w")
- bin_associations_query_cache = utils.open_file(Cnf["Import-Archive::ExportDir"]+"bin_associations","w")
+ files_query_cache = daklib.utils.open_file(Cnf["Import-Archive::ExportDir"]+"files","w")
+ source_query_cache = daklib.utils.open_file(Cnf["Import-Archive::ExportDir"]+"source","w")
+ src_associations_query_cache = daklib.utils.open_file(Cnf["Import-Archive::ExportDir"]+"src_associations","w")
+ dsc_files_query_cache = daklib.utils.open_file(Cnf["Import-Archive::ExportDir"]+"dsc_files","w")
+ binaries_query_cache = daklib.utils.open_file(Cnf["Import-Archive::ExportDir"]+"binaries","w")
+ bin_associations_query_cache = daklib.utils.open_file(Cnf["Import-Archive::ExportDir"]+"bin_associations","w")
projectB.query("BEGIN WORK")
# Process Sources files to popoulate `source' and friends
sources = Cnf["Dir::Root"] + "dists/" + Cnf["Suite::%s::CodeName" % (suite)] + '/' + component + '/source/' + 'Sources.gz'
do_sources(sources, suite, component, server)
else:
- utils.fubar("Unknown location type ('%s')." % (type))
+ daklib.utils.fubar("Unknown location type ('%s')." % (type))
# Process Packages files to populate `binaries' and friends
elif type == "legacy" or type == "pool":
for suite in Cnf.ValueList("Location::%s::Suites" % (location)):
for component in Cnf.SubTree("Component").List():
- architectures = filter(utils.real_arch,
+ architectures = filter(daklib.utils.real_arch,
Cnf.ValueList("Suite::%s::Architectures" % (suite)))
for architecture in architectures:
packages = Cnf["Dir::Root"] + "dists/" + Cnf["Suite::%s::CodeName" % (suite)] + '/' + component + '/binary-' + architecture + '/Packages'
(result, output) = commands.getstatusoutput("psql %s < add_constraints.sql" % (Cnf["DB::Name"]))
print output
if (result != 0):
- utils.fubar("psql invocation failed!\n%s" % (output), result)
+ daklib.utils.fubar("psql invocation failed!\n%s" % (output), result)
return
################################################################################
def main():
- utils.try_with_debug(do_da_do_da)
+ daklib.utils.try_with_debug(do_da_do_da)
################################################################################
import commands, ldap, pg, re, sys
import apt_pkg
-import dak.lib.database as database
-import dak.lib.utils as utils
+import daklib.database
+import daklib.utils
################################################################################
def main():
global Cnf, projectB
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Import-LDAP-Fingerprints::Options::Help")]
for i in [ "help" ]:
if not Cnf.has_key("Import-LDAP-Fingerprints::Options::%s" % (i)):
usage()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- database.init(Cnf, projectB)
+ daklib.database.init(Cnf, projectB)
LDAPDn = Cnf["Import-LDAP-Fingerprints::LDAPDn"]
LDAPServer = Cnf["Import-LDAP-Fingerprints::LDAPServer"]
entry = i[1]
fingerprints = entry["keyFingerPrint"]
uid = entry["uid"][0]
- uid_id = database.get_or_set_uid_id(uid)
+ uid_id = daklib.database.get_or_set_uid_id(uid)
for fingerprint in fingerprints:
ldap_fin_uid_id[fingerprint] = (uid, uid_id)
if db_fin_uid.has_key(fingerprint):
print "Assigning %s to 0x%s." % (uid, fingerprint)
else:
if existing_uid != uid:
- utils.fubar("%s has %s in LDAP, but projectB says it should be %s." % (uid, fingerprint, existing_uid))
+ daklib.utils.fubar("%s has %s in LDAP, but projectB says it should be %s." % (uid, fingerprint, existing_uid))
# Try to update people who sign with non-primary key
q = projectB.query("SELECT fingerprint, id FROM fingerprint WHERE uid is null")
m = re_gpg_fingerprint.search(output)
if not m:
print output
- utils.fubar("0x%s: No fingerprint found in gpg output but it returned 0?\n%s" % (fingerprint, utils.prefix_multi_line_string(output, " [GPG output:] ")))
+ daklib.utils.fubar("0x%s: No fingerprint found in gpg output but it returned 0?\n%s" % (fingerprint, daklib.utils.prefix_multi_line_string(output, " [GPG output:] ")))
primary_key = m.group(1)
primary_key = primary_key.replace(" ","")
if not ldap_fin_uid_id.has_key(primary_key):
- utils.fubar("0x%s (from 0x%s): no UID found in LDAP" % (primary_key, fingerprint))
+ daklib.utils.fubar("0x%s (from 0x%s): no UID found in LDAP" % (primary_key, fingerprint))
(uid, uid_id) = ldap_fin_uid_id[primary_key]
q = projectB.query("UPDATE fingerprint SET uid = %s WHERE id = %s" % (uid_id, fingerprint_id))
print "Assigning %s to 0x%s." % (uid, fingerprint)
# FIXME: default to the guessed ID
uid = None
while not uid:
- uid = utils.our_raw_input("Map to which UID ? ")
+ uid = daklib.utils.our_raw_input("Map to which UID ? ")
Attrs = l.search_s(LDAPDn,ldap.SCOPE_ONELEVEL,"(uid=%s)" % (uid), ["cn","mn","sn"])
if not Attrs:
print "That UID doesn't exist in LDAP!"
get_ldap_value(entry, "mn"),
get_ldap_value(entry, "sn")])
prompt = "Map to %s - %s (y/N) ? " % (uid, name.replace(" "," "))
- yn = utils.our_raw_input(prompt).lower()
+ yn = daklib.utils.our_raw_input(prompt).lower()
if yn == "y":
- uid_id = database.get_or_set_uid_id(uid)
+ uid_id = daklib.database.get_or_set_uid_id(uid)
projectB.query("UPDATE fingerprint SET uid = %s WHERE id = %s" % (uid_id, fingerprint_id))
print "Assigning %s to 0x%s." % (uid, fingerprint)
else:
def main ():
global Cnf, projectB
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('n', "no-action", "Import-Users-From-Passwd::Options::No-Action"),
('q', "quiet", "Import-Users-From-Passwd::Options::Quiet"),
if Options["Help"]:
usage()
elif arguments:
- utils.warn("dak import-users-from-passwd takes no non-option arguments.")
+ daklib.utils.warn("dak import-users-from-passwd takes no non-option arguments.")
usage(1)
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
import pg, sys
import apt_pkg
-import dak.lib.database as database
-import dak.lib.utils as utils
+import daklib.database
+import daklib.utils
################################################################################
def main ():
global Cnf, projectB
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Init-DB::Options::Help")]
for i in [ "help" ]:
if not Cnf.has_key("Init-DB::Options::%s" % (i)):
usage()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- database.init(Cnf, projectB)
+ daklib.database.init(Cnf, projectB)
# archive
projectB.query("DELETE FROM location")
for location in Cnf.SubTree("Location").List():
Location = Cnf.SubTree("Location::%s" % (location))
- archive_id = database.get_archive_id(Location["Archive"])
+ archive_id = daklib.database.get_archive_id(Location["Archive"])
type = Location.get("type")
if type == "legacy-mixed":
projectB.query("INSERT INTO location (path, archive, type) VALUES ('%s', %d, '%s')" % (location, archive_id, Location["type"]))
elif type == "legacy" or type == "pool":
for component in Cnf.SubTree("Component").List():
- component_id = database.get_component_id(component)
+ component_id = daklib.database.get_component_id(component)
projectB.query("INSERT INTO location (path, component, archive, type) VALUES ('%s', %d, %d, '%s')" %
(location, component_id, archive_id, type))
else:
- utils.fubar("E: type '%s' not recognised in location %s." % (type, location))
+ daklib.utils.fubar("E: type '%s' not recognised in location %s." % (type, location))
projectB.query("COMMIT WORK")
# suite
projectB.query("INSERT INTO suite (suite_name, version, origin, description) VALUES ('%s', %s, %s, %s)"
% (suite.lower(), version, origin, description))
for architecture in Cnf.ValueList("Suite::%s::Architectures" % (suite)):
- architecture_id = database.get_architecture_id (architecture)
+ architecture_id = daklib.database.get_architecture_id (architecture)
if architecture_id < 0:
- utils.fubar("architecture '%s' not found in architecture table for suite %s." % (architecture, suite))
+ daklib.utils.fubar("architecture '%s' not found in architecture table for suite %s." % (architecture, suite))
projectB.query("INSERT INTO suite_architectures (suite, architecture) VALUES (currval('suite_id_seq'), %d)" % (architecture_id))
projectB.query("COMMIT WORK")
import os, sys
import apt_pkg
-import dak.lib.utils as utils
+import daklib.utils
################################################################################
def do_dir(target, config_name):
if os.path.exists(target):
if not os.path.isdir(target):
- utils.fubar("%s (%s) is not a directory." % (target, config_name))
+ daklib.utils.fubar("%s (%s) is not a directory." % (target, config_name))
else:
print "Creating %s ..." % (target)
os.makedirs(target)
def main ():
global AptCnf, Cnf, projectB
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Init-Dirs::Options::Help")]
for i in [ "help" ]:
if not Cnf.has_key("Init-Dirs::Options::%s" % (i)):
usage()
AptCnf = apt_pkg.newConfiguration()
- apt_pkg.ReadConfigFileISC(AptCnf,utils.which_apt_conf_file())
+ apt_pkg.ReadConfigFileISC(AptCnf,daklib.utils.which_apt_conf_file())
create_directories()
+++ /dev/null
-#!/usr/bin/env python
-
-# DB access functions
-# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-
-################################################################################
-
-import sys, time, types
-
-################################################################################
-
-Cnf = None
-projectB = None
-suite_id_cache = {}
-section_id_cache = {}
-priority_id_cache = {}
-override_type_id_cache = {}
-architecture_id_cache = {}
-archive_id_cache = {}
-component_id_cache = {}
-location_id_cache = {}
-maintainer_id_cache = {}
-source_id_cache = {}
-files_id_cache = {}
-maintainer_cache = {}
-fingerprint_id_cache = {}
-queue_id_cache = {}
-uid_id_cache = {}
-
-################################################################################
-
-def init (config, sql):
- global Cnf, projectB
-
- Cnf = config
- projectB = sql
-
-
-def do_query(q):
- sys.stderr.write("query: \"%s\" ... " % (q))
- before = time.time()
- r = projectB.query(q)
- time_diff = time.time()-before
- sys.stderr.write("took %.3f seconds.\n" % (time_diff))
- if type(r) is int:
- sys.stderr.write("int result: %s\n" % (r))
- elif type(r) is types.NoneType:
- sys.stderr.write("result: None\n")
- else:
- sys.stderr.write("pgresult: %s\n" % (r.getresult()))
- return r
-
-################################################################################
-
-def get_suite_id (suite):
- global suite_id_cache
-
- if suite_id_cache.has_key(suite):
- return suite_id_cache[suite]
-
- q = projectB.query("SELECT id FROM suite WHERE suite_name = '%s'" % (suite))
- ql = q.getresult()
- if not ql:
- return -1
-
- suite_id = ql[0][0]
- suite_id_cache[suite] = suite_id
-
- return suite_id
-
-def get_section_id (section):
- global section_id_cache
-
- if section_id_cache.has_key(section):
- return section_id_cache[section]
-
- q = projectB.query("SELECT id FROM section WHERE section = '%s'" % (section))
- ql = q.getresult()
- if not ql:
- return -1
-
- section_id = ql[0][0]
- section_id_cache[section] = section_id
-
- return section_id
-
-def get_priority_id (priority):
- global priority_id_cache
-
- if priority_id_cache.has_key(priority):
- return priority_id_cache[priority]
-
- q = projectB.query("SELECT id FROM priority WHERE priority = '%s'" % (priority))
- ql = q.getresult()
- if not ql:
- return -1
-
- priority_id = ql[0][0]
- priority_id_cache[priority] = priority_id
-
- return priority_id
-
-def get_override_type_id (type):
- global override_type_id_cache
-
- if override_type_id_cache.has_key(type):
- return override_type_id_cache[type]
-
- q = projectB.query("SELECT id FROM override_type WHERE type = '%s'" % (type))
- ql = q.getresult()
- if not ql:
- return -1
-
- override_type_id = ql[0][0]
- override_type_id_cache[type] = override_type_id
-
- return override_type_id
-
-def get_architecture_id (architecture):
- global architecture_id_cache
-
- if architecture_id_cache.has_key(architecture):
- return architecture_id_cache[architecture]
-
- q = projectB.query("SELECT id FROM architecture WHERE arch_string = '%s'" % (architecture))
- ql = q.getresult()
- if not ql:
- return -1
-
- architecture_id = ql[0][0]
- architecture_id_cache[architecture] = architecture_id
-
- return architecture_id
-
-def get_archive_id (archive):
- global archive_id_cache
-
- archive = archive.lower()
-
- if archive_id_cache.has_key(archive):
- return archive_id_cache[archive]
-
- q = projectB.query("SELECT id FROM archive WHERE lower(name) = '%s'" % (archive))
- ql = q.getresult()
- if not ql:
- return -1
-
- archive_id = ql[0][0]
- archive_id_cache[archive] = archive_id
-
- return archive_id
-
-def get_component_id (component):
- global component_id_cache
-
- component = component.lower()
-
- if component_id_cache.has_key(component):
- return component_id_cache[component]
-
- q = projectB.query("SELECT id FROM component WHERE lower(name) = '%s'" % (component))
- ql = q.getresult()
- if not ql:
- return -1
-
- component_id = ql[0][0]
- component_id_cache[component] = component_id
-
- return component_id
-
-def get_location_id (location, component, archive):
- global location_id_cache
-
- cache_key = location + '~' + component + '~' + location
- if location_id_cache.has_key(cache_key):
- return location_id_cache[cache_key]
-
- archive_id = get_archive_id (archive)
- if component != "":
- component_id = get_component_id (component)
- if component_id != -1:
- q = projectB.query("SELECT id FROM location WHERE path = '%s' AND component = %d AND archive = %d" % (location, component_id, archive_id))
- else:
- q = projectB.query("SELECT id FROM location WHERE path = '%s' AND archive = %d" % (location, archive_id))
- ql = q.getresult()
- if not ql:
- return -1
-
- location_id = ql[0][0]
- location_id_cache[cache_key] = location_id
-
- return location_id
-
-def get_source_id (source, version):
- global source_id_cache
-
- cache_key = source + '~' + version + '~'
- if source_id_cache.has_key(cache_key):
- return source_id_cache[cache_key]
-
- q = projectB.query("SELECT id FROM source s WHERE s.source = '%s' AND s.version = '%s'" % (source, version))
-
- if not q.getresult():
- return None
-
- source_id = q.getresult()[0][0]
- source_id_cache[cache_key] = source_id
-
- return source_id
-
-################################################################################
-
-def get_or_set_maintainer_id (maintainer):
- global maintainer_id_cache
-
- if maintainer_id_cache.has_key(maintainer):
- return maintainer_id_cache[maintainer]
-
- q = projectB.query("SELECT id FROM maintainer WHERE name = '%s'" % (maintainer))
- if not q.getresult():
- projectB.query("INSERT INTO maintainer (name) VALUES ('%s')" % (maintainer))
- q = projectB.query("SELECT id FROM maintainer WHERE name = '%s'" % (maintainer))
- maintainer_id = q.getresult()[0][0]
- maintainer_id_cache[maintainer] = maintainer_id
-
- return maintainer_id
-
-################################################################################
-
-def get_or_set_uid_id (uid):
- global uid_id_cache
-
- if uid_id_cache.has_key(uid):
- return uid_id_cache[uid]
-
- q = projectB.query("SELECT id FROM uid WHERE uid = '%s'" % (uid))
- if not q.getresult():
- projectB.query("INSERT INTO uid (uid) VALUES ('%s')" % (uid))
- q = projectB.query("SELECT id FROM uid WHERE uid = '%s'" % (uid))
- uid_id = q.getresult()[0][0]
- uid_id_cache[uid] = uid_id
-
- return uid_id
-
-################################################################################
-
-def get_or_set_fingerprint_id (fingerprint):
- global fingerprint_id_cache
-
- if fingerprint_id_cache.has_key(fingerprint):
- return fingerprint_id_cache[fingerprint]
-
- q = projectB.query("SELECT id FROM fingerprint WHERE fingerprint = '%s'" % (fingerprint))
- if not q.getresult():
- projectB.query("INSERT INTO fingerprint (fingerprint) VALUES ('%s')" % (fingerprint))
- q = projectB.query("SELECT id FROM fingerprint WHERE fingerprint = '%s'" % (fingerprint))
- fingerprint_id = q.getresult()[0][0]
- fingerprint_id_cache[fingerprint] = fingerprint_id
-
- return fingerprint_id
-
-################################################################################
-
-def get_files_id (filename, size, md5sum, location_id):
- global files_id_cache
-
- cache_key = "%s~%d" % (filename, location_id)
-
- if files_id_cache.has_key(cache_key):
- return files_id_cache[cache_key]
-
- size = int(size)
- q = projectB.query("SELECT id, size, md5sum FROM files WHERE filename = '%s' AND location = %d" % (filename, location_id))
- ql = q.getresult()
- if ql:
- if len(ql) != 1:
- return -1
- ql = ql[0]
- orig_size = int(ql[1])
- orig_md5sum = ql[2]
- if orig_size != size or orig_md5sum != md5sum:
- return -2
- files_id_cache[cache_key] = ql[0]
- return files_id_cache[cache_key]
- else:
- return None
-
-################################################################################
-
-def get_or_set_queue_id (queue):
- global queue_id_cache
-
- if queue_id_cache.has_key(queue):
- return queue_id_cache[queue]
-
- q = projectB.query("SELECT id FROM queue WHERE queue_name = '%s'" % (queue))
- if not q.getresult():
- projectB.query("INSERT INTO queue (queue_name) VALUES ('%s')" % (queue))
- q = projectB.query("SELECT id FROM queue WHERE queue_name = '%s'" % (queue))
- queue_id = q.getresult()[0][0]
- queue_id_cache[queue] = queue_id
-
- return queue_id
-
-################################################################################
-
-def set_files_id (filename, size, md5sum, location_id):
- global files_id_cache
-
- projectB.query("INSERT INTO files (filename, size, md5sum, location) VALUES ('%s', %d, '%s', %d)" % (filename, long(size), md5sum, location_id))
-
- return get_files_id (filename, size, md5sum, location_id)
-
- ### currval has issues with postgresql 7.1.3 when the table is big
- ### it was taking ~3 seconds to return on auric which is very Not
- ### Cool(tm).
- ##
- ##q = projectB.query("SELECT id FROM files WHERE id = currval('files_id_seq')")
- ##ql = q.getresult()[0]
- ##cache_key = "%s~%d" % (filename, location_id)
- ##files_id_cache[cache_key] = ql[0]
- ##return files_id_cache[cache_key]
-
-################################################################################
-
-def get_maintainer (maintainer_id):
- global maintainer_cache
-
- if not maintainer_cache.has_key(maintainer_id):
- q = projectB.query("SELECT name FROM maintainer WHERE id = %s" % (maintainer_id))
- maintainer_cache[maintainer_id] = q.getresult()[0][0]
-
- return maintainer_cache[maintainer_id]
-
-################################################################################
+++ /dev/null
-#!/usr/bin/env python
-
-# Logging functions
-# Copyright (C) 2001, 2002 James Troup <james@nocrew.org>
-# $Id: logging.py,v 1.4 2005-11-15 09:50:32 ajt Exp $
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-
-################################################################################
-
-import os, pwd, time, sys
-import utils
-
-################################################################################
-
-class Logger:
- "Logger object"
- Cnf = None
- logfile = None
- program = None
-
- def __init__ (self, Cnf, program, debug=0):
- "Initialize a new Logger object"
- self.Cnf = Cnf
- self.program = program
- # Create the log directory if it doesn't exist
- logdir = Cnf["Dir::Log"]
- if not os.path.exists(logdir):
- umask = os.umask(00000)
- os.makedirs(logdir, 02775)
- # Open the logfile
- logfilename = "%s/%s" % (logdir, time.strftime("%Y-%m"))
- logfile = None
- if debug:
- logfile = sys.stderr
- else:
- logfile = utils.open_file(logfilename, 'a')
- self.logfile = logfile
- # Log the start of the program
- user = pwd.getpwuid(os.getuid())[0]
- self.log(["program start", user])
-
- def log (self, details):
- "Log an event"
- # Prepend the timestamp and program name
- details.insert(0, self.program)
- timestamp = time.strftime("%Y%m%d%H%M%S")
- details.insert(0, timestamp)
- # Force the contents of the list to be string.join-able
- details = map(str, details)
- # Write out the log in TSV
- self.logfile.write("|".join(details)+'\n')
- # Flush the output to enable tail-ing
- self.logfile.flush()
-
- def close (self):
- "Close a Logger object"
- self.log(["program end"])
- self.logfile.flush()
- self.logfile.close()
+++ /dev/null
-#!/usr/bin/env python
-
-# Queue utility functions for dak
-# Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-
-###############################################################################
-
-import cPickle, errno, os, pg, re, stat, string, sys, time
-import apt_inst, apt_pkg
-import utils, database
-
-from types import *
-
-###############################################################################
-
-re_isanum = re.compile (r"^\d+$")
-re_default_answer = re.compile(r"\[(.*)\]")
-re_fdnic = re.compile(r"\n\n")
-re_bin_only_nmu = re.compile(r"\+b\d+$")
-###############################################################################
-
-# Convenience wrapper to carry around all the package information in
-
-class Pkg:
- def __init__(self, **kwds):
- self.__dict__.update(kwds)
-
- def update(self, **kwds):
- self.__dict__.update(kwds)
-
-###############################################################################
-
-class nmu_p:
- # Read in the group maintainer override file
- def __init__ (self, Cnf):
- self.group_maint = {}
- self.Cnf = Cnf
- if Cnf.get("Dinstall::GroupOverrideFilename"):
- filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"]
- file = utils.open_file(filename)
- for line in file.readlines():
- line = utils.re_comments.sub('', line).lower().strip()
- if line != "":
- self.group_maint[line] = 1
- file.close()
-
- def is_an_nmu (self, pkg):
- Cnf = self.Cnf
- changes = pkg.changes
- dsc = pkg.dsc
-
- i = utils.fix_maintainer (dsc.get("maintainer",
- Cnf["Dinstall::MyEmailAddress"]).lower())
- (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i
- # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
- if dsc_name == changes["maintainername"].lower() and \
- (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
- return 0
-
- if dsc.has_key("uploaders"):
- uploaders = dsc["uploaders"].lower().split(",")
- uploadernames = {}
- for i in uploaders:
- (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip())
- uploadernames[name] = ""
- if uploadernames.has_key(changes["changedbyname"].lower()):
- return 0
-
- # Some group maintained packages (e.g. Debian QA) are never NMU's
- if self.group_maint.has_key(changes["maintaineremail"].lower()):
- return 0
-
- return 1
-
-###############################################################################
-
-class Upload:
-
- def __init__(self, Cnf):
- self.Cnf = Cnf
- # Read in the group-maint override file
- self.nmu = nmu_p(Cnf)
- self.accept_count = 0
- self.accept_bytes = 0L
- self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
- legacy_source_untouchable = {})
-
- # Initialize the substitution template mapping global
- Subst = self.Subst = {}
- Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
- Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
- Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
- Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
-
- self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- database.init(Cnf, self.projectB)
-
- ###########################################################################
-
- def init_vars (self):
- for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
- exec "self.pkg.%s.clear();" % (i)
- self.pkg.orig_tar_id = None
- self.pkg.orig_tar_location = ""
- self.pkg.orig_tar_gz = None
-
- ###########################################################################
-
- def update_vars (self):
- dump_filename = self.pkg.changes_file[:-8]+".dak"
- dump_file = utils.open_file(dump_filename)
- p = cPickle.Unpickler(dump_file)
- for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
- exec "self.pkg.%s.update(p.load());" % (i)
- for i in [ "orig_tar_id", "orig_tar_location" ]:
- exec "self.pkg.%s = p.load();" % (i)
- dump_file.close()
-
- ###########################################################################
-
- # This could just dump the dictionaries as is, but I'd like to
- # avoid this so there's some idea of what process-accepted &
- # process-new use from process-unchecked
-
- def dump_vars(self, dest_dir):
- for i in [ "changes", "dsc", "files", "dsc_files",
- "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
- exec "%s = self.pkg.%s;" % (i,i)
- dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
- dump_file = utils.open_file(dump_filename, 'w')
- try:
- os.chmod(dump_filename, 0660)
- except OSError, e:
- if errno.errorcode[e.errno] == 'EPERM':
- perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
- if perms & stat.S_IROTH:
- utils.fubar("%s is world readable and chmod failed." % (dump_filename))
- else:
- raise
-
- p = cPickle.Pickler(dump_file, 1)
- for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
- exec "%s = {}" % i
- ## files
- for file in files.keys():
- d_files[file] = {}
- for i in [ "package", "version", "architecture", "type", "size",
- "md5sum", "component", "location id", "source package",
- "source version", "maintainer", "dbtype", "files id",
- "new", "section", "priority", "othercomponents",
- "pool name", "original component" ]:
- if files[file].has_key(i):
- d_files[file][i] = files[file][i]
- ## changes
- # Mandatory changes fields
- for i in [ "distribution", "source", "architecture", "version",
- "maintainer", "urgency", "fingerprint", "changedby822",
- "changedby2047", "changedbyname", "maintainer822",
- "maintainer2047", "maintainername", "maintaineremail",
- "closes", "changes" ]:
- d_changes[i] = changes[i]
- # Optional changes fields
- for i in [ "changed-by", "filecontents", "format", "process-new note", "distribution-version" ]:
- if changes.has_key(i):
- d_changes[i] = changes[i]
- ## dsc
- for i in [ "source", "version", "maintainer", "fingerprint",
- "uploaders", "bts changelog" ]:
- if dsc.has_key(i):
- d_dsc[i] = dsc[i]
- ## dsc_files
- for file in dsc_files.keys():
- d_dsc_files[file] = {}
- # Mandatory dsc_files fields
- for i in [ "size", "md5sum" ]:
- d_dsc_files[file][i] = dsc_files[file][i]
- # Optional dsc_files fields
- for i in [ "files id" ]:
- if dsc_files[file].has_key(i):
- d_dsc_files[file][i] = dsc_files[file][i]
-
- for i in [ d_changes, d_dsc, d_files, d_dsc_files,
- legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
- p.dump(i)
- dump_file.close()
-
- ###########################################################################
-
- # Set up the per-package template substitution mappings
-
- def update_subst (self, reject_message = ""):
- Subst = self.Subst
- changes = self.pkg.changes
- # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
- if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
- changes["architecture"] = { "Unknown" : "" }
- # and maintainer2047 may not exist.
- if not changes.has_key("maintainer2047"):
- changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
-
- Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
- Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
- Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
-
- # For source uploads the Changed-By field wins; otherwise Maintainer wins.
- if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
- Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
- Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
- changes["maintainer2047"])
- Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
- else:
- Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
- Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
- Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
- if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
- Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
-
- # Apply any global override of the Maintainer field
- if self.Cnf.get("Dinstall::OverrideMaintainer"):
- Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
- Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
-
- Subst["__REJECT_MESSAGE__"] = reject_message
- Subst["__SOURCE__"] = changes.get("source", "Unknown")
- Subst["__VERSION__"] = changes.get("version", "Unknown")
-
- ###########################################################################
-
- def build_summaries(self):
- changes = self.pkg.changes
- files = self.pkg.files
-
- byhand = summary = new = ""
-
- # changes["distribution"] may not exist in corner cases
- # (e.g. unreadable changes files)
- if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
- changes["distribution"] = {}
-
- file_keys = files.keys()
- file_keys.sort()
- for file in file_keys:
- if files[file].has_key("byhand"):
- byhand = 1
- summary += file + " byhand\n"
- elif files[file].has_key("new"):
- new = 1
- summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
- if files[file].has_key("othercomponents"):
- summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
- if files[file]["type"] == "deb":
- deb_fh = utils.open_file(file)
- summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
- deb_fh.close()
- else:
- files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
- destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
- summary += file + "\n to " + destination + "\n"
-
- short_summary = summary
-
- # This is for direport's benefit...
- f = re_fdnic.sub("\n .\n", changes.get("changes",""))
-
- if byhand or new:
- summary += "Changes: " + f
-
- summary += self.announce(short_summary, 0)
-
- return (summary, short_summary)
-
- ###########################################################################
-
- def close_bugs (self, summary, action):
- changes = self.pkg.changes
- Subst = self.Subst
- Cnf = self.Cnf
-
- bugs = changes["closes"].keys()
-
- if not bugs:
- return summary
-
- bugs.sort()
- if not self.nmu.is_an_nmu(self.pkg):
- if changes["distribution"].has_key("experimental"):
- # tag bugs as fixed-in-experimental for uploads to experimental
- summary += "Setting bugs to severity fixed: "
- control_message = ""
- for bug in bugs:
- summary += "%s " % (bug)
- control_message += "tag %s + fixed-in-experimental\n" % (bug)
- if action and control_message != "":
- Subst["__CONTROL_MESSAGE__"] = control_message
- mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-experimental-fixed")
- utils.send_mail (mail_message)
- if action:
- self.Logger.log(["setting bugs to fixed"]+bugs)
-
-
- else:
- summary += "Closing bugs: "
- for bug in bugs:
- summary += "%s " % (bug)
- if action:
- Subst["__BUG_NUMBER__"] = bug
- if changes["distribution"].has_key("stable"):
- Subst["__STABLE_WARNING__"] = """
-Note that this package is not part of the released stable Debian
-distribution. It may have dependencies on other unreleased software,
-or other instabilities. Please take care if you wish to install it.
-The update will eventually make its way into the next released Debian
-distribution."""
- else:
- Subst["__STABLE_WARNING__"] = ""
- mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
- utils.send_mail (mail_message)
- if action:
- self.Logger.log(["closing bugs"]+bugs)
-
- else: # NMU
- summary += "Setting bugs to severity fixed: "
- control_message = ""
- for bug in bugs:
- summary += "%s " % (bug)
- control_message += "tag %s + fixed\n" % (bug)
- if action and control_message != "":
- Subst["__CONTROL_MESSAGE__"] = control_message
- mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-nmu-fixed")
- utils.send_mail (mail_message)
- if action:
- self.Logger.log(["setting bugs to fixed"]+bugs)
- summary += "\n"
- return summary
-
- ###########################################################################
-
- def announce (self, short_summary, action):
- Subst = self.Subst
- Cnf = self.Cnf
- changes = self.pkg.changes
-
- # Only do announcements for source uploads with a recent dpkg-dev installed
- if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
- return ""
-
- lists_done = {}
- summary = ""
- Subst["__SHORT_SUMMARY__"] = short_summary
-
- for dist in changes["distribution"].keys():
- list = Cnf.Find("Suite::%s::Announce" % (dist))
- if list == "" or lists_done.has_key(list):
- continue
- lists_done[list] = 1
- summary += "Announcing to %s\n" % (list)
-
- if action:
- Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
- if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
- Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
- mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
- utils.send_mail (mail_message)
-
- if Cnf.FindB("Dinstall::CloseBugs"):
- summary = self.close_bugs(summary, action)
-
- return summary
-
- ###########################################################################
-
- def accept (self, summary, short_summary):
- Cnf = self.Cnf
- Subst = self.Subst
- files = self.pkg.files
- changes = self.pkg.changes
- changes_file = self.pkg.changes_file
- dsc = self.pkg.dsc
-
- print "Accepting."
- self.Logger.log(["Accepting changes",changes_file])
-
- self.dump_vars(Cnf["Dir::Queue::Accepted"])
-
- # Move all the files into the accepted directory
- utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
- file_keys = files.keys()
- for file in file_keys:
- utils.move(file, Cnf["Dir::Queue::Accepted"])
- self.accept_bytes += float(files[file]["size"])
- self.accept_count += 1
-
- # Send accept mail, announce to lists, close bugs and check for
- # override disparities
- if not Cnf["Dinstall::Options::No-Mail"]:
- Subst["__SUITE__"] = ""
- Subst["__SUMMARY__"] = summary
- mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
- utils.send_mail(mail_message)
- self.announce(short_summary, 1)
-
-
- ## Helper stuff for DebBugs Version Tracking
- if Cnf.Find("Dir::Queue::BTSVersionTrack"):
- # ??? once queue/* is cleared on *.d.o and/or reprocessed
- # the conditionalization on dsc["bts changelog"] should be
- # dropped.
-
- # Write out the version history from the changelog
- if changes["architecture"].has_key("source") and \
- dsc.has_key("bts changelog"):
-
- temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
- dotprefix=1, perms=0644)
- version_history = utils.open_file(temp_filename, 'w')
- version_history.write(dsc["bts changelog"])
- version_history.close()
- filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
- changes_file[:-8]+".versions")
- os.rename(temp_filename, filename)
-
- # Write out the binary -> source mapping.
- temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
- dotprefix=1, perms=0644)
- debinfo = utils.open_file(temp_filename, 'w')
- for file in file_keys:
- f = files[file]
- if f["type"] == "deb":
- line = " ".join([f["package"], f["version"],
- f["architecture"], f["source package"],
- f["source version"]])
- debinfo.write(line+"\n")
- debinfo.close()
- filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
- changes_file[:-8]+".debinfo")
- os.rename(temp_filename, filename)
-
- self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
-
- ###########################################################################
-
- def queue_build (self, queue, path):
- Cnf = self.Cnf
- Subst = self.Subst
- files = self.pkg.files
- changes = self.pkg.changes
- changes_file = self.pkg.changes_file
- dsc = self.pkg.dsc
- file_keys = files.keys()
-
- ## Special support to enable clean auto-building of queued packages
- queue_id = database.get_or_set_queue_id(queue)
-
- self.projectB.query("BEGIN WORK")
- for suite in changes["distribution"].keys():
- if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
- continue
- suite_id = database.get_suite_id(suite)
- dest_dir = Cnf["Dir::QueueBuild"]
- if Cnf.FindB("Dinstall::SecurityQueueBuild"):
- dest_dir = os.path.join(dest_dir, suite)
- for file in file_keys:
- src = os.path.join(path, file)
- dest = os.path.join(dest_dir, file)
- if Cnf.FindB("Dinstall::SecurityQueueBuild"):
- # Copy it since the original won't be readable by www-data
- utils.copy(src, dest)
- else:
- # Create a symlink to it
- os.symlink(src, dest)
- # Add it to the list of packages for later processing by apt-ftparchive
- self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
- # If the .orig.tar.gz is in the pool, create a symlink to
- # it (if one doesn't already exist)
- if self.pkg.orig_tar_id:
- # Determine the .orig.tar.gz file name
- for dsc_file in self.pkg.dsc_files.keys():
- if dsc_file.endswith(".orig.tar.gz"):
- filename = dsc_file
- dest = os.path.join(dest_dir, filename)
- # If it doesn't exist, create a symlink
- if not os.path.exists(dest):
- # Find the .orig.tar.gz in the pool
- q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
- ql = q.getresult()
- if not ql:
- utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
- src = os.path.join(ql[0][0], ql[0][1])
- os.symlink(src, dest)
- # Add it to the list of packages for later processing by apt-ftparchive
- self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
- # if it does, update things to ensure it's not removed prematurely
- else:
- self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
-
- self.projectB.query("COMMIT WORK")
-
- ###########################################################################
-
- def check_override (self):
- Subst = self.Subst
- changes = self.pkg.changes
- files = self.pkg.files
- Cnf = self.Cnf
-
- # Abandon the check if:
- # a) it's a non-sourceful upload
- # b) override disparity checks have been disabled
- # c) we're not sending mail
- if not changes["architecture"].has_key("source") or \
- not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
- Cnf["Dinstall::Options::No-Mail"]:
- return
-
- summary = ""
- file_keys = files.keys()
- file_keys.sort()
- for file in file_keys:
- if not files[file].has_key("new") and files[file]["type"] == "deb":
- section = files[file]["section"]
- override_section = files[file]["override section"]
- if section.lower() != override_section.lower() and section != "-":
- # Ignore this; it's a common mistake and not worth whining about
- if section.lower() == "non-us/main" and override_section.lower() == "non-us":
- continue
- summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
- priority = files[file]["priority"]
- override_priority = files[file]["override priority"]
- if priority != override_priority and priority != "-":
- summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
-
- if summary == "":
- return
-
- Subst["__SUMMARY__"] = summary
- mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
- utils.send_mail(mail_message)
-
- ###########################################################################
-
- def force_reject (self, files):
- """Forcefully move files from the current directory to the
- reject directory. If any file already exists in the reject
- directory it will be moved to the morgue to make way for
- the new file."""
-
- Cnf = self.Cnf
-
- for file in files:
- # Skip any files which don't exist or which we don't have permission to copy.
- if os.access(file,os.R_OK) == 0:
- continue
- dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
- try:
- dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
- except OSError, e:
- # File exists? Let's try and move it to the morgue
- if errno.errorcode[e.errno] == 'EEXIST':
- morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
- try:
- morgue_file = utils.find_next_free(morgue_file)
- except utils.tried_too_hard_exc:
- # Something's either gone badly Pete Tong, or
- # someone is trying to exploit us.
- utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
- return
- utils.move(dest_file, morgue_file, perms=0660)
- try:
- dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
- except OSError, e:
- # Likewise
- utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
- return
- else:
- raise
- # If we got here, we own the destination file, so we can
- # safely overwrite it.
- utils.move(file, dest_file, 1, perms=0660)
- os.close(dest_fd)
-
- ###########################################################################
-
- def do_reject (self, manual = 0, reject_message = ""):
- # If we weren't given a manual rejection message, spawn an
- # editor so the user can add one in...
- if manual and not reject_message:
- temp_filename = utils.temp_filename()
- editor = os.environ.get("EDITOR","vi")
- answer = 'E'
- while answer == 'E':
- os.system("%s %s" % (editor, temp_filename))
- temp_fh = utils.open_file(temp_filename)
- reject_message = "".join(temp_fh.readlines())
- temp_fh.close()
- print "Reject message:"
- print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
- prompt = "[R]eject, Edit, Abandon, Quit ?"
- answer = "XXX"
- while prompt.find(answer) == -1:
- answer = utils.our_raw_input(prompt)
- m = re_default_answer.search(prompt)
- if answer == "":
- answer = m.group(1)
- answer = answer[:1].upper()
- os.unlink(temp_filename)
- if answer == 'A':
- return 1
- elif answer == 'Q':
- sys.exit(0)
-
- print "Rejecting.\n"
-
- Cnf = self.Cnf
- Subst = self.Subst
- pkg = self.pkg
-
- reason_filename = pkg.changes_file[:-8] + ".reason"
- reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
-
- # Move all the files into the reject directory
- reject_files = pkg.files.keys() + [pkg.changes_file]
- self.force_reject(reject_files)
-
- # If we fail here someone is probably trying to exploit the race
- # so let's just raise an exception ...
- if os.path.exists(reason_filename):
- os.unlink(reason_filename)
- reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
-
- if not manual:
- Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
- Subst["__MANUAL_REJECT_MESSAGE__"] = ""
- Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
- os.write(reason_fd, reject_message)
- reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/dak.rejected")
- else:
- # Build up the rejection email
- user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
-
- Subst["__REJECTOR_ADDRESS__"] = user_email_address
- Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
- Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
- reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/dak.rejected")
- # Write the rejection email out as the <foo>.reason file
- os.write(reason_fd, reject_mail_message)
-
- os.close(reason_fd)
-
- # Send the rejection mail if appropriate
- if not Cnf["Dinstall::Options::No-Mail"]:
- utils.send_mail(reject_mail_message)
-
- self.Logger.log(["rejected", pkg.changes_file])
- return 0
-
- ################################################################################
-
- # Ensure that source exists somewhere in the archive for the binary
- # upload being processed.
- #
- # (1) exact match => 1.0-3
- # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
-
- def source_exists (self, package, source_version, suites = ["any"]):
- okay = 1
- for suite in suites:
- if suite == "any":
- que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
- (package)
- else:
- # source must exist in suite X, or in some other suite that's
- # mapped to X, recursively... silent-maps are counted too,
- # unreleased-maps aren't.
- maps = self.Cnf.ValueList("SuiteMappings")[:]
- maps.reverse()
- maps = [ m.split() for m in maps ]
- maps = [ (x[1], x[2]) for x in maps
- if x[0] == "map" or x[0] == "silent-map" ]
- s = [suite]
- for x in maps:
- if x[1] in s and x[0] not in s:
- s.append(x[0])
-
- que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, string.join(["su.suite_name = '%s'" % a for a in s], " OR "))
- q = self.projectB.query(que)
-
- # Reduce the query results to a list of version numbers
- ql = map(lambda x: x[0], q.getresult())
-
- # Try (1)
- if source_version in ql:
- continue
-
- # Try (2)
- orig_source_version = re_bin_only_nmu.sub('', source_version)
- if orig_source_version in ql:
- continue
-
- # No source found...
- okay = 0
- break
- return okay
-
- ################################################################################
-
- def in_override_p (self, package, component, suite, binary_type, file):
- files = self.pkg.files
-
- if binary_type == "": # must be source
- type = "dsc"
- else:
- type = binary_type
-
- # Override suite name; used for example with proposed-updates
- if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
- suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
-
- # Avoid <undef> on unknown distributions
- suite_id = database.get_suite_id(suite)
- if suite_id == -1:
- return None
- component_id = database.get_component_id(component)
- type_id = database.get_override_type_id(type)
-
- # FIXME: nasty non-US speficic hack
- if component.lower().startswith("non-us/"):
- component = component[7:]
-
- q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
- % (package, suite_id, component_id, type_id))
- result = q.getresult()
- # If checking for a source package fall back on the binary override type
- if type == "dsc" and not result:
- deb_type_id = database.get_override_type_id("deb")
- udeb_type_id = database.get_override_type_id("udeb")
- q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
- % (package, suite_id, component_id, deb_type_id, udeb_type_id))
- result = q.getresult()
-
- # Remember the section and priority so we can check them later if appropriate
- if result:
- files[file]["override section"] = result[0][0]
- files[file]["override priority"] = result[0][1]
-
- return result
-
- ################################################################################
-
- def reject (self, str, prefix="Rejected: "):
- if str:
- # Unlike other rejects we add new lines first to avoid trailing
- # new lines when this message is passed back up to a caller.
- if self.reject_message:
- self.reject_message += "\n"
- self.reject_message += prefix + str
-
- ################################################################################
-
- def get_anyversion(self, query_result, suite):
- anyversion=None
- anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
- for (v, s) in query_result:
- if s in [ string.lower(x) for x in anysuite ]:
- if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
- anyversion=v
- return anyversion
-
- ################################################################################
-
- def cross_suite_version_check(self, query_result, file, new_version):
- """Ensure versions are newer than existing packages in target
- suites and that cross-suite version checking rules as
- set out in the conf file are satisfied."""
-
- # Check versions for each target suite
- for target_suite in self.pkg.changes["distribution"].keys():
- must_be_newer_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)))
- must_be_older_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)))
- # Enforce "must be newer than target suite" even if conffile omits it
- if target_suite not in must_be_newer_than:
- must_be_newer_than.append(target_suite)
- for entry in query_result:
- existent_version = entry[0]
- suite = entry[1]
- if suite in must_be_newer_than and \
- apt_pkg.VersionCompare(new_version, existent_version) < 1:
- self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
- if suite in must_be_older_than and \
- apt_pkg.VersionCompare(new_version, existent_version) > -1:
- ch = self.pkg.changes
- cansave = 0
- if ch.get('distribution-version', {}).has_key(suite):
- # we really use the other suite, ignoring the conflicting one ...
- addsuite = ch["distribution-version"][suite]
-
- add_version = self.get_anyversion(query_result, addsuite)
- target_version = self.get_anyversion(query_result, target_suite)
-
- if not add_version:
- # not add_version can only happen if we map to a suite
- # that doesn't enhance the suite we're propup'ing from.
- # so "propup-ver x a b c; map a d" is a problem only if
- # d doesn't enhance a.
- #
- # i think we could always propagate in this case, rather
- # than complaining. either way, this isn't a REJECT issue
- #
- # And - we really should complain to the dorks who configured dak
- self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
- self.pkg.changes.setdefault("propdistribution", {})
- self.pkg.changes["propdistribution"][addsuite] = 1
- cansave = 1
- elif not target_version:
- # not targets_version is true when the package is NEW
- # we could just stick with the "...old version..." REJECT
- # for this, I think.
- self.reject("Won't propogate NEW packages.")
- elif apt_pkg.VersionCompare(new_version, add_version) < 0:
- # propogation would be redundant. no need to reject though.
- self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
- cansave = 1
- elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
- apt_pkg.VersionCompare(add_version, target_version) >= 0:
- # propogate!!
- self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
- self.pkg.changes.setdefault("propdistribution", {})
- self.pkg.changes["propdistribution"][addsuite] = 1
- cansave = 1
-
- if not cansave:
- self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
-
- ################################################################################
-
- def check_binary_against_db(self, file):
- self.reject_message = ""
- files = self.pkg.files
-
- # Ensure version is sane
- q = self.projectB.query("""
-SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
- architecture a
- WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
- AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
- % (files[file]["package"],
- files[file]["architecture"]))
- self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
-
- # Check for any existing copies of the file
- q = self.projectB.query("""
-SELECT b.id FROM binaries b, architecture a
- WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
- AND a.id = b.architecture"""
- % (files[file]["package"],
- files[file]["version"],
- files[file]["architecture"]))
- if q.getresult():
- self.reject("%s: can not overwrite existing copy already in the archive." % (file))
-
- return self.reject_message
-
- ################################################################################
-
- def check_source_against_db(self, file):
- self.reject_message = ""
- dsc = self.pkg.dsc
-
- # Ensure version is sane
- q = self.projectB.query("""
-SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
- WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
- self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
-
- return self.reject_message
-
- ################################################################################
-
- # **WARNING**
- # NB: this function can remove entries from the 'files' index [if
- # the .orig.tar.gz is a duplicate of the one in the archive]; if
- # you're iterating over 'files' and call this function as part of
- # the loop, be sure to add a check to the top of the loop to
- # ensure you haven't just tried to derefernece the deleted entry.
- # **WARNING**
-
- def check_dsc_against_db(self, file):
- self.reject_message = ""
- files = self.pkg.files
- dsc_files = self.pkg.dsc_files
- legacy_source_untouchable = self.pkg.legacy_source_untouchable
- self.pkg.orig_tar_gz = None
-
- # Try and find all files mentioned in the .dsc. This has
- # to work harder to cope with the multiple possible
- # locations of an .orig.tar.gz.
- for dsc_file in dsc_files.keys():
- found = None
- if files.has_key(dsc_file):
- actual_md5 = files[dsc_file]["md5sum"]
- actual_size = int(files[dsc_file]["size"])
- found = "%s in incoming" % (dsc_file)
- # Check the file does not already exist in the archive
- q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
- ql = q.getresult()
- # Strip out anything that isn't '%s' or '/%s$'
- for i in ql:
- if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
- ql.remove(i)
-
- # "[dak] has not broken them. [dak] has fixed a
- # brokenness. Your crappy hack exploited a bug in
- # the old dinstall.
- #
- # "(Come on! I thought it was always obvious that
- # one just doesn't release different files with
- # the same name and version.)"
- # -- ajk@ on d-devel@l.d.o
-
- if ql:
- # Ignore exact matches for .orig.tar.gz
- match = 0
- if dsc_file.endswith(".orig.tar.gz"):
- for i in ql:
- if files.has_key(dsc_file) and \
- int(files[dsc_file]["size"]) == int(i[0]) and \
- files[dsc_file]["md5sum"] == i[1]:
- self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
- del files[dsc_file]
- self.pkg.orig_tar_gz = i[2] + i[3]
- match = 1
-
- if not match:
- self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
- elif dsc_file.endswith(".orig.tar.gz"):
- # Check in the pool
- q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
- ql = q.getresult()
- # Strip out anything that isn't '%s' or '/%s$'
- for i in ql:
- if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
- ql.remove(i)
-
- if ql:
- # Unfortunately, we may get more than one match here if,
- # for example, the package was in potato but had an -sa
- # upload in woody. So we need to choose the right one.
-
- x = ql[0]; # default to something sane in case we don't match any or have only one
-
- if len(ql) > 1:
- for i in ql:
- old_file = i[0] + i[1]
- old_file_fh = utils.open_file(old_file)
- actual_md5 = apt_pkg.md5sum(old_file_fh)
- old_file_fh.close()
- actual_size = os.stat(old_file)[stat.ST_SIZE]
- if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
- x = i
- else:
- legacy_source_untouchable[i[3]] = ""
-
- old_file = x[0] + x[1]
- old_file_fh = utils.open_file(old_file)
- actual_md5 = apt_pkg.md5sum(old_file_fh)
- old_file_fh.close()
- actual_size = os.stat(old_file)[stat.ST_SIZE]
- found = old_file
- suite_type = x[2]
- dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
- # See install() in process-accepted...
- self.pkg.orig_tar_id = x[3]
- self.pkg.orig_tar_gz = old_file
- if suite_type == "legacy" or suite_type == "legacy-mixed":
- self.pkg.orig_tar_location = "legacy"
- else:
- self.pkg.orig_tar_location = x[4]
- else:
- # Not there? Check the queue directories...
-
- in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
- # See process_it() in 'dak process-unchecked' for explanation of this
- if os.path.exists(in_unchecked):
- return (self.reject_message, in_unchecked)
- else:
- for dir in [ "Accepted", "New", "Byhand" ]:
- in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
- if os.path.exists(in_otherdir):
- in_otherdir_fh = utils.open_file(in_otherdir)
- actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
- in_otherdir_fh.close()
- actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
- found = in_otherdir
- self.pkg.orig_tar_gz = in_otherdir
-
- if not found:
- self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
- self.pkg.orig_tar_gz = -1
- continue
- else:
- self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
- continue
- if actual_md5 != dsc_files[dsc_file]["md5sum"]:
- self.reject("md5sum for %s doesn't match %s." % (found, file))
- if actual_size != int(dsc_files[dsc_file]["size"]):
- self.reject("size for %s doesn't match %s." % (found, file))
-
- return (self.reject_message, None)
-
- def do_query(self, q):
- sys.stderr.write("query: \"%s\" ... " % (q))
- before = time.time()
- r = self.projectB.query(q)
- time_diff = time.time()-before
- sys.stderr.write("took %.3f seconds.\n" % (time_diff))
- return r
+++ /dev/null
-#!/usr/bin/env python
-
-# Utility functions
-# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
-# $Id: utils.py,v 1.73 2005-03-18 05:24:38 troup Exp $
-
-################################################################################
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-
-################################################################################
-
-import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
- string, sys, tempfile, traceback
-import apt_pkg
-import database
-
-################################################################################
-
-re_comments = re.compile(r"\#.*")
-re_no_epoch = re.compile(r"^\d+\:")
-re_no_revision = re.compile(r"-[^-]+$")
-re_arch_from_filename = re.compile(r"/binary-[^/]+/")
-re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
-re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$")
-re_issource = re.compile (r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")
-
-re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
-re_multi_line_field = re.compile(r"^\s(.*)")
-re_taint_free = re.compile(r"^[-+~/\.\w]+$")
-
-re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")
-
-changes_parse_error_exc = "Can't parse line in .changes file"
-invalid_dsc_format_exc = "Invalid .dsc file"
-nk_format_exc = "Unknown Format: in .changes file"
-no_files_exc = "No Files: field in .dsc or .changes file."
-cant_open_exc = "Can't open file"
-unknown_hostname_exc = "Unknown hostname"
-cant_overwrite_exc = "Permission denied; can't overwrite existent file."
-file_exists_exc = "Destination file exists"
-sendmail_failed_exc = "Sendmail invocation failed"
-tried_too_hard_exc = "Tried too hard to find a free filename."
-
-default_config = "/etc/dak/dak.conf"
-default_apt_config = "/etc/dak/apt.conf"
-
-################################################################################
-
-class Error(Exception):
- """Base class for exceptions in this module."""
- pass
-
-class ParseMaintError(Error):
- """Exception raised for errors in parsing a maintainer field.
-
- Attributes:
- message -- explanation of the error
- """
-
- def __init__(self, message):
- self.args = message,
- self.message = message
-
-################################################################################
-
-def open_file(filename, mode='r'):
- try:
- f = open(filename, mode)
- except IOError:
- raise cant_open_exc, filename
- return f
-
-################################################################################
-
-def our_raw_input(prompt=""):
- if prompt:
- sys.stdout.write(prompt)
- sys.stdout.flush()
- try:
- ret = raw_input()
- return ret
- except EOFError:
- sys.stderr.write("\nUser interrupt (^D).\n")
- raise SystemExit
-
-################################################################################
-
-def str_isnum (s):
- for c in s:
- if c not in string.digits:
- return 0
- return 1
-
-################################################################################
-
-def extract_component_from_section(section):
- component = ""
-
- if section.find('/') != -1:
- component = section.split('/')[0]
- if component.lower() == "non-us" and section.find('/') != -1:
- s = component + '/' + section.split('/')[1]
- if Cnf.has_key("Component::%s" % s): # Avoid e.g. non-US/libs
- component = s
-
- if section.lower() == "non-us":
- component = "non-US/main"
-
- # non-US prefix is case insensitive
- if component.lower()[:6] == "non-us":
- component = "non-US"+component[6:]
-
- # Expand default component
- if component == "":
- if Cnf.has_key("Component::%s" % section):
- component = section
- else:
- component = "main"
- elif component == "non-US":
- component = "non-US/main"
-
- return (section, component)
-
-################################################################################
-
-def parse_changes(filename, signing_rules=0):
- """Parses a changes file and returns a dictionary where each field is a
-key. The mandatory first argument is the filename of the .changes
-file.
-
-signing_rules is an optional argument:
-
- o If signing_rules == -1, no signature is required.
- o If signing_rules == 0 (the default), a signature is required.
- o If signing_rules == 1, it turns on the same strict format checking
- as dpkg-source.
-
-The rules for (signing_rules == 1)-mode are:
-
- o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
- followed by any PGP header data and must end with a blank line.
-
- o The data section must end with a blank line and must be followed by
- "-----BEGIN PGP SIGNATURE-----".
-"""
-
- error = ""
- changes = {}
-
- changes_in = open_file(filename)
- lines = changes_in.readlines()
-
- if not lines:
- raise changes_parse_error_exc, "[Empty changes file]"
-
- # Reindex by line number so we can easily verify the format of
- # .dsc files...
- index = 0
- indexed_lines = {}
- for line in lines:
- index += 1
- indexed_lines[index] = line[:-1]
-
- inside_signature = 0
-
- num_of_lines = len(indexed_lines.keys())
- index = 0
- first = -1
- while index < num_of_lines:
- index += 1
- line = indexed_lines[index]
- if line == "":
- if signing_rules == 1:
- index += 1
- if index > num_of_lines:
- raise invalid_dsc_format_exc, index
- line = indexed_lines[index]
- if not line.startswith("-----BEGIN PGP SIGNATURE"):
- raise invalid_dsc_format_exc, index
- inside_signature = 0
- break
- else:
- continue
- if line.startswith("-----BEGIN PGP SIGNATURE"):
- break
- if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
- inside_signature = 1
- if signing_rules == 1:
- while index < num_of_lines and line != "":
- index += 1
- line = indexed_lines[index]
- continue
- # If we're not inside the signed data, don't process anything
- if signing_rules >= 0 and not inside_signature:
- continue
- slf = re_single_line_field.match(line)
- if slf:
- field = slf.groups()[0].lower()
- changes[field] = slf.groups()[1]
- first = 1
- continue
- if line == " .":
- changes[field] += '\n'
- continue
- mlf = re_multi_line_field.match(line)
- if mlf:
- if first == -1:
- raise changes_parse_error_exc, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
- if first == 1 and changes[field] != "":
- changes[field] += '\n'
- first = 0
- changes[field] += mlf.groups()[0] + '\n'
- continue
- error += line
-
- if signing_rules == 1 and inside_signature:
- raise invalid_dsc_format_exc, index
-
- changes_in.close()
- changes["filecontents"] = "".join(lines)
-
- if error:
- raise changes_parse_error_exc, error
-
- return changes
-
-################################################################################
-
-# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
-
-def build_file_list(changes, is_a_dsc=0):
- files = {}
-
- # Make sure we have a Files: field to parse...
- if not changes.has_key("files"):
- raise no_files_exc
-
- # Make sure we recognise the format of the Files: field
- format = changes.get("format", "")
- if format != "":
- format = float(format)
- if not is_a_dsc and (format < 1.5 or format > 2.0):
- raise nk_format_exc, format
-
- # Parse each entry/line:
- for i in changes["files"].split('\n'):
- if not i:
- break
- s = i.split()
- section = priority = ""
- try:
- if is_a_dsc:
- (md5, size, name) = s
- else:
- (md5, size, section, priority, name) = s
- except ValueError:
- raise changes_parse_error_exc, i
-
- if section == "":
- section = "-"
- if priority == "":
- priority = "-"
-
- (section, component) = extract_component_from_section(section)
-
- files[name] = Dict(md5sum=md5, size=size, section=section,
- priority=priority, component=component)
-
- return files
-
-################################################################################
-
-def force_to_utf8(s):
- """Forces a string to UTF-8. If the string isn't already UTF-8,
-it's assumed to be ISO-8859-1."""
- try:
- unicode(s, 'utf-8')
- return s
- except UnicodeError:
- latin1_s = unicode(s,'iso8859-1')
- return latin1_s.encode('utf-8')
-
-def rfc2047_encode(s):
- """Encodes a (header) string per RFC2047 if necessary. If the
-string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
- try:
- codecs.lookup('ascii')[1](s)
- return s
- except UnicodeError:
- pass
- try:
- codecs.lookup('utf-8')[1](s)
- h = email.Header.Header(s, 'utf-8', 998)
- return str(h)
- except UnicodeError:
- h = email.Header.Header(s, 'iso-8859-1', 998)
- return str(h)
-
-################################################################################
-
-# <Culus> 'The standard sucks, but my tool is supposed to interoperate
-# with it. I know - I'll fix the suckage and make things
-# incompatible!'
-
-def fix_maintainer (maintainer):
- """Parses a Maintainer or Changed-By field and returns:
- (1) an RFC822 compatible version,
- (2) an RFC2047 compatible version,
- (3) the name
- (4) the email
-
-The name is forced to UTF-8 for both (1) and (3). If the name field
-contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
-switched to 'email (name)' format."""
- maintainer = maintainer.strip()
- if not maintainer:
- return ('', '', '', '')
-
- if maintainer.find("<") == -1:
- email = maintainer
- name = ""
- elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
- email = maintainer[1:-1]
- name = ""
- else:
- m = re_parse_maintainer.match(maintainer)
- if not m:
- raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
- name = m.group(1)
- email = m.group(2)
-
- # Get an RFC2047 compliant version of the name
- rfc2047_name = rfc2047_encode(name)
-
- # Force the name to be UTF-8
- name = force_to_utf8(name)
-
- if name.find(',') != -1 or name.find('.') != -1:
- rfc822_maint = "%s (%s)" % (email, name)
- rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
- else:
- rfc822_maint = "%s <%s>" % (name, email)
- rfc2047_maint = "%s <%s>" % (rfc2047_name, email)
-
- if email.find("@") == -1 and email.find("buildd_") != 0:
- raise ParseMaintError, "No @ found in email address part."
-
- return (rfc822_maint, rfc2047_maint, name, email)
-
-################################################################################
-
-# sendmail wrapper, takes _either_ a message string or a file as arguments
-def send_mail (message, filename=""):
- # If we've been passed a string dump it into a temporary file
- if message:
- filename = tempfile.mktemp()
- fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
- os.write (fd, message)
- os.close (fd)
-
- # Invoke sendmail
- (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
- if (result != 0):
- raise sendmail_failed_exc, output
-
- # Clean up any temporary files
- if message:
- os.unlink (filename)
-
-################################################################################
-
-def poolify (source, component):
- if component:
- component += '/'
- # FIXME: this is nasty
- component = component.lower().replace("non-us/", "non-US/")
- if source[:3] == "lib":
- return component + source[:4] + '/' + source + '/'
- else:
- return component + source[:1] + '/' + source + '/'
-
-################################################################################
-
-def move (src, dest, overwrite = 0, perms = 0664):
- if os.path.exists(dest) and os.path.isdir(dest):
- dest_dir = dest
- else:
- dest_dir = os.path.dirname(dest)
- if not os.path.exists(dest_dir):
- umask = os.umask(00000)
- os.makedirs(dest_dir, 02775)
- os.umask(umask)
- #print "Moving %s to %s..." % (src, dest)
- if os.path.exists(dest) and os.path.isdir(dest):
- dest += '/' + os.path.basename(src)
- # Don't overwrite unless forced to
- if os.path.exists(dest):
- if not overwrite:
- fubar("Can't move %s to %s - file already exists." % (src, dest))
- else:
- if not os.access(dest, os.W_OK):
- fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
- shutil.copy2(src, dest)
- os.chmod(dest, perms)
- os.unlink(src)
-
-def copy (src, dest, overwrite = 0, perms = 0664):
- if os.path.exists(dest) and os.path.isdir(dest):
- dest_dir = dest
- else:
- dest_dir = os.path.dirname(dest)
- if not os.path.exists(dest_dir):
- umask = os.umask(00000)
- os.makedirs(dest_dir, 02775)
- os.umask(umask)
- #print "Copying %s to %s..." % (src, dest)
- if os.path.exists(dest) and os.path.isdir(dest):
- dest += '/' + os.path.basename(src)
- # Don't overwrite unless forced to
- if os.path.exists(dest):
- if not overwrite:
- raise file_exists_exc
- else:
- if not os.access(dest, os.W_OK):
- raise cant_overwrite_exc
- shutil.copy2(src, dest)
- os.chmod(dest, perms)
-
-################################################################################
-
-def where_am_i ():
- res = socket.gethostbyaddr(socket.gethostname())
- database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
- if database_hostname:
- return database_hostname
- else:
- return res[0]
-
-def which_conf_file ():
- res = socket.gethostbyaddr(socket.gethostname())
- if Cnf.get("Config::" + res[0] + "::DakConfig"):
- return Cnf["Config::" + res[0] + "::DakConfig"]
- else:
- return default_config
-
-def which_apt_conf_file ():
- res = socket.gethostbyaddr(socket.gethostname())
- if Cnf.get("Config::" + res[0] + "::AptConfig"):
- return Cnf["Config::" + res[0] + "::AptConfig"]
- else:
- return default_apt_config
-
-################################################################################
-
-# Escape characters which have meaning to SQL's regex comparison operator ('~')
-# (woefully incomplete)
-
-def regex_safe (s):
- s = s.replace('+', '\\\\+')
- s = s.replace('.', '\\\\.')
- return s
-
-################################################################################
-
-# Perform a substition of template
-def TemplateSubst(map, filename):
- file = open_file(filename)
- template = file.read()
- for x in map.keys():
- template = template.replace(x,map[x])
- file.close()
- return template
-
-################################################################################
-
-def fubar(msg, exit_code=1):
- sys.stderr.write("E: %s\n" % (msg))
- sys.exit(exit_code)
-
-def warn(msg):
- sys.stderr.write("W: %s\n" % (msg))
-
-################################################################################
-
-# Returns the user name with a laughable attempt at rfc822 conformancy
-# (read: removing stray periods).
-def whoami ():
- return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
-
-################################################################################
-
-def size_type (c):
- t = " B"
- if c > 10240:
- c = c / 1024
- t = " KB"
- if c > 10240:
- c = c / 1024
- t = " MB"
- return ("%d%s" % (c, t))
-
-################################################################################
-
-def cc_fix_changes (changes):
- o = changes.get("architecture", "")
- if o:
- del changes["architecture"]
- changes["architecture"] = {}
- for j in o.split():
- changes["architecture"][j] = 1
-
-# Sort by source name, source version, 'have source', and then by filename
-def changes_compare (a, b):
- try:
- a_changes = parse_changes(a)
- except:
- return -1
-
- try:
- b_changes = parse_changes(b)
- except:
- return 1
-
- cc_fix_changes (a_changes)
- cc_fix_changes (b_changes)
-
- # Sort by source name
- a_source = a_changes.get("source")
- b_source = b_changes.get("source")
- q = cmp (a_source, b_source)
- if q:
- return q
-
- # Sort by source version
- a_version = a_changes.get("version", "0")
- b_version = b_changes.get("version", "0")
- q = apt_pkg.VersionCompare(a_version, b_version)
- if q:
- return q
-
- # Sort by 'have source'
- a_has_source = a_changes["architecture"].get("source")
- b_has_source = b_changes["architecture"].get("source")
- if a_has_source and not b_has_source:
- return -1
- elif b_has_source and not a_has_source:
- return 1
-
- # Fall back to sort by filename
- return cmp(a, b)
-
-################################################################################
-
-def find_next_free (dest, too_many=100):
- extra = 0
- orig_dest = dest
- while os.path.exists(dest) and extra < too_many:
- dest = orig_dest + '.' + repr(extra)
- extra += 1
- if extra >= too_many:
- raise tried_too_hard_exc
- return dest
-
-################################################################################
-
-def result_join (original, sep = '\t'):
- list = []
- for i in xrange(len(original)):
- if original[i] == None:
- list.append("")
- else:
- list.append(original[i])
- return sep.join(list)
-
-################################################################################
-
-def prefix_multi_line_string(str, prefix, include_blank_lines=0):
- out = ""
- for line in str.split('\n'):
- line = line.strip()
- if line or include_blank_lines:
- out += "%s%s\n" % (prefix, line)
- # Strip trailing new line
- if out:
- out = out[:-1]
- return out
-
-################################################################################
-
-def validate_changes_file_arg(filename, require_changes=1):
- """'filename' is either a .changes or .dak file. If 'filename' is a
-.dak file, it's changed to be the corresponding .changes file. The
-function then checks if the .changes file a) exists and b) is
-readable and returns the .changes filename if so. If there's a
-problem, the next action depends on the option 'require_changes'
-argument:
-
- o If 'require_changes' == -1, errors are ignored and the .changes
- filename is returned.
- o If 'require_changes' == 0, a warning is given and 'None' is returned.
- o If 'require_changes' == 1, a fatal error is raised.
-"""
- error = None
-
- orig_filename = filename
- if filename.endswith(".dak"):
- filename = filename[:-6]+".changes"
-
- if not filename.endswith(".changes"):
- error = "invalid file type; not a changes file"
- else:
- if not os.access(filename,os.R_OK):
- if os.path.exists(filename):
- error = "permission denied"
- else:
- error = "file not found"
-
- if error:
- if require_changes == 1:
- fubar("%s: %s." % (orig_filename, error))
- elif require_changes == 0:
- warn("Skipping %s - %s" % (orig_filename, error))
- return None
- else: # We only care about the .dak file
- return filename
- else:
- return filename
-
-################################################################################
-
-def real_arch(arch):
- return (arch != "source" and arch != "all")
-
-################################################################################
-
-def join_with_commas_and(list):
- if len(list) == 0: return "nothing"
- if len(list) == 1: return list[0]
- return ", ".join(list[:-1]) + " and " + list[-1]
-
-################################################################################
-
-def pp_deps (deps):
- pp_deps = []
- for atom in deps:
- (pkg, version, constraint) = atom
- if constraint:
- pp_dep = "%s (%s %s)" % (pkg, constraint, version)
- else:
- pp_dep = pkg
- pp_deps.append(pp_dep)
- return " |".join(pp_deps)
-
-################################################################################
-
-def get_conf():
- return Cnf
-
-################################################################################
-
-# Handle -a, -c and -s arguments; returns them as SQL constraints
-def parse_args(Options):
- # Process suite
- if Options["Suite"]:
- suite_ids_list = []
- for suite in split_args(Options["Suite"]):
- suite_id = dak.lib.database.get_suite_id(suite)
- if suite_id == -1:
- warn("suite '%s' not recognised." % (suite))
- else:
- suite_ids_list.append(suite_id)
- if suite_ids_list:
- con_suites = "AND su.id IN (%s)" % ", ".join(map(str, suite_ids_list))
- else:
- fubar("No valid suite given.")
- else:
- con_suites = ""
-
- # Process component
- if Options["Component"]:
- component_ids_list = []
- for component in split_args(Options["Component"]):
- component_id = dak.lib.database.get_component_id(component)
- if component_id == -1:
- warn("component '%s' not recognised." % (component))
- else:
- component_ids_list.append(component_id)
- if component_ids_list:
- con_components = "AND c.id IN (%s)" % ", ".join(map(str, component_ids_list))
- else:
- fubar("No valid component given.")
- else:
- con_components = ""
-
- # Process architecture
- con_architectures = ""
- if Options["Architecture"]:
- arch_ids_list = []
- check_source = 0
- for architecture in split_args(Options["Architecture"]):
- if architecture == "source":
- check_source = 1
- else:
- architecture_id = dak.lib.database.get_architecture_id(architecture)
- if architecture_id == -1:
- warn("architecture '%s' not recognised." % (architecture))
- else:
- arch_ids_list.append(architecture_id)
- if arch_ids_list:
- con_architectures = "AND a.id IN (%s)" % ", ".join(map(str, arch_ids_list))
- else:
- if not check_source:
- fubar("No valid architecture given.")
- else:
- check_source = 1
-
- return (con_suites, con_architectures, con_components, check_source)
-
-################################################################################
-
-# Inspired(tm) by Bryn Keller's print_exc_plus (See
-# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
-
-def print_exc():
- tb = sys.exc_info()[2]
- while tb.tb_next:
- tb = tb.tb_next
- stack = []
- frame = tb.tb_frame
- while frame:
- stack.append(frame)
- frame = frame.f_back
- stack.reverse()
- traceback.print_exc()
- for frame in stack:
- print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
- frame.f_code.co_filename,
- frame.f_lineno)
- for key, value in frame.f_locals.items():
- print "\t%20s = " % key,
- try:
- print value
- except:
- print "<unable to print>"
-
-################################################################################
-
-def try_with_debug(function):
- try:
- function()
- except SystemExit:
- raise
- except:
- print_exc()
-
-################################################################################
-
-# Function for use in sorting lists of architectures.
-# Sorts normally except that 'source' dominates all others.
-
-def arch_compare_sw (a, b):
- if a == "source" and b == "source":
- return 0
- elif a == "source":
- return -1
- elif b == "source":
- return 1
-
- return cmp (a, b)
-
-################################################################################
-
-# Split command line arguments which can be separated by either commas
-# or whitespace. If dwim is set, it will complain about string ending
-# in comma since this usually means someone did 'dak ls -a i386, m68k
-# foo' or something and the inevitable confusion resulting from 'm68k'
-# being treated as an argument is undesirable.
-
-def split_args (s, dwim=1):
- if s.find(",") == -1:
- return s.split()
- else:
- if s[-1:] == "," and dwim:
- fubar("split_args: found trailing comma, spurious space maybe?")
- return s.split(",")
-
-################################################################################
-
-def Dict(**dict): return dict
-
-########################################
-
-# Our very own version of commands.getouputstatus(), hacked to support
-# gpgv's status fd.
-def gpgv_get_status_output(cmd, status_read, status_write):
- cmd = ['/bin/sh', '-c', cmd]
- p2cread, p2cwrite = os.pipe()
- c2pread, c2pwrite = os.pipe()
- errout, errin = os.pipe()
- pid = os.fork()
- if pid == 0:
- # Child
- os.close(0)
- os.close(1)
- os.dup(p2cread)
- os.dup(c2pwrite)
- os.close(2)
- os.dup(errin)
- for i in range(3, 256):
- if i != status_write:
- try:
- os.close(i)
- except:
- pass
- try:
- os.execvp(cmd[0], cmd)
- finally:
- os._exit(1)
-
- # Parent
- os.close(p2cread)
- os.dup2(c2pread, c2pwrite)
- os.dup2(errout, errin)
-
- output = status = ""
- while 1:
- i, o, e = select.select([c2pwrite, errin, status_read], [], [])
- more_data = []
- for fd in i:
- r = os.read(fd, 8196)
- if len(r) > 0:
- more_data.append(fd)
- if fd == c2pwrite or fd == errin:
- output += r
- elif fd == status_read:
- status += r
- else:
- fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
- if not more_data:
- pid, exit_status = os.waitpid(pid, 0)
- try:
- os.close(status_write)
- os.close(status_read)
- os.close(c2pread)
- os.close(c2pwrite)
- os.close(p2cwrite)
- os.close(errin)
- os.close(errout)
- except:
- pass
- break
-
- return output, status, exit_status
-
-############################################################
-
-
-def check_signature (sig_filename, reject, data_filename="", keyrings=None):
- """Check the signature of a file and return the fingerprint if the
-signature is valid or 'None' if it's not. The first argument is the
-filename whose signature should be checked. The second argument is a
-reject function and is called when an error is found. The reject()
-function must allow for two arguments: the first is the error message,
-the second is an optional prefix string. It's possible for reject()
-to be called more than once during an invocation of check_signature().
-The third argument is optional and is the name of the files the
-detached signature applies to. The fourth argument is optional and is
-a *list* of keyrings to use.
-"""
-
- # Ensure the filename contains no shell meta-characters or other badness
- if not re_taint_free.match(sig_filename):
- reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
- return None
-
- if data_filename and not re_taint_free.match(data_filename):
- reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
- return None
-
- if not keyrings:
- keyrings = (Cnf["Dinstall::PGPKeyring"], Cnf["Dinstall::GPGKeyring"])
-
- # Build the command line
- status_read, status_write = os.pipe();
- cmd = "gpgv --status-fd %s" % (status_write)
- for keyring in keyrings:
- cmd += " --keyring %s" % (keyring)
- cmd += " %s %s" % (sig_filename, data_filename)
- # Invoke gpgv on the file
- (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
-
- # Process the status-fd output
- keywords = {}
- bad = internal_error = ""
- for line in status.split('\n'):
- line = line.strip()
- if line == "":
- continue
- split = line.split()
- if len(split) < 2:
- internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
- continue
- (gnupg, keyword) = split[:2]
- if gnupg != "[GNUPG:]":
- internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
- continue
- args = split[2:]
- if keywords.has_key(keyword) and (keyword != "NODATA" and keyword != "SIGEXPIRED"):
- internal_error += "found duplicate status token ('%s').\n" % (keyword)
- continue
- else:
- keywords[keyword] = args
-
- # If we failed to parse the status-fd output, let's just whine and bail now
- if internal_error:
- reject("internal error while performing signature check on %s." % (sig_filename))
- reject(internal_error, "")
- reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
- return None
-
- # Now check for obviously bad things in the processed output
- if keywords.has_key("SIGEXPIRED"):
- reject("The key used to sign %s has expired." % (sig_filename))
- bad = 1
- if keywords.has_key("KEYREVOKED"):
- reject("The key used to sign %s has been revoked." % (sig_filename))
- bad = 1
- if keywords.has_key("BADSIG"):
- reject("bad signature on %s." % (sig_filename))
- bad = 1
- if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
- reject("failed to check signature on %s." % (sig_filename))
- bad = 1
- if keywords.has_key("NO_PUBKEY"):
- args = keywords["NO_PUBKEY"]
- if len(args) >= 1:
- key = args[0]
- reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
- bad = 1
- if keywords.has_key("BADARMOR"):
- reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
- bad = 1
- if keywords.has_key("NODATA"):
- reject("no signature found in %s." % (sig_filename))
- bad = 1
-
- if bad:
- return None
-
- # Next check gpgv exited with a zero return code
- if exit_status:
- reject("gpgv failed while checking %s." % (sig_filename))
- if status.strip():
- reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
- else:
- reject(prefix_multi_line_string(output, " [GPG output:] "), "")
- return None
-
- # Sanity check the good stuff we expect
- if not keywords.has_key("VALIDSIG"):
- reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
- bad = 1
- else:
- args = keywords["VALIDSIG"]
- if len(args) < 1:
- reject("internal error while checking signature on %s." % (sig_filename))
- bad = 1
- else:
- fingerprint = args[0]
- if not keywords.has_key("GOODSIG"):
- reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
- bad = 1
- if not keywords.has_key("SIG_ID"):
- reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
- bad = 1
-
- # Finally ensure there's not something we don't recognise
- known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
- SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
- NODATA="")
-
- for keyword in keywords.keys():
- if not known_keywords.has_key(keyword):
- reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
- bad = 1
-
- if bad:
- return None
- else:
- return fingerprint
-
-################################################################################
-
-# Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
-
-def wrap(paragraph, max_length, prefix=""):
- line = ""
- s = ""
- have_started = 0
- words = paragraph.split()
-
- for word in words:
- word_size = len(word)
- if word_size > max_length:
- if have_started:
- s += line + '\n' + prefix
- s += word + '\n' + prefix
- else:
- if have_started:
- new_length = len(line) + word_size + 1
- if new_length > max_length:
- s += line + '\n' + prefix
- line = word
- else:
- line += ' ' + word
- else:
- line = word
- have_started = 1
-
- if have_started:
- s += line
-
- return s
-
-################################################################################
-
-# Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
-# Returns fixed 'src'
-def clean_symlink (src, dest, root):
- src = src.replace(root, '', 1)
- dest = dest.replace(root, '', 1)
- dest = os.path.dirname(dest)
- new_src = '../' * len(dest.split('/'))
- return new_src + src
-
-################################################################################
-
-def temp_filename(directory=None, dotprefix=None, perms=0700):
- """Return a secure and unique filename by pre-creating it.
-If 'directory' is non-null, it will be the directory the file is pre-created in.
-If 'dotprefix' is non-null, the filename will be prefixed with a '.'."""
-
- if directory:
- old_tempdir = tempfile.tempdir
- tempfile.tempdir = directory
-
- filename = tempfile.mktemp()
-
- if dotprefix:
- filename = "%s/.%s" % (os.path.dirname(filename), os.path.basename(filename))
- fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, perms)
- os.close(fd)
-
- if directory:
- tempfile.tempdir = old_tempdir
-
- return filename
-
-################################################################################
-
-apt_pkg.init()
-
-Cnf = apt_pkg.newConfiguration()
-apt_pkg.ReadConfigFileISC(Cnf,default_config)
-
-if which_conf_file() != default_config:
- apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
-
-################################################################################
import os, pg, sys
import apt_pkg
-import dak.lib.database as database
-import dak.lib.utils as utils
+import daklib.database
+import daklib.utils
################################################################################
def main ():
global Cnf, projectB
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('a', "architecture", "Ls::Options::Architecture", "HasArg"),
('b', "binarytype", "Ls::Options::BinaryType", "HasArg"),
if Options["Help"]:
usage()
if not packages:
- utils.fubar("need at least one package name as an argument.")
+ daklib.utils.fubar("need at least one package name as an argument.")
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- database.init(Cnf, projectB)
+ daklib.database.init(Cnf, projectB)
# If cron.daily is running; warn the user that our output might seem strange
if os.path.exists(os.path.join(Cnf["Dir::Root"], "Archive_Maintenance_In_Progress")):
- utils.warn("Archive maintenance is in progress; database inconsistencies are possible.")
+ daklib.utils.warn("Archive maintenance is in progress; database inconsistencies are possible.")
# Handle buildd maintenance helper options
if Options["GreaterOrEqual"] or Options["GreaterThan"]:
if Options["GreaterOrEqual"] and Options["GreaterThan"]:
- utils.fubar("-g/--greaterorequal and -G/--greaterthan are mutually exclusive.")
+ daklib.utils.fubar("-g/--greaterorequal and -G/--greaterthan are mutually exclusive.")
if not Options["Suite"]:
Options["Suite"] = "unstable"
# Parse -a/--architecture, -c/--component and -s/--suite
(con_suites, con_architectures, con_components, check_source) = \
- utils.parse_args(Options)
+ daklib.utils.parse_args(Options)
if Options["BinaryType"]:
if Options["BinaryType"] != "udeb" and Options["BinaryType"] != "deb":
- utils.fubar("Invalid binary type. 'udeb' and 'deb' recognised.")
+ daklib.utils.fubar("Invalid binary type. 'udeb' and 'deb' recognised.")
con_bintype = "AND b.type = '%s'" % (Options["BinaryType"])
# REMOVE ME TRAMP
if Options["BinaryType"] == "udeb":
suites.sort()
for suite in suites:
arches = d[pkg][version][suite]
- arches.sort(utils.arch_compare_sw)
+ arches.sort(daklib.utils.arch_compare_sw)
if Options["Format"] == "": #normal
sys.stdout.write("%10s | %10s | %13s | " % (pkg, version, suite))
sys.stdout.write(", ".join(arches))
import pg, sys
import apt_pkg
-import dak.lib.database as database
-import dak.lib.utils as utils
+import daklib.database
+import daklib.utils
################################################################################
global fixed_maintainer_cache
if not fixed_maintainer_cache.has_key(maintainer):
- fixed_maintainer_cache[maintainer] = utils.fix_maintainer(maintainer)[0]
+ fixed_maintainer_cache[maintainer] = daklib.utils.fix_maintainer(maintainer)[0]
return fixed_maintainer_cache[maintainer]
def get_maintainer (maintainer):
- return fix_maintainer(database.get_maintainer(maintainer))
+ return fix_maintainer(daklib.database.get_maintainer(maintainer))
def get_maintainer_from_source (source_id):
global maintainer_from_source_cache
def main():
global Cnf, projectB
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Make-Maintainers::Options::Help")]
if not Cnf.has_key("Make-Maintainers::Options::Help"):
usage()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- database.init(Cnf, projectB)
+ daklib.database.init(Cnf, projectB)
for suite in Cnf.SubTree("Suite").List():
suite = suite.lower()
# Process any additional Maintainer files (e.g. from non-US or pseudo packages)
for filename in extra_files:
- file = utils.open_file(filename)
+ file = daklib.utils.open_file(filename)
for line in file.readlines():
- line = utils.re_comments.sub('', line).strip()
+ line = daklib.utils.re_comments.sub('', line).strip()
if line == "":
continue
split = line.split()
import pg, sys
import apt_pkg
-import dak.lib.database as database
-import dak.lib.utils as utils
+import daklib.database
+import daklib.utils
################################################################################
def do_list(output_file, suite, component, otype):
global override
- suite_id = database.get_suite_id(suite)
+ suite_id = daklib.database.get_suite_id(suite)
if suite_id == -1:
- utils.fubar("Suite '%s' not recognised." % (suite))
+ daklib.utils.fubar("Suite '%s' not recognised." % (suite))
- component_id = database.get_component_id(component)
+ component_id = daklib.database.get_component_id(component)
if component_id == -1:
- utils.fubar("Component '%s' not recognised." % (component))
+ daklib.utils.fubar("Component '%s' not recognised." % (component))
- otype_id = database.get_override_type_id(otype)
+ otype_id = daklib.database.get_override_type_id(otype)
if otype_id == -1:
- utils.fubar("Type '%s' not recognised. (Valid types are deb, udeb and dsc)" % (otype))
+ daklib.utils.fubar("Type '%s' not recognised. (Valid types are deb, udeb and dsc)" % (otype))
override.setdefault(suite, {})
override[suite].setdefault(component, {})
q = projectB.query("SELECT o.package, s.section, o.maintainer FROM override o, section s WHERE o.suite = %s AND o.component = %s AND o.type = %s AND o.section = s.id ORDER BY s.section, o.package" % (suite_id, component_id, otype_id))
for i in q.getresult():
override[suite][component][otype][i[0]] = i
- output_file.write(utils.result_join(i)+'\n')
+ output_file.write(daklib.utils.result_join(i)+'\n')
else:
q = projectB.query("SELECT o.package, p.priority, s.section, o.maintainer, p.level FROM override o, priority p, section s WHERE o.suite = %s AND o.component = %s AND o.type = %s AND o.priority = p.id AND o.section = s.id ORDER BY s.section, p.level, o.package" % (suite_id, component_id, otype_id))
for i in q.getresult():
i = i[:-1]; # Strip the priority level
override[suite][component][otype][i[0]] = i
- output_file.write(utils.result_join(i)+'\n')
+ output_file.write(daklib.utils.result_join(i)+'\n')
################################################################################
def main ():
global Cnf, projectB, override
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Make-Overrides::Options::Help")]
for i in [ "help" ]:
if not Cnf.has_key("Make-Overrides::Options::%s" % (i)):
usage()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- database.init(Cnf, projectB)
+ daklib.database.init(Cnf, projectB)
for suite in Cnf.SubTree("Check-Overrides::OverrideSuites").List():
if Cnf.has_key("Suite::%s::Untouchable" % suite) and Cnf["Suite::%s::Untouchable" % suite] != 0:
elif otype == "dsc":
suffix = ".src"
filename = "%s/override.%s.%s%s" % (Cnf["Dir::Override"], override_suite, component.replace("non-US/", ""), suffix)
- output_file = utils.open_file(filename, 'w')
+ output_file = daklib.utils.open_file(filename, 'w')
do_list(output_file, suite, component, otype)
output_file.close()
import copy, os, pg, string, sys
import apt_pkg
import symlink_dists
-import dak.lib.database as database
-import dak.lib.logging as logging
-import dak.lib.utils as utils
+import daklib.database
+import daklib.logging
+import daklib.utils
################################################################################
def delete_packages(delete_versions, pkg, dominant_arch, suite,
dominant_version, delete_table, delete_col, packages):
- suite_id = database.get_suite_id(suite)
+ suite_id = daklib.database.get_suite_id(suite)
for version in delete_versions:
delete_unique_id = version[1]
if not packages.has_key(delete_unique_id):
def write_legacy_mixed_filelist(suite, list, packages, dislocated_files):
# Work out the filename
filename = os.path.join(Cnf["Dir::Lists"], "%s_-_all.list" % (suite))
- output = utils.open_file(filename, "w")
+ output = daklib.utils.open_file(filename, "w")
# Generate the final list of files
files = {}
for id in list:
else:
filename = path + filename
if files.has_key(filename):
- utils.warn("%s (in %s) is duplicated." % (filename, suite))
+ daklib.utils.warn("%s (in %s) is duplicated." % (filename, suite))
else:
files[filename] = ""
# Sort the files since apt-ftparchive doesn't
elif type == "deb":
arch = "binary-%s" % (arch)
filename = os.path.join(Cnf["Dir::Lists"], "%s_%s_%s.list" % (suite, component, arch))
- output = utils.open_file(filename, "w")
+ output = daklib.utils.open_file(filename, "w")
# Generate the final list of files
files = {}
for id in list:
else:
filename = path + filename
if files.has_key(pkg):
- utils.warn("%s (in %s/%s, %s) is duplicated." % (pkg, suite, component, filename))
+ daklib.utils.warn("%s (in %s/%s, %s) is duplicated." % (pkg, suite, component, filename))
else:
files[pkg] = filename
# Sort the files since apt-ftparchive doesn't
if not Options["Suite"]:
suites = Cnf.SubTree("Suite").List()
else:
- suites = utils.split_args(Options["Suite"])
+ suites = daklib.utils.split_args(Options["Suite"])
for suite in map(string.lower, suites):
d.setdefault(suite, {})
if not Options["Component"]:
components = Cnf.ValueList("Suite::%s::Components" % (suite))
else:
- components = utils.split_args(Options["Component"])
+ components = daklib.utils.split_args(Options["Component"])
udeb_components = Cnf.ValueList("Suite::%s::UdebComponents" % (suite))
udeb_components = udeb_components
for component in components:
if not Options["Architecture"]:
architectures = Cnf.ValueList("Suite::%s::Architectures" % (suite))
else:
- architectures = utils.split_args(Options["Architectures"])
+ architectures = daklib.utils.split_args(Options["Architecture"])
for arch in map(string.lower, architectures):
d[suite][component].setdefault(arch, {})
if arch == "source":
if not Options["Suite"]:
return 1
# Otherwise, look in what suites the user specified
- suites = utils.split_args(Options["Suite"])
+ suites = daklib.utils.split_args(Options["Suite"])
if "stable" in suites:
return 1
# If we're only doing a subset of suites, ensure we do enough to
# be able to do arch: all mapping.
if Options["Suite"]:
- suites = utils.split_args(Options["Suite"])
+ suites = daklib.utils.split_args(Options["Suite"])
for suite in suites:
archall_suite = Cnf.get("Make-Suite-File-List::ArchAllMap::%s" % (suite))
if archall_suite and archall_suite not in suites:
- utils.warn("Adding %s as %s maps Arch: all from it." % (archall_suite, suite))
+ daklib.utils.warn("Adding %s as %s maps Arch: all from it." % (archall_suite, suite))
suites.append(archall_suite)
Options["Suite"] = ",".join(suites)
(con_suites, con_architectures, con_components, check_source) = \
- utils.parse_args(Options)
+ daklib.utils.parse_args(Options)
if stable_dislocation_p():
dislocated_files = symlink_dists.find_dislocated_stable(Cnf, projectB)
def main():
global Cnf, projectB, Options, Logger
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('a', "architecture", "Make-Suite-File-List::Options::Architecture", "HasArg"),
('c', "component", "Make-Suite-File-List::Options::Component", "HasArg"),
('h', "help", "Make-Suite-File-List::Options::Help"),
usage()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- database.init(Cnf, projectB)
- Logger = logging.Logger(Cnf, "make-suite-file-list")
+ daklib.database.init(Cnf, projectB)
+ Logger = daklib.logging.Logger(Cnf, "make-suite-file-list")
do_da_do_da()
Logger.close()
import os
import cPickle
-import dak.lib.utils as utils
+import daklib.utils
## Master path is the main repository
#MASTER_PATH = "/org/ftp.debian.org/scratch/dsilvers/master"
elif S_ISREG(lnl[0]):
bdir.files[ln] = lnl[1]
else:
- utils.fubar( "Confused by %s/%s -- not a dir, link or file" %
+ daklib.utils.fubar( "Confused by %s/%s -- not a dir, link or file" %
( path, ln ) )
for d in dirs:
bdir.dirs[d] = self._internal_recurse( "%s/%s" % (path,d) )
def main ():
global Cnf
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Mirror-Split::Options::Help"),
('l',"list","Mirror-Split::Options::List"),
import pg, sys
import apt_pkg, logging
-import dak.lib.database as database
-import dak.lib.utils as utils
+import daklib.database
+import daklib.logging
+import daklib.utils
################################################################################
################################################################################
-# Shamelessly stolen from 'dak rm'. Should probably end up in utils.py
+# Shamelessly stolen from 'dak rm'. Should probably end up in daklib.utils.py
def game_over():
- answer = utils.our_raw_input("Continue (y/N)? ").lower()
+ answer = daklib.utils.our_raw_input("Continue (y/N)? ").lower()
if answer != "y":
print "Aborted."
sys.exit(1)
def main ():
global Cnf, projectB
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Override::Options::Help"),
('d',"done","Override::Options::Done", "HasArg"),
usage()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- database.init(Cnf, projectB)
+ daklib.database.init(Cnf, projectB)
if not arguments:
- utils.fubar("package name is a required argument.")
+ daklib.utils.fubar("package name is a required argument.")
package = arguments.pop(0)
suite = Options["Suite"]
if arguments and len(arguments) > 2:
- utils.fubar("Too many arguments")
+ daklib.utils.fubar("Too many arguments")
if arguments and len(arguments) == 1:
# Determine if the argument is a priority or a section...
elif r[0][1] == 1:
arguments = (".",arg)
else:
- utils.fubar("%s is not a valid section or priority" % (arg))
+ daklib.utils.fubar("%s is not a valid section or priority" % (arg))
# Retrieve current section/priority...
""" % (pg._quote(package,"str"), pg._quote(suite,"str")))
if q.ntuples() == 0:
- utils.fubar("Unable to find package %s" % (package))
+ daklib.utils.fubar("Unable to find package %s" % (package))
if q.ntuples() > 1:
- utils.fubar("%s is ambiguous. Matches %d packages" % (package,q.ntuples()))
+ daklib.utils.fubar("%s is ambiguous. Matches %d packages" % (package,q.ntuples()))
r = q.getresult()
oldsection = r[0][1]
pg._quote(newsection,"str")))
if q.ntuples() == 0:
- utils.fubar("Supplied section %s is invalid" % (newsection))
+ daklib.utils.fubar("Supplied section %s is invalid" % (newsection))
newsecid = q.getresult()[0][0]
q = projectB.query("SELECT id FROM priority WHERE priority=%s" % (
pg._quote(newpriority,"str")))
if q.ntuples() == 0:
- utils.fubar("Supplied priority %s is invalid" % (newpriority))
+ daklib.utils.fubar("Supplied priority %s is invalid" % (newpriority))
newprioid = q.getresult()[0][0]
if newpriority == oldpriority and newsection == oldsection:
if not Options.has_key("Done"):
pass
- #utils.warn("No bugs to close have been specified. Noone will know you have done this.")
+ #daklib.utils.warn("No bugs to close have been specified. Noone will know you have done this.")
else:
print "I: Will close bug(s): %s" % (Options["Done"])
game_over()
- Logger = logging.Logger(Cnf, "override")
+ Logger = daklib.logging.Logger(Cnf, "override")
projectB.query("BEGIN WORK")
# We're in "do it" mode, we have something to do... do it
Subst["__CC__"] = "X-DAK: dak override"
Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
- Subst["__WHOAMI__"] = utils.whoami()
+ Subst["__WHOAMI__"] = daklib.utils.whoami()
summary = "Concerning package %s...\n" % (package)
summary += "Operating on the %s suite\n" % (suite)
summary += "Changed section from %s to %s\n" % (oldsection,newsection)
Subst["__SUMMARY__"] = summary
- for bug in utils.split_args(Options["Done"]):
+ for bug in daklib.utils.split_args(Options["Done"]):
Subst["__BUG_NUMBER__"] = bug
- mail_message = utils.TemplateSubst(
+ mail_message = daklib.utils.TemplateSubst(
Subst,Cnf["Dir::Templates"]+"/override.bug-close")
- utils.send_mail(mail_message)
+ daklib.utils.send_mail(mail_message)
Logger.log(["closed bug",bug])
Logger.close()
import os, pg, re, stat, sys
import apt_pkg, apt_inst
-import dak.lib.database as database
-import dak.lib.utils as utils
+import daklib.database
+import daklib.utils
################################################################################
legacy_filename = qid["path"]+qid["filename"]
size = os.stat(legacy_filename)[stat.ST_SIZE]
if (poolized_size + size) > limit and limit >= 0:
- utils.warn("Hit %s limit." % (utils.size_type(limit)))
+ daklib.utils.warn("Hit %s limit." % (daklib.utils.size_type(limit)))
break
poolized_size += size
poolized_count += 1
destination_filename = base_filename
# Work out the source package name
if re_isadeb.match(base_filename):
- control = apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(legacy_filename)))
+ control = apt_pkg.ParseSection(apt_inst.debExtractControl(daklib.utils.open_file(legacy_filename)))
package = control.Find("Package", "")
source = control.Find("Source", package)
if source.find("(") != -1:
- m = utils.re_extract_src_version.match(source)
+ m = daklib.utils.re_extract_src_version.match(source)
source = m.group(1)
# If it's a binary, we need to also rename the file to include the architecture
version = control.Find("Version", "")
architecture = control.Find("Architecture", "")
if package == "" or version == "" or architecture == "":
- utils.fubar("%s: couldn't determine required information to rename .deb file." % (legacy_filename))
- version = utils.re_no_epoch.sub('', version)
+ daklib.utils.fubar("%s: couldn't determine required information to rename .deb file." % (legacy_filename))
+ version = daklib.utils.re_no_epoch.sub('', version)
destination_filename = "%s_%s_%s.deb" % (package, version, architecture)
else:
- m = utils.re_issource.match(base_filename)
+ m = daklib.utils.re_issource.match(base_filename)
if m:
source = m.group(1)
else:
- utils.fubar("expansion of source filename '%s' failed." % (legacy_filename))
+ daklib.utils.fubar("expansion of source filename '%s' failed." % (legacy_filename))
# Work out the component name
component = qid["component"]
if component == "":
q = projectB.query("SELECT DISTINCT(c.name) FROM override o, component c WHERE o.package = '%s' AND o.component = c.id;" % (source))
ql = q.getresult()
if not ql:
- utils.fubar("No override match for '%s' so I can't work out the component." % (source))
+ daklib.utils.fubar("No override match for '%s' so I can't work out the component." % (source))
if len(ql) > 1:
- utils.fubar("Multiple override matches for '%s' so I can't work out the component." % (source))
+ daklib.utils.fubar("Multiple override matches for '%s' so I can't work out the component." % (source))
component = ql[0][0]
# Work out the new location
q = projectB.query("SELECT l.id FROM location l, component c WHERE c.name = '%s' AND c.id = l.component AND l.type = 'pool';" % (component))
ql = q.getresult()
if len(ql) != 1:
- utils.fubar("couldn't determine location ID for '%s'. [query returned %d matches, not 1 as expected]" % (source, len(ql)))
+ daklib.utils.fubar("couldn't determine location ID for '%s'. [query returned %d matches, not 1 as expected]" % (source, len(ql)))
location_id = ql[0][0]
# First move the files to the new location
- pool_location = utils.poolify (source, component)
+ pool_location = daklib.utils.poolify (source, component)
pool_filename = pool_location + destination_filename
destination = Cnf["Dir::Pool"] + pool_location + destination_filename
if os.path.exists(destination):
- utils.fubar("'%s' already exists in the pool; serious FUBARity." % (legacy_filename))
+ daklib.utils.fubar("'%s' already exists in the pool; serious FUBARity." % (legacy_filename))
if verbose:
print "Moving: %s -> %s" % (legacy_filename, destination)
if not no_action:
- utils.move(legacy_filename, destination)
+ daklib.utils.move(legacy_filename, destination)
# Then Update the DB's files table
if verbose:
print "SQL: UPDATE files SET filename = '%s', location = '%s' WHERE id = '%s'" % (pool_filename, location_id, qid["files_id"])
if not no_action:
q = projectB.query("UPDATE files SET filename = '%s', location = '%s' WHERE id = '%s'" % (pool_filename, location_id, qid["files_id"]))
- sys.stderr.write("Poolized %s in %s files.\n" % (utils.size_type(poolized_size), poolized_count))
+ sys.stderr.write("Poolized %s in %s files.\n" % (daklib.utils.size_type(poolized_size), poolized_count))
################################################################################
def main ():
global Cnf, projectB
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
for i in ["help", "limit", "no-action", "verbose" ]:
if not Cnf.has_key("Poolize::Options::%s" % (i)):
usage()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
- database.init(Cnf, projectB)
+ daklib.database.init(Cnf, projectB)
if not Options["Limit"]:
limit = -1
# Sanity check the limit argument
if limit > 0 and limit < 1024:
- utils.fubar("-l/--limit takes an argument with a value in kilobytes.")
+ daklib.utils.fubar("-l/--limit takes an argument with a value in kilobytes.")
# Grab a list of all files not already in the pool
q = projectB.query("""
import errno, fcntl, os, sys, time, re
import apt_pkg
-import dak.lib.database as database
-import dak.lib.logging as logging
-import dak.lib.queue as queue
-import dak.lib.utils as utils
+import daklib.database
+import daklib.logging
+import daklib.queue
+import daklib.utils
###############################################################################
os.makedirs(self.log_dir, 02775)
# Open the logfile
self.log_filename = "%s/.install-urgencies-%s.new" % (self.log_dir, self.timestamp)
- self.log_file = utils.open_file(self.log_filename, 'w')
+ self.log_file = daklib.utils.open_file(self.log_filename, 'w')
self.writes = 0
def log (self, source, version, urgency):
self.log_file.close()
if self.writes:
new_filename = "%s/install-urgencies-%s" % (self.log_dir, self.timestamp)
- utils.move(self.log_filename, new_filename)
+ daklib.utils.move(self.log_filename, new_filename)
else:
os.unlink(self.log_filename)
def init():
global Cnf, Options, Upload, projectB, changes, dsc, dsc_files, files, pkg, Subst
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
('h',"help","Dinstall::Options::Help"),
if Options["Help"]:
usage()
- Upload = queue.Upload(Cnf)
+ Upload = daklib.queue.Upload(Cnf)
projectB = Upload.projectB
changes = Upload.pkg.changes
answer = 'I'
while prompt.find(answer) == -1:
- answer = utils.our_raw_input(prompt)
- m = queue.re_default_answer.match(prompt)
+ answer = daklib.utils.our_raw_input(prompt)
+ m = daklib.queue.re_default_answer.match(prompt)
if answer == "":
answer = m.group(1)
answer = answer[:1].upper()
Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
Subst["__REJECT_MESSAGE__"] = reject_message
Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
- reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-accepted.unaccept")
+ reject_mail_message = daklib.utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-accepted.unaccept")
# Write the rejection email out as the <foo>.reason file
reason_filename = os.path.basename(pkg.changes_file[:-8]) + ".reason"
os.write(fd, reject_mail_message)
os.close(fd)
- utils.send_mail(reject_mail_message)
+ daklib.utils.send_mail(reject_mail_message)
Logger.log(["unaccepted", pkg.changes_file])
###############################################################################
version = dsc["version"] # NB: not files[file]["version"], that has no epoch
maintainer = dsc["maintainer"]
maintainer = maintainer.replace("'", "\\'")
- maintainer_id = database.get_or_set_maintainer_id(maintainer)
- fingerprint_id = database.get_or_set_fingerprint_id(dsc["fingerprint"])
+ maintainer_id = daklib.database.get_or_set_maintainer_id(maintainer)
+ fingerprint_id = daklib.database.get_or_set_fingerprint_id(dsc["fingerprint"])
install_date = time.strftime("%Y-%m-%d")
filename = files[file]["pool name"] + file
dsc_component = files[file]["component"]
dsc_location_id = files[file]["location id"]
if not files[file].has_key("files id") or not files[file]["files id"]:
- files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], dsc_location_id)
+ files[file]["files id"] = daklib.database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], dsc_location_id)
projectB.query("INSERT INTO source (source, version, maintainer, file, install_date, sig_fpr) VALUES ('%s', '%s', %d, %d, '%s', %s)"
% (package, version, maintainer_id, files[file]["files id"], install_date, fingerprint_id))
for suite in changes["distribution"].keys():
- suite_id = database.get_suite_id(suite)
+ suite_id = daklib.database.get_suite_id(suite)
projectB.query("INSERT INTO src_associations (suite, source) VALUES (%d, currval('source_id_seq'))" % (suite_id))
# Add the source files to the DB (files and dsc_files)
# files id is stored in dsc_files by check_dsc().
files_id = dsc_files[dsc_file].get("files id", None)
if files_id == None:
- files_id = database.get_files_id(filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], dsc_location_id)
+ files_id = daklib.database.get_files_id(filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], dsc_location_id)
# FIXME: needs to check for -1/-2 and or handle exception
if files_id == None:
- files_id = database.set_files_id (filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], dsc_location_id)
+ files_id = daklib.database.set_files_id (filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], dsc_location_id)
projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files_id))
# Add the .deb files to the DB
version = files[file]["version"]
maintainer = files[file]["maintainer"]
maintainer = maintainer.replace("'", "\\'")
- maintainer_id = database.get_or_set_maintainer_id(maintainer)
- fingerprint_id = database.get_or_set_fingerprint_id(changes["fingerprint"])
+ maintainer_id = daklib.database.get_or_set_maintainer_id(maintainer)
+ fingerprint_id = daklib.database.get_or_set_fingerprint_id(changes["fingerprint"])
architecture = files[file]["architecture"]
- architecture_id = database.get_architecture_id (architecture)
+ architecture_id = daklib.database.get_architecture_id (architecture)
type = files[file]["dbtype"]
source = files[file]["source package"]
source_version = files[file]["source version"]
filename = files[file]["pool name"] + file
if not files[file].has_key("location id") or not files[file]["location id"]:
- files[file]["location id"] = database.get_location_id(Cnf["Dir::Pool"],files[file]["component"],utils.where_am_i())
+ files[file]["location id"] = daklib.database.get_location_id(Cnf["Dir::Pool"],files[file]["component"],daklib.utils.where_am_i())
if not files[file].has_key("files id") or not files[file]["files id"]:
- files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["location id"])
- source_id = database.get_source_id (source, source_version)
+ files[file]["files id"] = daklib.database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["location id"])
+ source_id = daklib.database.get_source_id (source, source_version)
if source_id:
projectB.query("INSERT INTO binaries (package, version, maintainer, source, architecture, file, type, sig_fpr) VALUES ('%s', '%s', %d, %d, %d, %d, '%s', %d)"
% (package, version, maintainer_id, source_id, architecture_id, files[file]["files id"], type, fingerprint_id))
projectB.query("INSERT INTO binaries (package, version, maintainer, architecture, file, type, sig_fpr) VALUES ('%s', '%s', %d, %d, %d, '%s', %d)"
% (package, version, maintainer_id, architecture_id, files[file]["files id"], type, fingerprint_id))
for suite in changes["distribution"].keys():
- suite_id = database.get_suite_id(suite)
+ suite_id = daklib.database.get_suite_id(suite)
projectB.query("INSERT INTO bin_associations (suite, bin) VALUES (%d, currval('binaries_id_seq'))" % (suite_id))
# If the .orig.tar.gz is in a legacy directory we need to poolify
continue
# First move the files to the new location
legacy_filename = qid["path"] + qid["filename"]
- pool_location = utils.poolify (changes["source"], files[file]["component"])
+ pool_location = daklib.utils.poolify (changes["source"], files[file]["component"])
pool_filename = pool_location + os.path.basename(qid["filename"])
destination = Cnf["Dir::Pool"] + pool_location
- utils.move(legacy_filename, destination)
+ daklib.utils.move(legacy_filename, destination)
# Then Update the DB's files table
q = projectB.query("UPDATE files SET filename = '%s', location = '%s' WHERE id = '%s'" % (pool_filename, dsc_location_id, qid["files_id"]))
old_filename = ql[0] + ql[1]
file_size = ql[2]
file_md5sum = ql[3]
- new_filename = utils.poolify(changes["source"], dsc_component) + os.path.basename(old_filename)
- new_files_id = database.get_files_id(new_filename, file_size, file_md5sum, dsc_location_id)
+ new_filename = daklib.utils.poolify(changes["source"], dsc_component) + os.path.basename(old_filename)
+ new_files_id = daklib.database.get_files_id(new_filename, file_size, file_md5sum, dsc_location_id)
if new_files_id == None:
- utils.copy(old_filename, Cnf["Dir::Pool"] + new_filename)
+ daklib.utils.copy(old_filename, Cnf["Dir::Pool"] + new_filename)
new_files_id = database.set_files_id(new_filename, file_size, file_md5sum, dsc_location_id)
projectB.query("UPDATE dsc_files SET file = %s WHERE source = %s AND file = %s" % (new_files_id, source_id, orig_tar_id))
# Install the files into the pool
for file in files.keys():
destination = Cnf["Dir::Pool"] + files[file]["pool name"] + file
- utils.move(file, destination)
+ daklib.utils.move(file, destination)
Logger.log(["installed", file, files[file]["type"], files[file]["size"], files[file]["architecture"]])
install_bytes += float(files[file]["size"])
if Cnf.has_key("Suite::%s::CopyDotDak" % (suite)):
copy_dot_dak[Cnf["Suite::%s::CopyDotDak" % (suite)]] = ""
for dest in copy_changes.keys():
- utils.copy(pkg.changes_file, Cnf["Dir::Root"] + dest)
+ daklib.utils.copy(pkg.changes_file, Cnf["Dir::Root"] + dest)
for dest in copy_dot_dak.keys():
- utils.copy(Upload.pkg.changes_file[:-8]+".dak", dest)
+ daklib.utils.copy(Upload.pkg.changes_file[:-8]+".dak", dest)
projectB.query("COMMIT WORK")
# Move the .changes into the 'done' directory
- utils.move (pkg.changes_file,
+ daklib.utils.move (pkg.changes_file,
os.path.join(Cnf["Dir::Queue::Done"], os.path.basename(pkg.changes_file)))
# Remove the .dak file
projectB.query("UPDATE queue_build SET in_queue = 'f', last_used = '%s' WHERE filename = '%s' AND suite = %s" % (now_date, dest, suite_id))
if not Cnf.FindB("Dinstall::SecurityQueueBuild"):
# Update the symlink to point to the new location in the pool
- pool_location = utils.poolify (changes["source"], files[file]["component"])
+ pool_location = daklib.utils.poolify (changes["source"], files[file]["component"])
src = os.path.join(Cnf["Dir::Pool"], pool_location, os.path.basename(file))
if os.path.islink(dest):
os.unlink(dest)
q = projectB.query("SELECT id FROM source WHERE source = '%s' AND version = '%s'" % (package, version))
ql = q.getresult()
if not ql:
- utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s) in source table." % (package, version))
+ daklib.utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s) in source table." % (package, version))
source_id = ql[0][0]
suite_id = database.get_suite_id('proposed-updates')
projectB.query("DELETE FROM src_associations WHERE suite = '%s' AND source = '%s'" % (suite_id, source_id))
# Reduce the query results to a list of version numbers
ql = map(lambda x: x[0], q.getresult())
if not ql:
- utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s for %s architecture) in binaries table." % (package, version, architecture))
+ daklib.utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s for %s architecture) in binaries table." % (package, version, architecture))
else:
for x in ql:
if re.match(re.compile(r"%s((\.0)?\.)|(\+b)\d+$" % re.escape(version)),x):
projectB.query("COMMIT WORK")
- utils.move (pkg.changes_file, Cnf["Dir::Morgue"] + '/process-accepted/' + os.path.basename(pkg.changes_file))
+ daklib.utils.move (pkg.changes_file, Cnf["Dir::Morgue"] + '/process-accepted/' + os.path.basename(pkg.changes_file))
## Update the Stable ChangeLog file
new_changelog_filename = Cnf["Dir::Root"] + Cnf["Suite::Stable::ChangeLogBase"] + ".ChangeLog"
if os.path.exists(new_changelog_filename):
os.unlink (new_changelog_filename)
- new_changelog = utils.open_file(new_changelog_filename, 'w')
+ new_changelog = daklib.utils.open_file(new_changelog_filename, 'w')
for file in files.keys():
if files[file]["type"] == "deb":
new_changelog.write("stable/%s/binary-%s/%s\n" % (files[file]["component"], files[file]["architecture"], file))
- elif utils.re_issource.match(file):
+ elif daklib.utils.re_issource.match(file):
new_changelog.write("stable/%s/source/%s\n" % (files[file]["component"], file))
else:
new_changelog.write("%s\n" % (file))
- chop_changes = queue.re_fdnic.sub("\n", changes["changes"])
+ chop_changes = daklib.queue.re_fdnic.sub("\n", changes["changes"])
new_changelog.write(chop_changes + '\n\n')
if os.access(changelog_filename, os.R_OK) != 0:
- changelog = utils.open_file(changelog_filename)
+ changelog = daklib.utils.open_file(changelog_filename)
new_changelog.write(changelog.read())
new_changelog.close()
if os.access(changelog_filename, os.R_OK) != 0:
os.unlink(changelog_filename)
- utils.move(new_changelog_filename, changelog_filename)
+ daklib.utils.move(new_changelog_filename, changelog_filename)
install_count += 1
if not Options["No-Mail"] and changes["architecture"].has_key("source"):
Subst["__SUITE__"] = " into stable"
Subst["__SUMMARY__"] = summary
- mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-accepted.installed")
- utils.send_mail(mail_message)
+ mail_message = daklib.utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-accepted.installed")
+ daklib.utils.send_mail(mail_message)
Upload.announce(short_summary, 1)
# Finally remove the .dak file
# Check that we aren't going to clash with the daily cron job
if not Options["No-Action"] and os.path.exists("%s/Archive_Maintenance_In_Progress" % (Cnf["Dir::Root"])) and not Options["No-Lock"]:
- utils.fubar("Archive maintenance in progress. Try again later.")
+ daklib.utils.fubar("Archive maintenance in progress. Try again later.")
# If running from within proposed-updates; assume an install to stable
if os.getcwd().find('proposed-updates') != -1:
fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError, e:
if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
- utils.fubar("Couldn't obtain lock; assuming another 'dak process-accepted' is already running.")
+ daklib.utils.fubar("Couldn't obtain lock; assuming another 'dak process-accepted' is already running.")
else:
raise
- Logger = Upload.Logger = logging.Logger(Cnf, "process-accepted")
+ Logger = Upload.Logger = daklib.logging.Logger(Cnf, "process-accepted")
if not installing_to_stable and Cnf.get("Dir::UrgencyLog"):
Urgency_Logger = Urgency_Log(Cnf)
Subst["__BCC__"] = bcc
# Sort the .changes files so that we process sourceful ones first
- changes_files.sort(utils.changes_compare)
+ changes_files.sort(daklib.utils.changes_compare)
# Process the changes files
for changes_file in changes_files:
sets = "set"
if install_count > 1:
sets = "sets"
- sys.stderr.write("Installed %d package %s, %s.\n" % (install_count, sets, utils.size_type(int(install_bytes))))
+ sys.stderr.write("Installed %d package %s, %s.\n" % (install_count, sets, daklib.utils.size_type(int(install_bytes))))
Logger.log(["total",install_count,install_bytes])
if not Options["No-Action"]:
import copy, errno, os, readline, stat, sys, time
import apt_pkg, apt_inst
import examine_package
-import dak.lib.database as database
-import dak.lib.logging as logging
-import dak.lib.queue as queue
-import dak.lib.utils as utils
+import daklib.database
+import daklib.logging
+import daklib.queue
+import daklib.utils
# Globals
Cnf = None
source_package = files[file]["source package"]
if not Upload.pkg.changes["architecture"].has_key("source") \
and not Upload.source_exists(source_package, source_version, Upload.pkg.changes["distribution"].keys()):
- source_epochless_version = utils.re_no_epoch.sub('', source_version)
+ source_epochless_version = daklib.utils.re_no_epoch.sub('', source_version)
dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
if not os.path.exists(Cnf["Dir::Queue::Accepted"] + '/' + dsc_filename):
reject("no source found for %s %s (%s)." % (source_package, source_version, file))
prompt = "[R]eject, Skip, Quit ?"
while prompt.find(answer) == -1:
- answer = utils.our_raw_input(prompt)
- m = queue.re_default_answer.match(prompt)
+ answer = daklib.utils.our_raw_input(prompt)
+ m = daklib.queue.re_default_answer.match(prompt)
if answer == "":
answer = m.group(1)
answer = answer[:1].upper()
elif f["type"] == "orig.tar.gz" or f["type"] == "tar.gz" or f["type"] == "diff.gz" or f["type"] == "dsc":
type = "dsc"
else:
- utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (type))
+ daklib.utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (type))
# Validate the override type
type_id = database.get_override_type_id(type)
if type_id == -1:
- utils.fubar("invalid type (%s) for new. Say wha?" % (type))
+ daklib.utils.fubar("invalid type (%s) for new. Say wha?" % (type))
return type
def edit_new (new):
# Write the current data to a temporary file
- temp_filename = utils.temp_filename()
- temp_file = utils.open_file(temp_filename, 'w')
+ temp_filename = daklib.utils.temp_filename()
+ temp_file = daklib.utils.open_file(temp_filename, 'w')
print_new (new, 0, temp_file)
temp_file.close()
# Spawn an editor on that file
editor = os.environ.get("EDITOR","vi")
result = os.system("%s %s" % (editor, temp_filename))
if result != 0:
- utils.fubar ("%s invocation failed for %s." % (editor, temp_filename), result)
+ daklib.utils.fubar ("%s invocation failed for %s." % (editor, temp_filename), result)
# Read the edited data back in
- temp_file = utils.open_file(temp_filename)
+ temp_file = daklib.utils.open_file(temp_filename)
lines = temp_file.readlines()
temp_file.close()
os.unlink(temp_filename)
s[len(s):3] = [None] * (3-len(s))
(pkg, priority, section) = s[:3]
if not new.has_key(pkg):
- utils.warn("Ignoring unknown package '%s'" % (pkg))
+ daklib.utils.warn("Ignoring unknown package '%s'" % (pkg))
else:
# Strip off any invalid markers, print_new will readd them.
if section.endswith("[!]"):
edit_priority = edit_section = 0
while prompt.find(answer) == -1:
- answer = utils.our_raw_input(prompt)
- m = queue.re_default_answer.match(prompt)
+ answer = daklib.utils.our_raw_input(prompt)
+ m = daklib.queue.re_default_answer.match(prompt)
if answer == "":
answer = m.group(1)
answer = answer[:1].upper()
readline.set_completer(Priorities.complete)
got_priority = 0
while not got_priority:
- new_priority = utils.our_raw_input("New priority: ").strip()
+ new_priority = daklib.utils.our_raw_input("New priority: ").strip()
if new_priority not in Priorities.priorities:
print "E: '%s' is not a valid priority, try again." % (new_priority)
else:
readline.set_completer(Sections.complete)
got_section = 0
while not got_section:
- new_section = utils.our_raw_input("New section: ").strip()
+ new_section = daklib.utils.our_raw_input("New section: ").strip()
if new_section not in Sections.sections:
print "E: '%s' is not a valid section, try again." % (new_section)
else:
got_answer = 0
while not got_answer:
- answer = utils.our_raw_input(prompt)
- if not utils.str_isnum(answer):
+ answer = daklib.utils.our_raw_input(prompt)
+ if not daklib.utils.str_isnum(answer):
answer = answer[:1].upper()
if answer == "E" or answer == "D":
got_answer = 1
- elif queue.re_isanum.match (answer):
+ elif daklib.queue.re_isanum.match (answer):
answer = int(answer)
if (answer < 1) or (answer > index):
print "%s is not a valid index (%s). Please retry." % (answer, index_range(index))
def edit_note(note):
# Write the current data to a temporary file
- temp_filename = utils.temp_filename()
- temp_file = utils.open_file(temp_filename, 'w')
+ temp_filename = daklib.utils.temp_filename()
+ temp_file = daklib.utils.open_file(temp_filename, 'w')
temp_file.write(note)
temp_file.close()
editor = os.environ.get("EDITOR","vi")
answer = 'E'
while answer == 'E':
os.system("%s %s" % (editor, temp_filename))
- temp_file = utils.open_file(temp_filename)
+ temp_file = daklib.utils.open_file(temp_filename)
note = temp_file.read().rstrip()
temp_file.close()
print "Note:"
- print utils.prefix_multi_line_string(note," ")
+ print daklib.utils.prefix_multi_line_string(note," ")
prompt = "[D]one, Edit, Abandon, Quit ?"
answer = "XXX"
while prompt.find(answer) == -1:
- answer = utils.our_raw_input(prompt)
- m = queue.re_default_answer.search(prompt)
+ answer = daklib.utils.our_raw_input(prompt)
+ m = daklib.queue.re_default_answer.search(prompt)
if answer == "":
answer = m.group(1)
answer = answer[:1].upper()
sys.stdout = stdout_fd
except IOError, e:
if errno.errorcode[e.errno] == 'EPIPE':
- utils.warn("[examine_package] Caught EPIPE; skipping.")
+ daklib.utils.warn("[examine_package] Caught EPIPE; skipping.")
pass
else:
raise
except KeyboardInterrupt:
- utils.warn("[examine_package] Caught C-c; skipping.")
+ daklib.utils.warn("[examine_package] Caught C-c; skipping.")
pass
################################################################################
summary = ""
for file in files.keys():
if files[file]["type"] == "deb":
- control = apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)))
+ control = apt_pkg.ParseSection(apt_inst.debExtractControl(daklib.utils.open_file(file)))
summary += "\n"
summary += "Package: %s\n" % (control.Find("Package"))
summary += "Description: %s\n" % (control.Find("Description"))
Upload.Subst["__BINARY_DESCRIPTIONS__"] = summary
- bxa_mail = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-new.bxa_notification")
- utils.send_mail(bxa_mail)
+ bxa_mail = daklib.utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-new.bxa_notification")
+ daklib.utils.send_mail(bxa_mail)
################################################################################
def prod_maintainer ():
# Here we prepare an editor and get them ready to prod...
- temp_filename = utils.temp_filename()
+ temp_filename = daklib.utils.temp_filename()
editor = os.environ.get("EDITOR","vi")
answer = 'E'
while answer == 'E':
os.system("%s %s" % (editor, temp_filename))
- file = utils.open_file(temp_filename)
+ file = daklib.utils.open_file(temp_filename)
prod_message = "".join(file.readlines())
file.close()
print "Prod message:"
- print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
+ print daklib.utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
prompt = "[P]rod, Edit, Abandon, Quit ?"
answer = "XXX"
while prompt.find(answer) == -1:
- answer = utils.our_raw_input(prompt)
- m = queue.re_default_answer.search(prompt)
+ answer = daklib.utils.our_raw_input(prompt)
+ m = daklib.queue.re_default_answer.search(prompt)
if answer == "":
answer = m.group(1)
answer = answer[:1].upper()
elif answer == 'Q':
sys.exit(0)
# Otherwise, do the proding...
- user_email_address = utils.whoami() + " <%s>" % (
+ user_email_address = daklib.utils.whoami() + " <%s>" % (
Cnf["Dinstall::MyAdminAddress"])
Subst = Upload.Subst
Subst["__PROD_MESSAGE__"] = prod_message
Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
- prod_mail_message = utils.TemplateSubst(
+ prod_mail_message = daklib.utils.TemplateSubst(
Subst,Cnf["Dir::Templates"]+"/process-new.prod")
# Send the prod mail if appropriate
if not Cnf["Dinstall::Options::No-Mail"]:
- utils.send_mail(prod_mail_message)
+ daklib.utils.send_mail(prod_mail_message)
print "Sent proding message"
for suite in changes["suite"].keys():
-        suite_id = database.get_suite_id(suite)
+        suite_id = daklib.database.get_suite_id(suite)
if suite_id == -1:
- utils.fubar("%s has invalid suite '%s' (possibly overriden). say wha?" % (changes, suite))
+ daklib.utils.fubar("%s has invalid suite '%s' (possibly overriden). say wha?" % (changes, suite))
# The main NEW processing loop
done = 0
prompt += "Edit overrides, Check, Manual reject, Note edit, Prod, [S]kip, Quit ?"
while prompt.find(answer) == -1:
- answer = utils.our_raw_input(prompt)
- m = queue.re_default_answer.search(prompt)
+ answer = daklib.utils.our_raw_input(prompt)
+ m = daklib.queue.re_default_answer.search(prompt)
if answer == "":
answer = m.group(1)
answer = answer[:1].upper()
elif answer == 'P':
prod_maintainer()
elif answer == 'R':
- confirm = utils.our_raw_input("Really clear note (y/N)? ").lower()
+ confirm = daklib.utils.our_raw_input("Really clear note (y/N)? ").lower()
if confirm == "y":
del changes["process-new note"]
elif answer == 'S':
def init():
global Cnf, Options, Logger, Upload, projectB, Sections, Priorities
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('a',"automatic","Process-New::Options::Automatic"),
('h',"help","Process-New::Options::Help"),
if Options["Help"]:
usage()
- Upload = queue.Upload(Cnf)
+ Upload = daklib.queue.Upload(Cnf)
if not Options["No-Action"]:
- Logger = Upload.Logger = logging.Logger(Cnf, "process-new")
+ Logger = Upload.Logger = daklib.logging.Logger(Cnf, "process-new")
projectB = Upload.projectB
prompt = "Manual reject, [S]kip, Quit ?"
while prompt.find(answer) == -1:
- answer = utils.our_raw_input(prompt)
- m = queue.re_default_answer.search(prompt)
+ answer = daklib.utils.our_raw_input(prompt)
+ m = daklib.queue.re_default_answer.search(prompt)
if answer == "":
answer = m.group(1)
answer = answer[:1].upper()
if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EEXIST':
retry += 1
if (retry >= 10):
- utils.fubar("Couldn't obtain lock; assuming 'dak process-unchecked' is already running.")
+ daklib.utils.fubar("Couldn't obtain lock; assuming 'dak process-unchecked' is already running.")
else:
print("Unable to get accepted lock (try %d of 10)" % retry)
time.sleep(60)
sets = "set"
if accept_count > 1:
sets = "sets"
- sys.stderr.write("Accepted %d package %s, %s.\n" % (accept_count, sets, utils.size_type(int(accept_bytes))))
+ sys.stderr.write("Accepted %d package %s, %s.\n" % (accept_count, sets, daklib.utils.size_type(int(accept_bytes))))
Logger.log(["total",accept_count,accept_bytes])
if not Options["No-Action"]:
Upload.Subst["__BCC__"] = bcc
for changes_file in changes_files:
- changes_file = utils.validate_changes_file_arg(changes_file, 0)
+ changes_file = daklib.utils.validate_changes_file_arg(changes_file, 0)
if not changes_file:
continue
print "\n" + changes_file
import commands, errno, fcntl, os, re, shutil, stat, sys, time, tempfile, traceback
import apt_inst, apt_pkg
-import dak.lib.database as database
-import dak.lib.logging as logging
-import dak.lib.queue as queue
-import dak.lib.utils as utils
+import daklib.database
+import daklib.logging
+import daklib.queue
+import daklib.utils
from types import *
apt_pkg.init()
Cnf = apt_pkg.newConfiguration()
- apt_pkg.ReadConfigFileISC(Cnf,utils.which_conf_file())
+ apt_pkg.ReadConfigFileISC(Cnf,daklib.utils.which_conf_file())
Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
('h',"help","Dinstall::Options::Help"),
if Options["Help"]:
usage()
- Upload = queue.Queue(Cnf)
+ Upload = daklib.queue.Queue(Cnf)
changes = Upload.pkg.changes
dsc = Upload.pkg.dsc
for file in in_holding.keys():
if os.path.exists(file):
if file.find('/') != -1:
- utils.fubar("WTF? clean_holding() got a file ('%s') with / in it!" % (file))
+ daklib.utils.fubar("WTF? clean_holding() got a file ('%s') with / in it!" % (file))
else:
os.unlink(file)
in_holding = {}
# Parse the .changes field into a dictionary
try:
- changes.update(utils.parse_changes(filename))
- except utils.cant_open_exc:
+ changes.update(daklib.utils.parse_changes(filename))
+ except daklib.utils.cant_open_exc:
reject("%s: can't read file." % (filename))
return 0
- except utils.changes_parse_error_exc, line:
+ except daklib.utils.changes_parse_error_exc, line:
reject("%s: parse error, can't grok: %s." % (filename, line))
return 0
# Parse the Files field from the .changes into another dictionary
try:
- files.update(utils.build_file_list(changes))
- except utils.changes_parse_error_exc, line:
+ files.update(daklib.utils.build_file_list(changes))
+ except daklib.utils.changes_parse_error_exc, line:
reject("%s: parse error, can't grok: %s." % (filename, line))
- except utils.nk_format_exc, format:
+ except daklib.utils.nk_format_exc, format:
reject("%s: unknown format '%s'." % (filename, format))
return 0
try:
(changes["maintainer822"], changes["maintainer2047"],
changes["maintainername"], changes["maintaineremail"]) = \
- utils.fix_maintainer (changes["maintainer"])
- except utils.ParseMaintError, msg:
+ daklib.utils.fix_maintainer (changes["maintainer"])
+ except daklib.utils.ParseMaintError, msg:
reject("%s: Maintainer field ('%s') failed to parse: %s" \
% (filename, changes["maintainer"], msg))
try:
(changes["changedby822"], changes["changedby2047"],
changes["changedbyname"], changes["changedbyemail"]) = \
- utils.fix_maintainer (changes.get("changed-by", ""))
- except utils.ParseMaintError, msg:
+ daklib.utils.fix_maintainer (changes.get("changed-by", ""))
+ except daklib.utils.ParseMaintError, msg:
(changes["changedby822"], changes["changedby2047"],
changes["changedbyname"], changes["changedbyemail"]) = \
("", "", "", "")
# Ensure all the values in Closes: are numbers
if changes.has_key("closes"):
for i in changes["closes"].keys():
- if queue.re_isanum.match (i) == None:
+ if daklib.queue.re_isanum.match (i) == None:
reject("%s: `%s' from Closes field isn't a number." % (filename, i))
# chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
- changes["chopversion"] = utils.re_no_epoch.sub('', changes["version"])
- changes["chopversion2"] = utils.re_no_revision.sub('', changes["chopversion"])
+ changes["chopversion"] = daklib.utils.re_no_epoch.sub('', changes["version"])
+ changes["chopversion2"] = daklib.utils.re_no_revision.sub('', changes["chopversion"])
# Check there isn't already a changes file of the same name in one
# of the queue directories.
(result, output) = commands.getstatusoutput(cmd)
if result != 0:
reject("%s: 'ar t' invocation failed." % (filename))
- reject(utils.prefix_multi_line_string(output, " [ar output:] "), "")
+ reject(daklib.utils.prefix_multi_line_string(output, " [ar output:] "), "")
chunks = output.split('\n')
if len(chunks) != 3:
reject("%s: found %d chunks, expected 3." % (filename, len(chunks)))
def check_files():
global reprocess
- archive = utils.where_am_i()
+ archive = daklib.utils.where_am_i()
file_keys = files.keys()
# if reprocess is 2 we've already done this and we're checking
for dir in [ "Accepted", "Byhand", "New" ]:
if os.path.exists(Cnf["Dir::Queue::%s" % (dir) ]+'/'+file):
reject("%s file already exists in the %s directory." % (file, dir))
- if not utils.re_taint_free.match(file):
+ if not daklib.utils.re_taint_free.match(file):
reject("!!WARNING!! tainted filename: '%s'." % (file))
# Check the file is readable
if os.access(file,os.R_OK) == 0:
files[file]["byhand"] = 1
files[file]["type"] = "byhand"
# Checks for a binary package...
- elif utils.re_isadeb.match(file):
+ elif daklib.utils.re_isadeb.match(file):
has_binaries = 1
files[file]["type"] = "deb"
# Extract package control information
- deb_file = utils.open_file(file)
+ deb_file = daklib.utils.open_file(file)
try:
control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
except:
source = files[file]["source"]
source_version = ""
if source.find("(") != -1:
- m = utils.re_extract_src_version.match(source)
+ m = daklib.utils.re_extract_src_version.match(source)
source = m.group(1)
source_version = m.group(2)
if not source_version:
files[file]["source version"] = source_version
# Ensure the filename matches the contents of the .deb
- m = utils.re_isadeb.match(file)
+ m = daklib.utils.re_isadeb.match(file)
# package name
file_package = m.group(1)
if files[file]["package"] != file_package:
reject("%s: package part of filename (%s) does not match package name in the %s (%s)." % (file, file_package, files[file]["dbtype"], files[file]["package"]))
- epochless_version = utils.re_no_epoch.sub('', control.Find("Version"))
+ epochless_version = daklib.utils.re_no_epoch.sub('', control.Find("Version"))
# version
file_version = m.group(2)
if epochless_version != file_version:
# Check in the SQL database
if not Upload.source_exists(source_package, source_version, changes["distribution"].keys()):
# Check in one of the other directories
- source_epochless_version = utils.re_no_epoch.sub('', source_version)
+ source_epochless_version = daklib.utils.re_no_epoch.sub('', source_version)
dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
if os.path.exists(Cnf["Dir::Queue::Byhand"] + '/' + dsc_filename):
files[file]["byhand"] = 1
# Checks for a source package...
else:
- m = utils.re_issource.match(file)
+ m = daklib.utils.re_issource.match(file)
if m:
has_source = 1
files[file]["package"] = m.group(1)
# Check the signature of a .dsc file
if files[file]["type"] == "dsc":
- dsc["fingerprint"] = utils.check_signature(file, reject)
+ dsc["fingerprint"] = daklib.utils.check_signature(file, reject)
files[file]["architecture"] = "source"
files[file]["location id"] = location_id
# Check the md5sum & size against existing files (if any)
- files[file]["pool name"] = utils.poolify (changes["source"], files[file]["component"])
+ files[file]["pool name"] = daklib.utils.poolify (changes["source"], files[file]["component"])
-    files_id = database.get_files_id(files[file]["pool name"] + file, files[file]["size"], files[file]["md5sum"], files[file]["location id"])
+    files_id = daklib.database.get_files_id(files[file]["pool name"] + file, files[file]["size"], files[file]["md5sum"], files[file]["location id"])
if files_id == -1:
reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (file))
# Parse the .dsc file
try:
- dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
- except utils.cant_open_exc:
+ dsc.update(daklib.utils.parse_changes(dsc_filename, signing_rules=1))
+ except daklib.utils.cant_open_exc:
# if not -n copy_to_holding() will have done this for us...
if Options["No-Action"]:
reject("%s: can't read file." % (dsc_filename))
- except utils.changes_parse_error_exc, line:
+ except daklib.utils.changes_parse_error_exc, line:
reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
- except utils.invalid_dsc_format_exc, line:
+ except daklib.utils.invalid_dsc_format_exc, line:
reject("%s: syntax error on line %s." % (dsc_filename, line))
# Build up the file list of files mentioned by the .dsc
try:
- dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1))
- except utils.no_files_exc:
+ dsc_files.update(daklib.utils.build_file_list(dsc, is_a_dsc=1))
+ except daklib.utils.no_files_exc:
reject("%s: no Files: field." % (dsc_filename))
return 0
- except utils.changes_parse_error_exc, line:
+ except daklib.utils.changes_parse_error_exc, line:
reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
return 0
# Validate the Maintainer field
try:
- utils.fix_maintainer (dsc["maintainer"])
- except utils.ParseMaintError, msg:
+ daklib.utils.fix_maintainer (dsc["maintainer"])
+ except daklib.utils.ParseMaintError, msg:
reject("%s: Maintainer field ('%s') failed to parse: %s" \
% (dsc_filename, dsc["maintainer"], msg))
pass
# Ensure the version number in the .dsc matches the version number in the .changes
- epochless_dsc_version = utils.re_no_epoch.sub('', dsc["version"])
+ epochless_dsc_version = daklib.utils.re_no_epoch.sub('', dsc["version"])
changes_version = files[dsc_filename]["version"]
if epochless_dsc_version != files[dsc_filename]["version"]:
reject("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
# Ensure there is a .tar.gz in the .dsc file
has_tar = 0
for f in dsc_files.keys():
- m = utils.re_issource.match(f)
+ m = daklib.utils.re_issource.match(f)
if not m:
reject("%s: %s in Files field not recognised as source." % (dsc_filename, f))
type = m.group(3)
# Create a symlink mirror of the source files in our temporary directory
for f in files.keys():
- m = utils.re_issource.match(f)
+ m = daklib.utils.re_issource.match(f)
if m:
src = os.path.join(source_dir, f)
# If a file is missing for whatever reason, give up.
(result, output) = commands.getstatusoutput(cmd)
if (result != 0):
reject("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
- reject(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
+ reject(daklib.utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
return
if not Cnf.Find("Dir::Queue::BTSVersionTrack"):
return
# Get the upstream version
- upstr_version = utils.re_no_epoch.sub('', dsc["version"])
+ upstr_version = daklib.utils.re_no_epoch.sub('', dsc["version"])
if re_strip_revision.search(upstr_version):
upstr_version = re_strip_revision.sub('', upstr_version)
# Parse the changelog
dsc["bts changelog"] = ""
- changelog_file = utils.open_file(changelog_filename)
+ changelog_file = daklib.utils.open_file(changelog_filename)
for line in changelog_file.readlines():
m = re_changelog_versions.match(line)
if m:
shutil.rmtree(tmpdir)
except OSError, e:
if errno.errorcode[e.errno] != 'EACCES':
- utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
+ daklib.utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
reject("%s: source tree could not be cleanly removed." % (dsc["source"]))
# We probably have u-r or u-w directories so chmod everything
cmd = "chmod -R u+rwx %s" % (tmpdir)
result = os.system(cmd)
if result != 0:
- utils.fubar("'%s' failed with result %s." % (cmd, result))
+ daklib.utils.fubar("'%s' failed with result %s." % (cmd, result))
shutil.rmtree(tmpdir)
except:
- utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
+ daklib.utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
################################################################################
def check_md5sums ():
for file in files.keys():
try:
- file_handle = utils.open_file(file)
- except utils.cant_open_exc:
+ file_handle = daklib.utils.open_file(file)
+ except daklib.utils.cant_open_exc:
continue
# Check md5sum
for file in dsc_files.keys():
try:
- file_handle = utils.open_file(file)
- except utils.cant_open_exc:
+ file_handle = daklib.utils.open_file(file)
+ except daklib.utils.cant_open_exc:
continue
# Check md5sum
if files[filename]["type"] == "deb":
tar.reset()
try:
- deb_file = utils.open_file(filename)
+ deb_file = daklib.utils.open_file(filename)
apt_inst.debExtract(deb_file,tar.callback,"control.tar.gz")
deb_file.seek(0)
try:
break
if queue:
print "%s for %s\n%s%s" % (
            queue.upper(), ", ".join(changes["distribution"].keys()),
reject_message, summary),
queuekey = queue[0].upper()
if queuekey in "RQSA":
answer = 'A'
while prompt.find(answer) == -1:
- answer = utils.our_raw_input(prompt)
- m = queue.re_default_answer.match(prompt)
+ answer = daklib.utils.our_raw_input(prompt)
+ m = daklib.queue.re_default_answer.match(prompt)
if answer == "":
answer = m.group(1)
answer = answer[:1].upper()
################################################################################
def move_to_dir (dest, perms=0660, changesperms=0664):
- utils.move (pkg.changes_file, dest, perms=changesperms)
+ daklib.utils.move (pkg.changes_file, dest, perms=changesperms)
file_keys = files.keys()
for file in file_keys:
- utils.move (file, dest, perms=perms)
+ daklib.utils.move (file, dest, perms=perms)
################################################################################
if not Options["No-Mail"]:
print "Sending new ack."
Subst["__SUMMARY__"] = summary
- new_ack_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.new")
- utils.send_mail(new_ack_message)
+ new_ack_message = daklib.utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.new")
+ daklib.utils.send_mail(new_ack_message)
################################################################################
# Relativize the filename so we use the copy in holding
# rather than the original...
pkg.changes_file = os.path.basename(pkg.changes_file)
- changes["fingerprint"] = utils.check_signature(pkg.changes_file, reject)
+ changes["fingerprint"] = daklib.utils.check_signature(pkg.changes_file, reject)
if changes["fingerprint"]:
valid_changes_p = check_changes()
else:
# Ensure all the arguments we were given are .changes files
for file in changes_files:
if not file.endswith(".changes"):
- utils.warn("Ignoring '%s' because it's not a .changes file." % (file))
+ daklib.utils.warn("Ignoring '%s' because it's not a .changes file." % (file))
changes_files.remove(file)
if changes_files == []:
- utils.fubar("Need at least one .changes file as an argument.")
+ daklib.utils.fubar("Need at least one .changes file as an argument.")
# Check that we aren't going to clash with the daily cron job
if not Options["No-Action"] and os.path.exists("%s/daily.lock" % (Cnf["Dir::Lock"])) and not Options["No-Lock"]:
- utils.fubar("Archive maintenance in progress. Try again later.")
+ daklib.utils.fubar("Archive maintenance in progress. Try again later.")
# Obtain lock if not in no-action mode and initialize the log
fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError, e:
if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
- utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.")
+ daklib.utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.")
else:
raise
- Logger = Upload.Logger = logging.Logger(Cnf, "process-unchecked")
+ Logger = Upload.Logger = daklib.logging.Logger(Cnf, "process-unchecked")
# debian-{devel-,}-changes@lists.debian.org toggles writes access based on this header
bcc = "X-DAK: dak process-unchecked\nX-Katie: this header is obsolete"
# Sort the .changes files so that we process sourceful ones first
- changes_files.sort(utils.changes_compare)
+ changes_files.sort(daklib.utils.changes_compare)
# Process the changes files
for changes_file in changes_files:
sets = "set"
if accept_count > 1:
sets = "sets"
- print "Accepted %d package %s, %s." % (accept_count, sets, utils.size_type(int(accept_bytes)))
+ print "Accepted %d package %s, %s." % (accept_count, sets, daklib.utils.size_type(int(accept_bytes)))
Logger.log(["total",accept_count,accept_bytes])
if not Options["No-Action"]:
import copy, glob, os, stat, sys, time
import apt_pkg
-import dak.lib.queue as queue
-import dak.lib.utils as utils
+import daklib.queue
+import daklib.utils
Cnf = None
Upload = None
try:
(maintainer["maintainer822"], maintainer["maintainer2047"],
maintainer["maintainername"], maintainer["maintaineremail"]) = \
- utils.fix_maintainer (j["maintainer"])
- except utils.ParseMaintError, msg:
+ daklib.utils.fix_maintainer (j["maintainer"])
+ except daklib.utils.ParseMaintError, msg:
print "Problems while parsing maintainer address\n"
maintainer["maintainername"] = "Unknown"
maintainer["maintaineremail"] = "Unknown"
version = j["version"]
versions[version] = ""
arches_list = arches.keys()
- arches_list.sort(utils.arch_compare_sw)
+ arches_list.sort(daklib.utils.arch_compare_sw)
arch_list = " ".join(arches_list)
version_list = " ".join(versions.keys())
if len(version_list) > max_version_len:
def main():
global Cnf, Upload
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Queue-Report::Options::Help"),
('n',"new","Queue-Report::Options::New"),
('s',"sort","Queue-Report::Options::Sort", "HasArg"),
if Options["Help"]:
usage()
- Upload = queue.Upload(Cnf)
+ Upload = daklib.queue.Upload(Cnf)
if Cnf.has_key("Queue-Report::Options::New"):
header()
import os, pg, sys
import apt_pkg
-import dak.lib.database as database
-import dak.lib.logging as logging
-import dak.lib.queue as queue
-import dak.lib.utils as utils
+import daklib.database
+import daklib.logging
+import daklib.queue
+import daklib.utils
################################################################################
def main():
global Cnf, Logger, Options, projectB, Upload
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Reject-Proposed-Updates::Options::Help"),
('m',"manual-reject","Reject-Proposed-Updates::Options::Manual-Reject", "HasArg"),
('s',"no-mail", "Reject-Proposed-Updates::Options::No-Mail")]
if Options["Help"]:
usage()
if not arguments:
- utils.fubar("need at least one .changes filename as an argument.")
+ daklib.utils.fubar("need at least one .changes filename as an argument.")
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
-    database.init(Cnf, projectB)
+    daklib.database.init(Cnf, projectB)
- Upload = queue.Upload(Cnf)
- Logger = Upload.Logger = logging.Logger(Cnf, "reject-proposed-updates")
+ Upload = daklib.queue.Upload(Cnf)
+ Logger = Upload.Logger = daklib.logging.Logger(Cnf, "reject-proposed-updates")
bcc = "X-DAK: dak rejected-proposed-updates\nX-Katie: this header is obsolete"
if Cnf.has_key("Dinstall::Bcc"):
Upload.Subst["__BCC__"] = bcc
for arg in arguments:
- arg = utils.validate_changes_file_arg(arg)
+ arg = daklib.utils.validate_changes_file_arg(arg)
Upload.pkg.changes_file = arg
Upload.init_vars()
cwd = os.getcwd()
answer = "XXX"
while prompt.find(answer) == -1:
- answer = utils.our_raw_input(prompt)
- m = queue.re_default_answer.search(prompt)
+ answer = daklib.utils.our_raw_input(prompt)
+ m = daklib.queue.re_default_answer.search(prompt)
if answer == "":
answer = m.group(1)
answer = answer[:1].upper()
# If we weren't given a manual rejection message, spawn an editor
# so the user can add one in...
if not reject_message:
- temp_filename = utils.temp_filename()
+ temp_filename = daklib.utils.temp_filename()
editor = os.environ.get("EDITOR","vi")
answer = 'E'
while answer == 'E':
os.system("%s %s" % (editor, temp_filename))
- file = utils.open_file(temp_filename)
+ file = daklib.utils.open_file(temp_filename)
reject_message = "".join(file.readlines())
file.close()
print "Reject message:"
- print utils.prefix_multi_line_string(reject_message," ", include_blank_lines=1)
+ print daklib.utils.prefix_multi_line_string(reject_message," ", include_blank_lines=1)
prompt = "[R]eject, Edit, Abandon, Quit ?"
answer = "XXX"
while prompt.find(answer) == -1:
- answer = utils.our_raw_input(prompt)
- m = queue.re_default_answer.search(prompt)
+ answer = daklib.utils.our_raw_input(prompt)
+ m = daklib.queue.re_default_answer.search(prompt)
if answer == "":
answer = m.group(1)
answer = answer[:1].upper()
reject_fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
# Build up the rejection email
- user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
+ user_email_address = daklib.utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
Upload.Subst["__REJECTOR_ADDRESS__"] = user_email_address
Upload.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
Upload.Subst["__STABLE_REJECTOR__"] = Cnf["Reject-Proposed-Updates::StableRejector"]
Upload.Subst["__MORE_INFO_URL__"] = Cnf["Reject-Proposed-Updates::MoreInfoURL"]
Upload.Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
- reject_mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/reject-proposed-updates.rejected")
+ reject_mail_message = daklib.utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/reject-proposed-updates.rejected")
# Write the rejection email out as the <foo>.reason file
os.write(reject_fd, reject_mail_message)
q = projectB.query("SELECT id FROM source WHERE source = '%s' AND version = '%s'" % (package, version))
ql = q.getresult()
if not ql:
- utils.fubar("reject: Couldn't find %s_%s in source table." % (package, version))
+ daklib.utils.fubar("reject: Couldn't find %s_%s in source table." % (package, version))
source_id = ql[0][0]
projectB.query("DELETE FROM src_associations WHERE suite = '%s' AND source = '%s'" % (suite_id, source_id))
elif files[file]["type"] == "deb":
# newer version of the package and only do the
# warn&continue thing if it finds one.
if not ql:
- utils.warn("reject: Couldn't find %s_%s_%s in binaries table." % (package, version, architecture))
+ daklib.utils.warn("reject: Couldn't find %s_%s_%s in binaries table." % (package, version, architecture))
else:
binary_id = ql[0][0]
projectB.query("DELETE FROM bin_associations WHERE suite = '%s' AND bin = '%s'" % (suite_id, binary_id))
# Send the rejection mail if appropriate
if not Options["No-Mail"]:
- utils.send_mail(reject_mail_message)
+ daklib.utils.send_mail(reject_mail_message)
# Finally remove the .dak file
dot_dak_file = os.path.join(Cnf["Suite::Proposed-Updates::CopyDotDak"], os.path.basename(changes_file[:-8]+".dak"))
import commands, os, pg, re, sys
import apt_pkg, apt_inst
-import dak.lib.database as database
-import dak.lib.utils as utils
+import daklib.database
+import daklib.utils
################################################################################
# the fuck are we gonna do now? What are we gonna do?"
def game_over():
+    # Last-chance interactive confirmation before destructive archive
+    # changes; anything other than an explicit "y" aborts the program.
-    answer = utils.our_raw_input("Continue (y/N)? ").lower()
+    answer = daklib.utils.our_raw_input("Continue (y/N)? ").lower()
    if answer != "y":
        print "Aborted."
        sys.exit(1)
for component in components:
filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (Cnf["Dir::Root"], suites[0], component, architecture)
# apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance...
- temp_filename = utils.temp_filename()
+ temp_filename = daklib.utils.temp_filename()
(result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
if (result != 0):
- utils.fubar("Gunzip invocation failed!\n%s\n" % (output), result)
- packages = utils.open_file(temp_filename)
+ daklib.utils.fubar("Gunzip invocation failed!\n%s\n" % (output), result)
+ packages = daklib.utils.open_file(temp_filename)
Packages = apt_pkg.ParseTagFile(packages)
while Packages.Step():
package = Packages.Section.Find("Package")
what = "%s/%s" % (package, component)
else:
what = "** %s" % (package)
- print "%s has an unsatisfied dependency on %s: %s" % (what, architecture, utils.pp_deps(dep))
+ print "%s has an unsatisfied dependency on %s: %s" % (what, architecture, daklib.utils.pp_deps(dep))
dep_problem = 1
# Check source dependencies (Build-Depends and Build-Depends-Indep)
for component in components:
filename = "%s/dists/%s/%s/source/Sources.gz" % (Cnf["Dir::Root"], suites[0], component)
# apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance...
- temp_filename = utils.temp_filename()
+ temp_filename = daklib.utils.temp_filename()
result, output = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
if result != 0:
sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
sys.exit(result)
- sources = utils.open_file(temp_filename, "r")
+ sources = daklib.utils.open_file(temp_filename, "r")
Sources = apt_pkg.ParseTagFile(sources)
while Sources.Step():
source = Sources.Section.Find("Package")
source = "%s/%s" % (source, component)
else:
source = "** %s" % (source)
- print "%s has an unsatisfied build-dependency: %s" % (source, utils.pp_deps(dep))
+ print "%s has an unsatisfied build-dependency: %s" % (source, daklib.utils.pp_deps(dep))
dep_problem = 1
sources.close()
os.unlink(temp_filename)
def main ():
global Cnf, Options, projectB
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Rm::Options::Help"),
('a',"architecture","Rm::Options::Architecture", "HasArg"),
# Sanity check options
if not arguments:
- utils.fubar("need at least one package name as an argument.")
+ daklib.utils.fubar("need at least one package name as an argument.")
if Options["Architecture"] and Options["Source-Only"]:
- utils.fubar("can't use -a/--architecutre and -S/--source-only options simultaneously.")
+ daklib.utils.fubar("can't use -a/--architecutre and -S/--source-only options simultaneously.")
if Options["Binary-Only"] and Options["Source-Only"]:
- utils.fubar("can't use -b/--binary-only and -S/--source-only options simultaneously.")
+ daklib.utils.fubar("can't use -b/--binary-only and -S/--source-only options simultaneously.")
if Options.has_key("Carbon-Copy") and not Options.has_key("Done"):
- utils.fubar("can't use -C/--carbon-copy without also using -d/--done option.")
+ daklib.utils.fubar("can't use -C/--carbon-copy without also using -d/--done option.")
if Options["Architecture"] and not Options["Partial"]:
- utils.warn("-a/--architecture implies -p/--partial.")
+ daklib.utils.warn("-a/--architecture implies -p/--partial.")
Options["Partial"] = "true"
# Force the admin to tell someone if we're not doing a 'dak
# as telling someone).
if not Options["No-Action"] and not Options["Carbon-Copy"] \
and not Options["Done"] and Options["Reason"].find("[auto-cruft]") == -1:
- utils.fubar("Need a -C/--carbon-copy if not closing a bug and not doing a cruft removal.")
+ daklib.utils.fubar("Need a -C/--carbon-copy if not closing a bug and not doing a cruft removal.")
# Process -C/--carbon-copy
#
# 3) contains a '@' - assumed to be an email address, used unmofidied
#
carbon_copy = []
- for copy_to in utils.split_args(Options.get("Carbon-Copy")):
- if utils.str_isnum(copy_to):
+ for copy_to in daklib.utils.split_args(Options.get("Carbon-Copy")):
+ if daklib.utils.str_isnum(copy_to):
carbon_copy.append(copy_to + "@" + Cnf["Dinstall::BugServer"])
elif copy_to == 'package':
for package in arguments:
elif '@' in copy_to:
carbon_copy.append(copy_to)
else:
- utils.fubar("Invalid -C/--carbon-copy argument '%s'; not a bug number, 'package' or email address." % (copy_to))
+ daklib.utils.fubar("Invalid -C/--carbon-copy argument '%s'; not a bug number, 'package' or email address." % (copy_to))
if Options["Binary-Only"]:
field = "b.package"
con_packages = "AND %s IN (%s)" % (field, ", ".join(map(repr, arguments)))
(con_suites, con_architectures, con_components, check_source) = \
- utils.parse_args(Options)
+ daklib.utils.parse_args(Options)
# Additional suite checks
suite_ids_list = []
- suites = utils.split_args(Options["Suite"])
- suites_list = utils.join_with_commas_and(suites)
+ suites = daklib.utils.split_args(Options["Suite"])
+ suites_list = daklib.utils.join_with_commas_and(suites)
if not Options["No-Action"]:
for suite in suites:
suite_id = database.get_suite_id(suite)
# Additional architecture checks
if Options["Architecture"] and check_source:
- utils.warn("'source' in -a/--argument makes no sense and is ignored.")
+ daklib.utils.warn("'source' in -a/--argument makes no sense and is ignored.")
# Additional component processing
over_con_components = con_components.replace("c.id", "component")
for i in source_packages.keys():
filename = "/".join(source_packages[i])
try:
- dsc = utils.parse_changes(filename)
- except utils.cant_open_exc:
- utils.warn("couldn't open '%s'." % (filename))
+ dsc = daklib.utils.parse_changes(filename)
+ except daklib.utils.cant_open_exc:
+ daklib.utils.warn("couldn't open '%s'." % (filename))
continue
for package in dsc.get("binary").split(','):
package = package.strip()
q = projectB.query("SELECT l.path, f.filename, b.package, b.version, a.arch_string, b.id, b.maintainer FROM binaries b, bin_associations ba, architecture a, suite su, files f, location l, component c WHERE ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id AND b.file = f.id AND f.location = l.id AND l.component = c.id %s %s %s AND b.package = '%s'" % (con_suites, con_components, con_architectures, package))
for i in q.getresult():
filename = "/".join(i[:2])
- control = apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(filename)))
+ control = apt_pkg.ParseSection(apt_inst.debExtractControl(daklib.utils.open_file(filename)))
source = control.Find("Source", control.Find("Package"))
source = re_strip_source_version.sub('', source)
if source_packages.has_key(source):
# If we don't have a reason; spawn an editor so the user can add one
# Write the rejection email out as the <foo>.reason file
if not Options["Reason"] and not Options["No-Action"]:
- temp_filename = utils.temp_filename()
+ temp_filename = daklib.utils.temp_filename()
editor = os.environ.get("EDITOR","vi")
result = os.system("%s %s" % (editor, temp_filename))
if result != 0:
- utils.fubar ("vi invocation failed for `%s'!" % (temp_filename), result)
- temp_file = utils.open_file(temp_filename)
+ daklib.utils.fubar ("vi invocation failed for `%s'!" % (temp_filename), result)
+ temp_file = daklib.utils.open_file(temp_filename)
for line in temp_file.readlines():
Options["Reason"] += line
temp_file.close()
versions = d[package].keys()
versions.sort(apt_pkg.VersionCompare)
for version in versions:
- d[package][version].sort(utils.arch_compare_sw)
+ d[package][version].sort(daklib.utils.arch_compare_sw)
summary += "%10s | %10s | %s\n" % (package, version, ", ".join(d[package][version]))
print "Will remove the following packages from %s:" % (suites_list)
print
print "Going to remove the packages now."
game_over()
- whoami = utils.whoami()
+ whoami = daklib.utils.whoami()
date = commands.getoutput('date -R')
# Log first; if it all falls apart I want a record that we at least tried.
- logfile = utils.open_file(Cnf["Rm::LogFile"], 'a')
+ logfile = daklib.utils.open_file(Cnf["Rm::LogFile"], 'a')
logfile.write("=========================================================================\n")
logfile.write("[Date: %s] [ftpmaster: %s]\n" % (date, whoami))
logfile.write("Removed the following packages from %s:\n\n%s" % (suites_list, summary))
Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
Subst["__WHOAMI__"] = whoami
- whereami = utils.where_am_i()
+ whereami = daklib.utils.where_am_i()
Archive = Cnf.SubTree("Archive::%s" % (whereami))
Subst["__MASTER_ARCHIVE__"] = Archive["OriginServer"]
Subst["__PRIMARY_MIRROR__"] = Archive["PrimaryMirror"]
- for bug in utils.split_args(Options["Done"]):
+ for bug in daklib.utils.split_args(Options["Done"]):
Subst["__BUG_NUMBER__"] = bug
- mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/rm.bug-close")
- utils.send_mail(mail_message)
+ mail_message = daklib.utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/rm.bug-close")
+ daklib.utils.send_mail(mail_message)
logfile.write("=========================================================================\n")
logfile.close()
import commands, os, pwd, re, sys, time
import apt_pkg
-import dak.lib.queue as queue
-import dak.lib.utils as utils
+import daklib.queue
+import daklib.utils
################################################################################
uploads = {}; # uploads[uri] = file_list
changesfiles = {}; # changesfiles[uri] = file_list
package_list = {} # package_list[source_name][version]
- changes_files.sort(utils.changes_compare)
+ changes_files.sort(daklib.utils.changes_compare)
for changes_file in changes_files:
- changes_file = utils.validate_changes_file_arg(changes_file)
+ changes_file = daklib.utils.validate_changes_file_arg(changes_file)
# Reset variables
components = {}
upload_uris = {}
# Build the file list for this .changes file
for file in files.keys():
poolname = os.path.join(Cnf["Dir::Root"], Cnf["Dir::PoolRoot"],
- utils.poolify(changes["source"], files[file]["component"]),
+ daklib.utils.poolify(changes["source"], files[file]["component"]),
file)
file_list.append(poolname)
orig_component = files[file].get("original component", files[file]["component"])
upload_uris[upload_uri] = ""
num_upload_uris = len(upload_uris.keys())
if num_upload_uris == 0:
- utils.fubar("%s: No valid upload URI found from components (%s)."
+ daklib.utils.fubar("%s: No valid upload URI found from components (%s)."
% (changes_file, ", ".join(components.keys())))
elif num_upload_uris > 1:
- utils.fubar("%s: more than one upload URI (%s) from components (%s)."
+ daklib.utils.fubar("%s: more than one upload URI (%s) from components (%s)."
% (changes_file, ", ".join(upload_uris.keys()),
", ".join(components.keys())))
upload_uri = upload_uris.keys()[0]
if not Options["No-Action"]:
filename = "%s/testing-processed" % (Cnf["Dir::Log"])
- file = utils.open_file(filename, 'a')
+ file = daklib.utils.open_file(filename, 'a')
for source in package_list.keys():
for version in package_list[source].keys():
file.write(" ".join([source, version])+'\n')
updated_pkgs = {}; # updated_pkgs[distro][arch][file] = {path,md5,size}
for arg in changes_files:
- arg = utils.validate_changes_file_arg(arg)
+ arg = daklib.utils.validate_changes_file_arg(arg)
Upload.pkg.changes_file = arg
Upload.init_vars()
Upload.update_vars()
md5 = files[file]["md5sum"]
size = files[file]["size"]
poolname = Cnf["Dir::PoolRoot"] + \
- utils.poolify(src, files[file]["component"])
+ daklib.utils.poolify(src, files[file]["component"])
if arch == "source" and file.endswith(".dsc"):
dscpoolname = poolname
for suite in suites:
Subst["__BCC__"] = "Bcc: %s" % (Cnf["Dinstall::Bcc"])
adv = ""
- archive = Cnf["Archive::%s::PrimaryMirror" % (utils.where_am_i())]
+ archive = Cnf["Archive::%s::PrimaryMirror" % (daklib.utils.where_am_i())]
for suite in updated_pkgs.keys():
suite_header = "%s %s (%s)" % (Cnf["Dinstall::MyDistribution"],
Cnf["Suite::%s::Version" % suite], suite)
arches.sort()
adv += " %s was released for %s.\n\n" % (
- suite.capitalize(), utils.join_with_commas_and(arches))
+ suite.capitalize(), daklib.utils.join_with_commas_and(arches))
for a in ["source", "all"] + arches:
if not updated_pkgs[suite].has_key(a):
Subst["__ADVISORY_TEXT__"] = adv
- adv = utils.TemplateSubst(Subst, Cnf["Dir::Templates"]+"/security-install.advisory")
+ adv = daklib.utils.TemplateSubst(Subst, Cnf["Dir::Templates"]+"/security-install.advisory")
if not Options["No-Action"]:
- utils.send_mail (adv)
+ daklib.utils.send_mail (adv)
else:
print "[<Would send template advisory mail>]"
global Cnf, Upload, Options
apt_pkg.init()
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h', "help", "Security-Install::Options::Help"),
('n', "no-action", "Security-Install::Options::No-Action")]
arguments = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
Options = Cnf.SubTree("Security-Install::Options")
- Upload = queue.Upload(Cnf)
+ Upload = daklib.queue.Upload(Cnf)
if Options["Help"]:
usage(0)
advisory_number = arguments[0]
changes_files = arguments[1:]
if advisory_number.endswith(".changes"):
- utils.warn("first argument must be the advisory number.")
+ daklib.utils.warn("first argument must be the advisory number.")
usage(1)
for file in changes_files:
- file = utils.validate_changes_file_arg(file)
+ file = daklib.utils.validate_changes_file_arg(file)
return (advisory_number, changes_files)
######################################################################
def yes_no(prompt):
while 1:
- answer = utils.our_raw_input(prompt+" ").lower()
+ answer = daklib.utils.our_raw_input(prompt+" ").lower()
if answer == "y" or answer == "n":
break
else:
def spawn(command):
+    # Run an external command, honouring -n/--no-action.  The command is
+    # refused outright if it contains characters outside the taint-free
+    # whitelist (it is passed to a shell via commands.getstatusoutput).
    if not re_taint_free.match(command):
-        utils.fubar("Invalid character in \"%s\"." % (command))
+        daklib.utils.fubar("Invalid character in \"%s\"." % (command))
    if Options["No-Action"]:
        print "[%s]" % (command)
    else:
+        # Abort with the command's own exit status on failure.
        (result, output) = commands.getstatusoutput(command)
        if (result != 0):
-            utils.fubar("Invocation of '%s' failed:\n%s\n" % (command, output), result)
+            daklib.utils.fubar("Invocation of '%s' failed:\n%s\n" % (command, output), result)
######################################################################
print "Updating file lists for apt-ftparchive..."
spawn("dak make-suite-file-list")
print "Updating Packages and Sources files..."
- spawn("apt-ftparchive generate %s" % (utils.which_apt_conf_file()))
+ spawn("apt-ftparchive generate %s" % (daklib.utils.which_apt_conf_file()))
print "Updating Release files..."
spawn("dak generate-releases")
################################################################################
import glob, os, stat, time
-import dak.lib.utils as utils
+import daklib.utils
################################################################################
def main():
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
count = 0
os.chdir(Cnf["Dir::Queue::Done"])
files = glob.glob("%s/*" % (Cnf["Dir::Queue::Done"]))
os.makedirs(dirname)
dest = dirname + '/' + os.path.basename(filename)
if os.path.exists(dest):
- utils.fubar("%s already exists." % (dest))
+ daklib.utils.fubar("%s already exists." % (dest))
print "Move: %s -> %s" % (filename, dest)
os.rename(filename, dest)
count = count + 1
import pg, sys
import apt_pkg
-import dak.lib.utils as utils
+import daklib.utils
################################################################################
def daily_install_stats():
stats = {}
- file = utils.open_file("2001-11")
+ file = daklib.utils.open_file("2001-11")
for line in file.readlines():
split = line.strip().split('~')
program = split[1]
def main ():
global Cnf, projectB
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Stats::Options::Help")]
for i in [ "help" ]:
if not Cnf.has_key("Stats::Options::%s" % (i)):
usage()
if len(args) < 1:
- utils.warn("dak stats requires a MODE argument")
+ daklib.utils.warn("dak stats requires a MODE argument")
usage(1)
elif len(args) > 1:
- utils.warn("dak stats accepts only one MODE argument")
+ daklib.utils.warn("dak stats accepts only one MODE argument")
usage(1)
mode = args[0].lower()
elif mode == "daily-install":
daily_install_stats()
else:
- utils.warn("unknown mode '%s'" % (mode))
+ daklib.utils.warn("unknown mode '%s'" % (mode))
usage(1)
################################################################################
import os, pg, re, sys
import apt_pkg
-import dak.lib.database as database
-import dak.lib.utils as utils
+import daklib.database
+import daklib.utils
################################################################################
def fix_component_section (component, section):
if component == "":
- component = utils.extract_component_from_section(section)[1]
+ component = daklib.utils.extract_component_from_section(section)[1]
# FIXME: ugly hacks to work around override brain damage
section = re_strip_section_prefix.sub('', section)
dest = "%sdists/%s/%s/source/%s%s" % (Cnf["Dir::Root"], codename, component, section, os.path.basename(i[3]))
if not os.path.exists(dest):
src = i[2]+i[3]
- src = utils.clean_symlink(src, dest, Cnf["Dir::Root"])
+ src = daklib.utils.clean_symlink(src, dest, Cnf["Dir::Root"])
if Cnf.Find("Symlink-Dists::Options::Verbose"):
print src+' -> '+dest
os.symlink(src, dest)
dislocated_files[i[4]] = dest
# Binary
- architectures = filter(utils.real_arch, Cnf.ValueList("Suite::Stable::Architectures"))
+ architectures = filter(daklib.utils.real_arch, Cnf.ValueList("Suite::Stable::Architectures"))
q = projectB.query("""
SELECT DISTINCT ON (f.id) c.name, a.arch_string, sec.section, b.package,
b.version, l.path, f.filename, f.id
section=""
architecture = i[1]
package = i[3]
- version = utils.re_no_epoch.sub('', i[4])
+ version = daklib.utils.re_no_epoch.sub('', i[4])
src = i[5]+i[6]
dest = "%sdists/%s/%s/binary-%s/%s%s_%s.deb" % (Cnf["Dir::Root"], codename, component, architecture, section, package, version)
- src = utils.clean_symlink(src, dest, Cnf["Dir::Root"])
+ src = daklib.utils.clean_symlink(src, dest, Cnf["Dir::Root"])
if not os.path.exists(dest):
if Cnf.Find("Symlink-Dists::Options::Verbose"):
print src+' -> '+dest
def main ():
global Cnf, projectB
- Cnf = utils.get_conf()
+ Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Symlink-Dists::Options::Help"),
('v',"verbose","Symlink-Dists::Options::Verbose")]
--- /dev/null
+#!/usr/bin/env python
+
+# DB access functions
+# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+import sys, time, types
+
+################################################################################
+
+Cnf = None
+projectB = None
+suite_id_cache = {}
+section_id_cache = {}
+priority_id_cache = {}
+override_type_id_cache = {}
+architecture_id_cache = {}
+archive_id_cache = {}
+component_id_cache = {}
+location_id_cache = {}
+maintainer_id_cache = {}
+source_id_cache = {}
+files_id_cache = {}
+maintainer_cache = {}
+fingerprint_id_cache = {}
+queue_id_cache = {}
+uid_id_cache = {}
+
+################################################################################
+
+def init (config, sql):
+    """Stash the apt_pkg configuration object and the pg database
+    connection in module globals for the other functions to use."""
+    global Cnf, projectB
+
+    Cnf = config
+    projectB = sql
+
+
+def do_query(q):
+    """Debugging wrapper around projectB.query(): runs the query, writing
+    the SQL, elapsed time and result to stderr, and returns the result."""
+    sys.stderr.write("query: \"%s\" ... " % (q))
+    before = time.time()
+    r = projectB.query(q)
+    time_diff = time.time()-before
+    sys.stderr.write("took %.3f seconds.\n" % (time_diff))
+    # pg.query() returns an int (affected rows) for INSERT/UPDATE/DELETE,
+    # None for utility statements, and a query-result object for SELECTs.
+    if type(r) is int:
+        sys.stderr.write("int result: %s\n" % (r))
+    elif type(r) is types.NoneType:
+        sys.stderr.write("result: None\n")
+    else:
+        sys.stderr.write("pgresult: %s\n" % (r.getresult()))
+    return r
+
+################################################################################
+
+def get_suite_id (suite):
+    """Return the database id of the suite named 'suite' (e.g.
+    "unstable"), or -1 if no such suite exists.  Results are memoized."""
+    global suite_id_cache
+
+    if suite_id_cache.has_key(suite):
+        return suite_id_cache[suite]
+
+    q = projectB.query("SELECT id FROM suite WHERE suite_name = '%s'" % (suite))
+    ql = q.getresult()
+    if not ql:
+        return -1
+
+    suite_id = ql[0][0]
+    suite_id_cache[suite] = suite_id
+
+    return suite_id
+
+def get_section_id (section):
+    """Return the database id of the override section 'section' (e.g.
+    "devel"), or -1 if no such section exists.  Results are memoized."""
+    global section_id_cache
+
+    if section_id_cache.has_key(section):
+        return section_id_cache[section]
+
+    q = projectB.query("SELECT id FROM section WHERE section = '%s'" % (section))
+    ql = q.getresult()
+    if not ql:
+        return -1
+
+    section_id = ql[0][0]
+    section_id_cache[section] = section_id
+
+    return section_id
+
+def get_priority_id (priority):
+    """Return the database id of the override priority 'priority' (e.g.
+    "optional"), or -1 if no such priority exists.  Results are memoized."""
+    global priority_id_cache
+
+    if priority_id_cache.has_key(priority):
+        return priority_id_cache[priority]
+
+    q = projectB.query("SELECT id FROM priority WHERE priority = '%s'" % (priority))
+    ql = q.getresult()
+    if not ql:
+        return -1
+
+    priority_id = ql[0][0]
+    priority_id_cache[priority] = priority_id
+
+    return priority_id
+
+def get_override_type_id (type):
+    """Return the database id of the override type 'type' (e.g. "deb",
+    "dsc"), or -1 if unknown.  Results are memoized.
+    NOTE(review): the parameter shadows the 'type' builtin; kept as-is
+    since renaming it would change the keyword-argument interface."""
+    global override_type_id_cache
+
+    if override_type_id_cache.has_key(type):
+        return override_type_id_cache[type]
+
+    q = projectB.query("SELECT id FROM override_type WHERE type = '%s'" % (type))
+    ql = q.getresult()
+    if not ql:
+        return -1
+
+    override_type_id = ql[0][0]
+    override_type_id_cache[type] = override_type_id
+
+    return override_type_id
+
+def get_architecture_id (architecture):
+    """Return the database id of the architecture 'architecture' (e.g.
+    "i386"), or -1 if unknown.  Results are memoized."""
+    global architecture_id_cache
+
+    if architecture_id_cache.has_key(architecture):
+        return architecture_id_cache[architecture]
+
+    q = projectB.query("SELECT id FROM architecture WHERE arch_string = '%s'" % (architecture))
+    ql = q.getresult()
+    if not ql:
+        return -1
+
+    architecture_id = ql[0][0]
+    architecture_id_cache[architecture] = architecture_id
+
+    return architecture_id
+
+def get_archive_id (archive):
+    """Return the database id of the archive 'archive', or -1 if unknown.
+    Matching is case-insensitive (the name is lowercased on both sides).
+    Results are memoized."""
+    global archive_id_cache
+
+    archive = archive.lower()
+
+    if archive_id_cache.has_key(archive):
+        return archive_id_cache[archive]
+
+    q = projectB.query("SELECT id FROM archive WHERE lower(name) = '%s'" % (archive))
+    ql = q.getresult()
+    if not ql:
+        return -1
+
+    archive_id = ql[0][0]
+    archive_id_cache[archive] = archive_id
+
+    return archive_id
+
+def get_component_id (component):
+    """Return the database id of the component 'component' (e.g. "main"),
+    or -1 if unknown.  Matching is case-insensitive (the name is
+    lowercased on both sides).  Results are memoized."""
+    global component_id_cache
+
+    component = component.lower()
+
+    if component_id_cache.has_key(component):
+        return component_id_cache[component]
+
+    q = projectB.query("SELECT id FROM component WHERE lower(name) = '%s'" % (component))
+    ql = q.getresult()
+    if not ql:
+        return -1
+
+    component_id = ql[0][0]
+    component_id_cache[component] = component_id
+
+    return component_id
+
+def get_location_id (location, component, archive):
+    """Return the database id of the location with path 'location' in
+    'archive', optionally restricted to 'component' (pass "" for none).
+    Returns -1 if no matching row exists.  Results are memoized."""
+    global location_id_cache
+
+    # Key the cache on all three lookup parameters.  Previously the key
+    # was location + '~' + component + '~' + location — 'archive' never
+    # entered the key, so the same path/component in a different archive
+    # could return a stale cached id.
+    cache_key = location + '~' + component + '~' + archive
+    if location_id_cache.has_key(cache_key):
+        return location_id_cache[cache_key]
+
+    archive_id = get_archive_id (archive)
+    if component != "":
+        component_id = get_component_id (component)
+        # NOTE(review): if component is non-empty but unknown
+        # (component_id == -1), 'q' is never assigned and the getresult()
+        # below raises NameError — preserved as-is to avoid a behaviour
+        # change; callers currently never hit this path with bad data.
+        if component_id != -1:
+            q = projectB.query("SELECT id FROM location WHERE path = '%s' AND component = %d AND archive = %d" % (location, component_id, archive_id))
+    else:
+        q = projectB.query("SELECT id FROM location WHERE path = '%s' AND archive = %d" % (location, archive_id))
+    ql = q.getresult()
+    if not ql:
+        return -1
+
+    location_id = ql[0][0]
+    location_id_cache[cache_key] = location_id
+
+    return location_id
+
+def get_source_id (source, version):
+    """Return the database id of source package 'source' at exactly
+    'version', or None if no such row exists.  Results are memoized.
+    NOTE(review): unlike the other get_*_id helpers this returns None
+    (not -1) on a miss; callers rely on the distinction."""
+    global source_id_cache
+
+    cache_key = source + '~' + version + '~'
+    if source_id_cache.has_key(cache_key):
+        return source_id_cache[cache_key]
+
+    q = projectB.query("SELECT id FROM source s WHERE s.source = '%s' AND s.version = '%s'" % (source, version))
+
+    if not q.getresult():
+        return None
+
+    source_id = q.getresult()[0][0]
+    source_id_cache[cache_key] = source_id
+
+    return source_id
+
+################################################################################
+
+def get_or_set_maintainer_id (maintainer):
+    """Return the database id for 'maintainer', inserting a new row first
+    if none exists.  Results are memoized.  The SELECT/INSERT/SELECT
+    sequence is not guarded against concurrent inserts."""
+    global maintainer_id_cache
+
+    if maintainer_id_cache.has_key(maintainer):
+        return maintainer_id_cache[maintainer]
+
+    q = projectB.query("SELECT id FROM maintainer WHERE name = '%s'" % (maintainer))
+    if not q.getresult():
+        projectB.query("INSERT INTO maintainer (name) VALUES ('%s')" % (maintainer))
+        q = projectB.query("SELECT id FROM maintainer WHERE name = '%s'" % (maintainer))
+    maintainer_id = q.getresult()[0][0]
+    maintainer_id_cache[maintainer] = maintainer_id
+
+    return maintainer_id
+
+################################################################################
+
+def get_or_set_uid_id (uid):
+    """Return the database id for 'uid', inserting a new row first if
+    none exists.  Results are memoized.  The SELECT/INSERT/SELECT
+    sequence is not guarded against concurrent inserts."""
+    global uid_id_cache
+
+    if uid_id_cache.has_key(uid):
+        return uid_id_cache[uid]
+
+    q = projectB.query("SELECT id FROM uid WHERE uid = '%s'" % (uid))
+    if not q.getresult():
+        projectB.query("INSERT INTO uid (uid) VALUES ('%s')" % (uid))
+        q = projectB.query("SELECT id FROM uid WHERE uid = '%s'" % (uid))
+    uid_id = q.getresult()[0][0]
+    uid_id_cache[uid] = uid_id
+
+    return uid_id
+
+################################################################################
+
+def get_or_set_fingerprint_id (fingerprint):
+    """Return the database id for GPG key 'fingerprint', inserting a new
+    row first if none exists.  Results are memoized.  The
+    SELECT/INSERT/SELECT sequence is not guarded against concurrent
+    inserts."""
+    global fingerprint_id_cache
+
+    if fingerprint_id_cache.has_key(fingerprint):
+        return fingerprint_id_cache[fingerprint]
+
+    q = projectB.query("SELECT id FROM fingerprint WHERE fingerprint = '%s'" % (fingerprint))
+    if not q.getresult():
+        projectB.query("INSERT INTO fingerprint (fingerprint) VALUES ('%s')" % (fingerprint))
+        q = projectB.query("SELECT id FROM fingerprint WHERE fingerprint = '%s'" % (fingerprint))
+    fingerprint_id = q.getresult()[0][0]
+    fingerprint_id_cache[fingerprint] = fingerprint_id
+
+    return fingerprint_id
+
+################################################################################
+
+def get_files_id (filename, size, md5sum, location_id):
+    """Look up 'filename' at 'location_id' in the files table and verify
+    it against the expected 'size' and 'md5sum'.
+
+    Returns:
+      the file id   -- exactly one row matched and size/md5sum agree
+      None          -- no such file in the database
+      -1            -- more than one row matched (database corruption)
+      -2            -- a row exists but its size or md5sum differs
+    Successful lookups are memoized on (filename, location_id)."""
+    global files_id_cache
+
+    cache_key = "%s~%d" % (filename, location_id)
+
+    if files_id_cache.has_key(cache_key):
+        return files_id_cache[cache_key]
+
+    size = int(size)
+    q = projectB.query("SELECT id, size, md5sum FROM files WHERE filename = '%s' AND location = %d" % (filename, location_id))
+    ql = q.getresult()
+    if ql:
+        if len(ql) != 1:
+            return -1
+        ql = ql[0]
+        orig_size = int(ql[1])
+        orig_md5sum = ql[2]
+        if orig_size != size or orig_md5sum != md5sum:
+            return -2
+        files_id_cache[cache_key] = ql[0]
+        return files_id_cache[cache_key]
+    else:
+        return None
+
+################################################################################
+
+def get_or_set_queue_id (queue):
+    """Return the database id for the queue named 'queue' (e.g. "byhand"),
+    inserting a new row first if none exists.  Results are memoized.  The
+    SELECT/INSERT/SELECT sequence is not guarded against concurrent
+    inserts."""
+    global queue_id_cache
+
+    if queue_id_cache.has_key(queue):
+        return queue_id_cache[queue]
+
+    q = projectB.query("SELECT id FROM queue WHERE queue_name = '%s'" % (queue))
+    if not q.getresult():
+        projectB.query("INSERT INTO queue (queue_name) VALUES ('%s')" % (queue))
+        q = projectB.query("SELECT id FROM queue WHERE queue_name = '%s'" % (queue))
+    queue_id = q.getresult()[0][0]
+    queue_id_cache[queue] = queue_id
+
+    return queue_id
+
+################################################################################
+
+def set_files_id (filename, size, md5sum, location_id):
+    """Insert a new row into the files table and return its id by
+    re-querying via get_files_id() (which also primes the cache)."""
+    global files_id_cache
+
+    projectB.query("INSERT INTO files (filename, size, md5sum, location) VALUES ('%s', %d, '%s', %d)" % (filename, long(size), md5sum, location_id))
+
+    return get_files_id (filename, size, md5sum, location_id)
+
+    # The commented-out alternative below avoided the extra SELECT by
+    # using currval(), but was abandoned for performance reasons:
+    ### currval has issues with postgresql 7.1.3 when the table is big
+    ### it was taking ~3 seconds to return on auric which is very Not
+    ### Cool(tm).
+    ##
+    ##q = projectB.query("SELECT id FROM files WHERE id = currval('files_id_seq')")
+    ##ql = q.getresult()[0]
+    ##cache_key = "%s~%d" % (filename, location_id)
+    ##files_id_cache[cache_key] = ql[0]
+    ##return files_id_cache[cache_key]
+
+################################################################################
+
+def get_maintainer (maintainer_id):
+    """Return the maintainer name for database id 'maintainer_id'.
+    Results are memoized; raises IndexError if the id does not exist."""
+    global maintainer_cache
+
+    if not maintainer_cache.has_key(maintainer_id):
+        q = projectB.query("SELECT name FROM maintainer WHERE id = %s" % (maintainer_id))
+        maintainer_cache[maintainer_id] = q.getresult()[0][0]
+
+    return maintainer_cache[maintainer_id]
+
+################################################################################
--- /dev/null
+#!/usr/bin/env python
+
+# Logging functions
+# Copyright (C) 2001, 2002 James Troup <james@nocrew.org>
+# $Id: logging.py,v 1.4 2005-11-15 09:50:32 ajt Exp $
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+import os, pwd, time, sys
+import utils
+
+################################################################################
+
+class Logger:
+    """Audit logger: appends timestamped, pipe-separated event records
+    to a monthly logfile under Dir::Log (or stderr in debug mode)."""
+    Cnf = None       # apt_pkg configuration object
+    logfile = None   # open file object the log is written to
+    program = None   # name of the program doing the logging
+
+    def __init__ (self, Cnf, program, debug=0):
+        """Open (creating if necessary) this month's logfile and record a
+        "program start" event.  With debug=1, log to stderr instead."""
+        self.Cnf = Cnf
+        self.program = program
+        # Create the log directory if it doesn't exist
+        logdir = Cnf["Dir::Log"]
+        if not os.path.exists(logdir):
+            # NOTE(review): the previous umask is saved but never
+            # restored, so the process umask stays 0 afterwards — confirm
+            # this is intentional.
+            umask = os.umask(00000)
+            os.makedirs(logdir, 02775)
+        # Open the logfile: one file per month (YYYY-MM), opened for
+        # append so concurrent/successive runs share it.
+        logfilename = "%s/%s" % (logdir, time.strftime("%Y-%m"))
+        logfile = None
+        if debug:
+            logfile = sys.stderr
+        else:
+            logfile = utils.open_file(logfilename, 'a')
+        self.logfile = logfile
+        # Log the start of the program
+        user = pwd.getpwuid(os.getuid())[0]
+        self.log(["program start", user])
+
+    def log (self, details):
+        """Append one event to the log; 'details' is a list of fields
+        (mutated in place: timestamp and program name are prepended)."""
+        # Prepend the timestamp and program name
+        details.insert(0, self.program)
+        timestamp = time.strftime("%Y%m%d%H%M%S")
+        details.insert(0, timestamp)
+        # Force the contents of the list to be string.join-able
+        details = map(str, details)
+        # Write out the log in TSV
+        self.logfile.write("|".join(details)+'\n')
+        # Flush the output to enable tail-ing
+        self.logfile.flush()
+
+    def close (self):
+        """Record a "program end" event and close the logfile."""
+        self.log(["program end"])
+        self.logfile.flush()
+        self.logfile.close()
--- /dev/null
+#!/usr/bin/env python
+
+# Queue utility functions for dak
+# Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+###############################################################################
+
+import cPickle, errno, os, pg, re, stat, string, sys, time
+import apt_inst, apt_pkg
+import utils, database
+
+from types import *
+
+###############################################################################
+
+# Matches a string consisting solely of digits.
+re_isanum = re.compile (r"^\d+$")
+# Extracts the default answer from a prompt such as "[R]eject, Skip ?".
+re_default_answer = re.compile(r"\[(.*)\]")
+# Matches the blank line separating paragraphs (used to reformat changelogs).
+re_fdnic = re.compile(r"\n\n")
+# Matches a binary-only NMU version suffix, e.g. "+b1".
+re_bin_only_nmu = re.compile(r"\+b\d+$")
+###############################################################################
+
+# Convenience wrapper to carry around all the package information in
+# one object.
+
+class Pkg:
+ def __init__(self, **kwds):
+ self.__dict__.update(kwds)
+
+ def update(self, **kwds):
+ self.__dict__.update(kwds)
+
+###############################################################################
+
+class nmu_p:
+ # Read in the group maintainer override file
+ def __init__ (self, Cnf):
+ self.group_maint = {}
+ self.Cnf = Cnf
+ if Cnf.get("Dinstall::GroupOverrideFilename"):
+ filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"]
+ file = utils.open_file(filename)
+ for line in file.readlines():
+ line = utils.re_comments.sub('', line).lower().strip()
+ if line != "":
+ self.group_maint[line] = 1
+ file.close()
+
+ def is_an_nmu (self, pkg):
+ Cnf = self.Cnf
+ changes = pkg.changes
+ dsc = pkg.dsc
+
+ i = utils.fix_maintainer (dsc.get("maintainer",
+ Cnf["Dinstall::MyEmailAddress"]).lower())
+ (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i
+ # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
+ if dsc_name == changes["maintainername"].lower() and \
+ (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
+ return 0
+
+ if dsc.has_key("uploaders"):
+ uploaders = dsc["uploaders"].lower().split(",")
+ uploadernames = {}
+ for i in uploaders:
+ (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip())
+ uploadernames[name] = ""
+ if uploadernames.has_key(changes["changedbyname"].lower()):
+ return 0
+
+ # Some group maintained packages (e.g. Debian QA) are never NMU's
+ if self.group_maint.has_key(changes["maintaineremail"].lower()):
+ return 0
+
+ return 1
+
+###############################################################################
+
+class Upload:
+
+ def __init__(self, Cnf):
+ self.Cnf = Cnf
+ # Read in the group-maint override file
+ self.nmu = nmu_p(Cnf)
+ self.accept_count = 0
+ self.accept_bytes = 0L
+ self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
+ legacy_source_untouchable = {})
+
+ # Initialize the substitution template mapping global
+ Subst = self.Subst = {}
+ Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
+ Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
+ Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
+ Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
+
+ self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
+ database.init(Cnf, self.projectB)
+
+ ###########################################################################
+
+ def init_vars (self):
+ for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
+ exec "self.pkg.%s.clear();" % (i)
+ self.pkg.orig_tar_id = None
+ self.pkg.orig_tar_location = ""
+ self.pkg.orig_tar_gz = None
+
+ ###########################################################################
+
+ def update_vars (self):
+ dump_filename = self.pkg.changes_file[:-8]+".dak"
+ dump_file = utils.open_file(dump_filename)
+ p = cPickle.Unpickler(dump_file)
+ for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
+ exec "self.pkg.%s.update(p.load());" % (i)
+ for i in [ "orig_tar_id", "orig_tar_location" ]:
+ exec "self.pkg.%s = p.load();" % (i)
+ dump_file.close()
+
+ ###########################################################################
+
+ # This could just dump the dictionaries as is, but I'd like to
+ # avoid this so there's some idea of what process-accepted &
+ # process-new use from process-unchecked
+
+ def dump_vars(self, dest_dir):
+ for i in [ "changes", "dsc", "files", "dsc_files",
+ "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
+ exec "%s = self.pkg.%s;" % (i,i)
+ dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
+ dump_file = utils.open_file(dump_filename, 'w')
+ try:
+ os.chmod(dump_filename, 0660)
+ except OSError, e:
+ if errno.errorcode[e.errno] == 'EPERM':
+ perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
+ if perms & stat.S_IROTH:
+ utils.fubar("%s is world readable and chmod failed." % (dump_filename))
+ else:
+ raise
+
+ p = cPickle.Pickler(dump_file, 1)
+ for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
+ exec "%s = {}" % i
+ ## files
+ for file in files.keys():
+ d_files[file] = {}
+ for i in [ "package", "version", "architecture", "type", "size",
+ "md5sum", "component", "location id", "source package",
+ "source version", "maintainer", "dbtype", "files id",
+ "new", "section", "priority", "othercomponents",
+ "pool name", "original component" ]:
+ if files[file].has_key(i):
+ d_files[file][i] = files[file][i]
+ ## changes
+ # Mandatory changes fields
+ for i in [ "distribution", "source", "architecture", "version",
+ "maintainer", "urgency", "fingerprint", "changedby822",
+ "changedby2047", "changedbyname", "maintainer822",
+ "maintainer2047", "maintainername", "maintaineremail",
+ "closes", "changes" ]:
+ d_changes[i] = changes[i]
+ # Optional changes fields
+ for i in [ "changed-by", "filecontents", "format", "process-new note", "distribution-version" ]:
+ if changes.has_key(i):
+ d_changes[i] = changes[i]
+ ## dsc
+ for i in [ "source", "version", "maintainer", "fingerprint",
+ "uploaders", "bts changelog" ]:
+ if dsc.has_key(i):
+ d_dsc[i] = dsc[i]
+ ## dsc_files
+ for file in dsc_files.keys():
+ d_dsc_files[file] = {}
+ # Mandatory dsc_files fields
+ for i in [ "size", "md5sum" ]:
+ d_dsc_files[file][i] = dsc_files[file][i]
+ # Optional dsc_files fields
+ for i in [ "files id" ]:
+ if dsc_files[file].has_key(i):
+ d_dsc_files[file][i] = dsc_files[file][i]
+
+ for i in [ d_changes, d_dsc, d_files, d_dsc_files,
+ legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
+ p.dump(i)
+ dump_file.close()
+
+ ###########################################################################
+
+ # Set up the per-package template substitution mappings
+
+    def update_subst (self, reject_message = ""):
+        """Update self.Subst with the per-package substitution variables
+        (architecture, maintainer addresses, source, version, ...) used
+        when filling in mail templates."""
+        Subst = self.Subst
+        changes = self.pkg.changes
+        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
+        if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
+            changes["architecture"] = { "Unknown" : "" }
+        # and maintainer2047 may not exist.
+        if not changes.has_key("maintainer2047"):
+            changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
+
+        Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
+        Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
+        Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
+
+        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
+        if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
+            Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
+            Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
+                                                     changes["maintainer2047"])
+            Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
+        else:
+            Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
+            Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
+            Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
+        # Bcc the package-tracking server if one is configured.
+        if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
+            Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
+
+        # Apply any global override of the Maintainer field
+        if self.Cnf.get("Dinstall::OverrideMaintainer"):
+            Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
+            Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
+
+        Subst["__REJECT_MESSAGE__"] = reject_message
+        Subst["__SOURCE__"] = changes.get("source", "Unknown")
+        Subst["__VERSION__"] = changes.get("version", "Unknown")
+
+ ###########################################################################
+
+    def build_summaries(self):
+        """Build the long and short textual summaries of the upload
+        (per-file destinations, byhand/NEW markers, changelog excerpt,
+        announcement info).  Returns (summary, short_summary)."""
+        changes = self.pkg.changes
+        files = self.pkg.files
+
+        # byhand/new double as flags (set to 1 below); "" is just falsy.
+        byhand = summary = new = ""
+
+        # changes["distribution"] may not exist in corner cases
+        # (e.g. unreadable changes files)
+        if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
+            changes["distribution"] = {}
+
+        file_keys = files.keys()
+        file_keys.sort()
+        for file in file_keys:
+            if files[file].has_key("byhand"):
+                byhand = 1
+                summary += file + " byhand\n"
+            elif files[file].has_key("new"):
+                new = 1
+                summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
+                if files[file].has_key("othercomponents"):
+                    summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
+                # For NEW .debs, include the package description for the reviewer.
+                if files[file]["type"] == "deb":
+                    deb_fh = utils.open_file(file)
+                    summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
+                    deb_fh.close()
+            else:
+                files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
+                destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
+                summary += file + "\n to " + destination + "\n"
+
+        short_summary = summary
+
+        # This is for direport's benefit...
+        f = re_fdnic.sub("\n .\n", changes.get("changes",""))
+
+        if byhand or new:
+            summary += "Changes: " + f
+
+        summary += self.announce(short_summary, 0)
+
+        return (summary, short_summary)
+
+ ###########################################################################
+
+    def close_bugs (self, summary, action):
+        """Handle the bugs listed in the .changes Closes field: maintainer
+        uploads close them, NMUs and uploads to experimental only tag them
+        (fixed / fixed-in-experimental).  Mail is only sent when 'action'
+        is true.  Returns the updated summary string."""
+        changes = self.pkg.changes
+        Subst = self.Subst
+        Cnf = self.Cnf
+
+        bugs = changes["closes"].keys()
+
+        if not bugs:
+            return summary
+
+        bugs.sort()
+        if not self.nmu.is_an_nmu(self.pkg):
+            if changes["distribution"].has_key("experimental"):
+                # tag bugs as fixed-in-experimental for uploads to experimental
+                summary += "Setting bugs to severity fixed: "
+                control_message = ""
+                for bug in bugs:
+                    summary += "%s " % (bug)
+                    control_message += "tag %s + fixed-in-experimental\n" % (bug)
+                # One control@ mail tags all the bugs at once.
+                if action and control_message != "":
+                    Subst["__CONTROL_MESSAGE__"] = control_message
+                    mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-experimental-fixed")
+                    utils.send_mail (mail_message)
+                if action:
+                    self.Logger.log(["setting bugs to fixed"]+bugs)
+
+
+            else:
+                summary += "Closing bugs: "
+                # Unlike the tagging cases, closing sends one mail per bug.
+                for bug in bugs:
+                    summary += "%s " % (bug)
+                    if action:
+                        Subst["__BUG_NUMBER__"] = bug
+                        if changes["distribution"].has_key("stable"):
+                            Subst["__STABLE_WARNING__"] = """
+Note that this package is not part of the released stable Debian
+distribution. It may have dependencies on other unreleased software,
+or other instabilities. Please take care if you wish to install it.
+The update will eventually make its way into the next released Debian
+distribution."""
+                        else:
+                            Subst["__STABLE_WARNING__"] = ""
+                        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
+                        utils.send_mail (mail_message)
+                if action:
+                    self.Logger.log(["closing bugs"]+bugs)
+
+        else: # NMU
+            summary += "Setting bugs to severity fixed: "
+            control_message = ""
+            for bug in bugs:
+                summary += "%s " % (bug)
+                control_message += "tag %s + fixed\n" % (bug)
+            if action and control_message != "":
+                Subst["__CONTROL_MESSAGE__"] = control_message
+                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-nmu-fixed")
+                utils.send_mail (mail_message)
+            if action:
+                self.Logger.log(["setting bugs to fixed"]+bugs)
+        summary += "\n"
+        return summary
+
+ ###########################################################################
+
+ def announce (self, short_summary, action):
+ Subst = self.Subst
+ Cnf = self.Cnf
+ changes = self.pkg.changes
+
+ # Only do announcements for source uploads with a recent dpkg-dev installed
+ if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
+ return ""
+
+ lists_done = {}
+ summary = ""
+ Subst["__SHORT_SUMMARY__"] = short_summary
+
+ for dist in changes["distribution"].keys():
+ list = Cnf.Find("Suite::%s::Announce" % (dist))
+ if list == "" or lists_done.has_key(list):
+ continue
+ lists_done[list] = 1
+ summary += "Announcing to %s\n" % (list)
+
+ if action:
+ Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
+ if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
+ Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
+ mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
+ utils.send_mail (mail_message)
+
+ if Cnf.FindB("Dinstall::CloseBugs"):
+ summary = self.close_bugs(summary, action)
+
+ return summary
+
+ ###########################################################################
+
+    def accept (self, summary, short_summary):
+        """Accept an upload: dump state, move the .changes and its files
+        into the accepted queue, send the accepted mail / announcements,
+        write the BTS version-tracking files and feed the queue-build
+        area."""
+        Cnf = self.Cnf
+        Subst = self.Subst
+        files = self.pkg.files
+        changes = self.pkg.changes
+        changes_file = self.pkg.changes_file
+        dsc = self.pkg.dsc
+
+        print "Accepting."
+        self.Logger.log(["Accepting changes",changes_file])
+
+        # Persist the per-package state for the later processing stages.
+        self.dump_vars(Cnf["Dir::Queue::Accepted"])
+
+        # Move all the files into the accepted directory
+        utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
+        file_keys = files.keys()
+        for file in file_keys:
+            utils.move(file, Cnf["Dir::Queue::Accepted"])
+            self.accept_bytes += float(files[file]["size"])
+        self.accept_count += 1
+
+        # Send accept mail, announce to lists, close bugs and check for
+        # override disparities
+        if not Cnf["Dinstall::Options::No-Mail"]:
+            Subst["__SUITE__"] = ""
+            Subst["__SUMMARY__"] = summary
+            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
+            utils.send_mail(mail_message)
+            self.announce(short_summary, 1)
+
+
+        ## Helper stuff for DebBugs Version Tracking
+        if Cnf.Find("Dir::Queue::BTSVersionTrack"):
+            # ??? once queue/* is cleared on *.d.o and/or reprocessed
+            # the conditionalization on dsc["bts changelog"] should be
+            # dropped.
+
+            # Write out the version history from the changelog
+            if changes["architecture"].has_key("source") and \
+               dsc.has_key("bts changelog"):
+
+                # Write to a dot-prefixed temp file first, then rename into
+                # place so the tracking file appears atomically.
+                temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
+                                                    dotprefix=1, perms=0644)
+                version_history = utils.open_file(temp_filename, 'w')
+                version_history.write(dsc["bts changelog"])
+                version_history.close()
+                filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
+                                      changes_file[:-8]+".versions")
+                os.rename(temp_filename, filename)
+
+            # Write out the binary -> source mapping.
+            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
+                                                dotprefix=1, perms=0644)
+            debinfo = utils.open_file(temp_filename, 'w')
+            for file in file_keys:
+                f = files[file]
+                if f["type"] == "deb":
+                    line = " ".join([f["package"], f["version"],
+                                     f["architecture"], f["source package"],
+                                     f["source version"]])
+                    debinfo.write(line+"\n")
+            debinfo.close()
+            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
+                                  changes_file[:-8]+".debinfo")
+            os.rename(temp_filename, filename)
+
+        self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
+
+ ###########################################################################
+
+    def queue_build (self, queue, path):
+        """Feed the auto-build area: for each target suite listed in
+        Dinstall::QueueBuildSuites, copy or symlink the upload's files
+        from 'path' into the queue-build directory and record them in
+        the queue_build table (including any pool .orig.tar.gz)."""
+        Cnf = self.Cnf
+        Subst = self.Subst
+        files = self.pkg.files
+        changes = self.pkg.changes
+        changes_file = self.pkg.changes_file
+        dsc = self.pkg.dsc
+        file_keys = files.keys()
+
+        ## Special support to enable clean auto-building of queued packages
+        queue_id = database.get_or_set_queue_id(queue)
+
+        self.projectB.query("BEGIN WORK")
+        for suite in changes["distribution"].keys():
+            if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
+                continue
+            suite_id = database.get_suite_id(suite)
+            dest_dir = Cnf["Dir::QueueBuild"]
+            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
+                dest_dir = os.path.join(dest_dir, suite)
+            for file in file_keys:
+                src = os.path.join(path, file)
+                dest = os.path.join(dest_dir, file)
+                if Cnf.FindB("Dinstall::SecurityQueueBuild"):
+                    # Copy it since the original won't be readable by www-data
+                    utils.copy(src, dest)
+                else:
+                    # Create a symlink to it
+                    os.symlink(src, dest)
+                # Add it to the list of packages for later processing by apt-ftparchive
+                # NOTE(review): filenames are interpolated into the SQL
+                # unescaped; they come from the local queue rather than
+                # direct user input, but parameterised queries would be safer.
+                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
+            # If the .orig.tar.gz is in the pool, create a symlink to
+            # it (if one doesn't already exist)
+            if self.pkg.orig_tar_id:
+                # Determine the .orig.tar.gz file name
+                for dsc_file in self.pkg.dsc_files.keys():
+                    if dsc_file.endswith(".orig.tar.gz"):
+                        filename = dsc_file
+                dest = os.path.join(dest_dir, filename)
+                # If it doesn't exist, create a symlink
+                if not os.path.exists(dest):
+                    # Find the .orig.tar.gz in the pool
+                    q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
+                    ql = q.getresult()
+                    if not ql:
+                        utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
+                    src = os.path.join(ql[0][0], ql[0][1])
+                    os.symlink(src, dest)
+                    # Add it to the list of packages for later processing by apt-ftparchive
+                    self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
+                # if it does, update things to ensure it's not removed prematurely
+                else:
+                    self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
+
+        self.projectB.query("COMMIT WORK")
+
+ ###########################################################################
+
+    def check_override (self):
+        """Mail the maintainer when the section/priority in a sourceful
+        upload disagrees with the override database (unless the check is
+        disabled or mail is suppressed)."""
+        Subst = self.Subst
+        changes = self.pkg.changes
+        files = self.pkg.files
+        Cnf = self.Cnf
+
+        # Abandon the check if:
+        # a) it's a non-sourceful upload
+        # b) override disparity checks have been disabled
+        # c) we're not sending mail
+        if not changes["architecture"].has_key("source") or \
+           not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
+           Cnf["Dinstall::Options::No-Mail"]:
+            return
+
+        summary = ""
+        file_keys = files.keys()
+        file_keys.sort()
+        for file in file_keys:
+            # NEW packages have no override entry to compare against yet.
+            if not files[file].has_key("new") and files[file]["type"] == "deb":
+                section = files[file]["section"]
+                override_section = files[file]["override section"]
+                if section.lower() != override_section.lower() and section != "-":
+                    # Ignore this; it's a common mistake and not worth whining about
+                    if section.lower() == "non-us/main" and override_section.lower() == "non-us":
+                        continue
+                    summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
+                priority = files[file]["priority"]
+                override_priority = files[file]["override priority"]
+                if priority != override_priority and priority != "-":
+                    summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
+
+        if summary == "":
+            return
+
+        Subst["__SUMMARY__"] = summary
+        mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
+        utils.send_mail(mail_message)
+
+ ###########################################################################
+
+    def force_reject (self, files):
+        """Forcefully move files from the current directory to the
+        reject directory.  If any file already exists in the reject
+        directory it will be moved to the morgue to make way for
+        the new file."""
+
+        Cnf = self.Cnf
+
+        for file in files:
+            # Skip any files which don't exist or which we don't have permission to copy.
+            if os.access(file,os.R_OK) == 0:
+                continue
+            dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
+            try:
+                # O_EXCL means the open fails if the name already exists,
+                # atomically "claiming" the destination for us.
+                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
+            except OSError, e:
+                # File exists? Let's try and move it to the morgue
+                if errno.errorcode[e.errno] == 'EEXIST':
+                    morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
+                    try:
+                        morgue_file = utils.find_next_free(morgue_file)
+                    except utils.tried_too_hard_exc:
+                        # Something's either gone badly Pete Tong, or
+                        # someone is trying to exploit us.
+                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
+                        return
+                    utils.move(dest_file, morgue_file, perms=0660)
+                    # Retry the claim now that the old file is out of the way.
+                    try:
+                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
+                    except OSError, e:
+                        # Likewise
+                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
+                        return
+                else:
+                    raise
+            # If we got here, we own the destination file, so we can
+            # safely overwrite it.
+            utils.move(file, dest_file, 1, perms=0660)
+            os.close(dest_fd)
+
+ ###########################################################################
+
+    def do_reject (self, manual = 0, reject_message = ""):
+        """Reject an upload: optionally prompt for a rejection message
+        (manual rejections), move everything into the reject directory,
+        write the <changes>.reason file and mail the rejection.  Returns
+        1 if a manual rejection was abandoned, 0 otherwise."""
+        # If we weren't given a manual rejection message, spawn an
+        # editor so the user can add one in...
+        if manual and not reject_message:
+            temp_filename = utils.temp_filename()
+            editor = os.environ.get("EDITOR","vi")
+            answer = 'E'
+            while answer == 'E':
+                os.system("%s %s" % (editor, temp_filename))
+                temp_fh = utils.open_file(temp_filename)
+                reject_message = "".join(temp_fh.readlines())
+                temp_fh.close()
+                print "Reject message:"
+                print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
+                prompt = "[R]eject, Edit, Abandon, Quit ?"
+                answer = "XXX"
+                # Loop until a valid single-letter answer; empty input
+                # picks the bracketed default from the prompt.
+                while prompt.find(answer) == -1:
+                    answer = utils.our_raw_input(prompt)
+                    m = re_default_answer.search(prompt)
+                    if answer == "":
+                        answer = m.group(1)
+                    answer = answer[:1].upper()
+            os.unlink(temp_filename)
+            if answer == 'A':
+                return 1
+            elif answer == 'Q':
+                sys.exit(0)
+
+        print "Rejecting.\n"
+
+        Cnf = self.Cnf
+        Subst = self.Subst
+        pkg = self.pkg
+
+        reason_filename = pkg.changes_file[:-8] + ".reason"
+        reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
+
+        # Move all the files into the reject directory
+        reject_files = pkg.files.keys() + [pkg.changes_file]
+        self.force_reject(reject_files)
+
+        # If we fail here someone is probably trying to exploit the race
+        # so let's just raise an exception ...
+        if os.path.exists(reason_filename):
+            os.unlink(reason_filename)
+        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
+
+        if not manual:
+            Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
+            Subst["__MANUAL_REJECT_MESSAGE__"] = ""
+            Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
+            os.write(reason_fd, reject_message)
+            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/dak.rejected")
+        else:
+            # Build up the rejection email
+            user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
+
+            Subst["__REJECTOR_ADDRESS__"] = user_email_address
+            Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
+            Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
+            reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/dak.rejected")
+            # Write the rejection email out as the <foo>.reason file
+            os.write(reason_fd, reject_mail_message)
+
+        os.close(reason_fd)
+
+        # Send the rejection mail if appropriate
+        if not Cnf["Dinstall::Options::No-Mail"]:
+            utils.send_mail(reject_mail_message)
+
+        self.Logger.log(["rejected", pkg.changes_file])
+        return 0
+
+ ################################################################################
+
+ # Ensure that source exists somewhere in the archive for the binary
+ # upload being processed.
+ #
+ # (1) exact match => 1.0-3
+ # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
+
+    def source_exists (self, package, source_version, suites = ["any"]):
+        """Return 1 if source package 'package' exists at 'source_version'
+        (or at its binary-NMU base version) in any of 'suites', else 0.
+
+        NOTE(review): the mutable default suites=["any"] is only safe
+        because the list is never modified below."""
+        okay = 1
+        for suite in suites:
+            if suite == "any":
+                que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
+                      (package)
+            else:
+                # source must exist in suite X, or in some other suite that's
+                # mapped to X, recursively... silent-maps are counted too,
+                # unreleased-maps aren't.
+                maps = self.Cnf.ValueList("SuiteMappings")[:]
+                maps.reverse()
+                maps = [ m.split() for m in maps ]
+                maps = [ (x[1], x[2]) for x in maps
+                         if x[0] == "map" or x[0] == "silent-map" ]
+                s = [suite]
+                # Transitively expand the suite list through the mappings.
+                for x in maps:
+                    if x[1] in s and x[0] not in s:
+                        s.append(x[0])
+
+                que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, string.join(["su.suite_name = '%s'" % a for a in s], " OR "))
+            q = self.projectB.query(que)
+
+            # Reduce the query results to a list of version numbers
+            ql = map(lambda x: x[0], q.getresult())
+
+            # Try (1)
+            if source_version in ql:
+                continue
+
+            # Try (2)
+            orig_source_version = re_bin_only_nmu.sub('', source_version)
+            if orig_source_version in ql:
+                continue
+
+            # No source found...
+            okay = 0
+            break
+        return okay
+
+ ################################################################################
+
+    def in_override_p (self, package, component, suite, binary_type, file):
+        """Look up the override entry for 'package' in 'suite'/'component'
+        (for source, falling back from the dsc override type to deb/udeb).
+        Remembers the override section/priority in self.pkg.files[file]
+        and returns the query result, or None for an unknown suite."""
+        files = self.pkg.files
+
+        if binary_type == "": # must be source
+            type = "dsc"
+        else:
+            type = binary_type
+
+        # Override suite name; used for example with proposed-updates
+        if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
+            suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
+
+        # Avoid <undef> on unknown distributions
+        suite_id = database.get_suite_id(suite)
+        if suite_id == -1:
+            return None
+        component_id = database.get_component_id(component)
+        type_id = database.get_override_type_id(type)
+
+        # FIXME: nasty non-US specific hack
+        if component.lower().startswith("non-us/"):
+            component = component[7:]
+
+        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
+                                % (package, suite_id, component_id, type_id))
+        result = q.getresult()
+        # If checking for a source package fall back on the binary override type
+        if type == "dsc" and not result:
+            deb_type_id = database.get_override_type_id("deb")
+            udeb_type_id = database.get_override_type_id("udeb")
+            q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
+                                    % (package, suite_id, component_id, deb_type_id, udeb_type_id))
+            result = q.getresult()
+
+        # Remember the section and priority so we can check them later if appropriate
+        if result:
+            files[file]["override section"] = result[0][0]
+            files[file]["override priority"] = result[0][1]
+
+        return result
+
+ ################################################################################
+
+ def reject (self, str, prefix="Rejected: "):
+ if str:
+ # Unlike other rejects we add new lines first to avoid trailing
+ # new lines when this message is passed back up to a caller.
+ if self.reject_message:
+ self.reject_message += "\n"
+ self.reject_message += prefix + str
+
+ ################################################################################
+
+ def get_anyversion(self, query_result, suite):
+ anyversion=None
+ anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
+ for (v, s) in query_result:
+ if s in [ string.lower(x) for x in anysuite ]:
+ if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
+ anyversion=v
+ return anyversion
+
+ ################################################################################
+
+    def cross_suite_version_check(self, query_result, file, new_version):
+        """Ensure versions are newer than existing packages in target
+        suites and that cross-suite version checking rules as
+        set out in the conf file are satisfied.  Violations are recorded
+        via self.reject()."""
+
+        # Check versions for each target suite
+        for target_suite in self.pkg.changes["distribution"].keys():
+            must_be_newer_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)))
+            must_be_older_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)))
+            # Enforce "must be newer than target suite" even if conffile omits it
+            if target_suite not in must_be_newer_than:
+                must_be_newer_than.append(target_suite)
+            for entry in query_result:
+                existent_version = entry[0]
+                suite = entry[1]
+                if suite in must_be_newer_than and \
+                   apt_pkg.VersionCompare(new_version, existent_version) < 1:
+                    self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
+                if suite in must_be_older_than and \
+                   apt_pkg.VersionCompare(new_version, existent_version) > -1:
+                    ch = self.pkg.changes
+                    cansave = 0
+                    # A "distribution-version" mapping can let the upload be
+                    # propagated to another suite instead of being rejected.
+                    if ch.get('distribution-version', {}).has_key(suite):
+                        # we really use the other suite, ignoring the conflicting one ...
+                        addsuite = ch["distribution-version"][suite]
+
+                        add_version = self.get_anyversion(query_result, addsuite)
+                        target_version = self.get_anyversion(query_result, target_suite)
+
+                        if not add_version:
+                            # not add_version can only happen if we map to a suite
+                            # that doesn't enhance the suite we're propup'ing from.
+                            # so "propup-ver x a b c; map a d" is a problem only if
+                            # d doesn't enhance a.
+                            #
+                            # i think we could always propagate in this case, rather
+                            # than complaining. either way, this isn't a REJECT issue
+                            #
+                            # And - we really should complain to the dorks who configured dak
+                            self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
+                            self.pkg.changes.setdefault("propdistribution", {})
+                            self.pkg.changes["propdistribution"][addsuite] = 1
+                            cansave = 1
+                        elif not target_version:
+                            # not target_version is true when the package is NEW
+                            # we could just stick with the "...old version..." REJECT
+                            # for this, I think.
+                            self.reject("Won't propogate NEW packages.")
+                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
+                            # propagation would be redundant. no need to reject though.
+                            self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
+                            cansave = 1
+                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
+                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
+                            # propagate!!
+                            self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
+                            self.pkg.changes.setdefault("propdistribution", {})
+                            self.pkg.changes["propdistribution"][addsuite] = 1
+                            cansave = 1
+
+                    if not cansave:
+                        self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
+
+ ################################################################################
+
+    def check_binary_against_db(self, file):
+        """Run the cross-suite version checks for a binary upload and
+        verify the exact (package, version, architecture) isn't already
+        in the archive.  Returns the accumulated rejection message
+        (empty if everything is fine)."""
+        self.reject_message = ""
+        files = self.pkg.files
+
+        # Ensure version is sane
+        q = self.projectB.query("""
+SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
+ architecture a
+ WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
+ AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
+                                % (files[file]["package"],
+                                   files[file]["architecture"]))
+        self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
+
+        # Check for any existing copies of the file
+        q = self.projectB.query("""
+SELECT b.id FROM binaries b, architecture a
+ WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
+ AND a.id = b.architecture"""
+                                % (files[file]["package"],
+                                   files[file]["version"],
+                                   files[file]["architecture"]))
+        if q.getresult():
+            self.reject("%s: can not overwrite existing copy already in the archive." % (file))
+
+        return self.reject_message
+
+ ################################################################################
+
    def check_source_against_db(self, file):
        """Validate the uploaded source package against the database.

        Only performs the cross-suite version sanity check (there is no
        direct overwrite check here; that happens per-file in
        check_dsc_against_db).  Returns the accumulated reject message.
        """
        self.reject_message = ""
        dsc = self.pkg.dsc

        # Ensure version is sane: collect (version, suite) for every suite
        # this source package currently appears in.
        q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
        self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))

        return self.reject_message
+
+ ################################################################################
+
    # **WARNING**
    # NB: this function can remove entries from the 'files' index [if
    # the .orig.tar.gz is a duplicate of the one in the archive]; if
    # you're iterating over 'files' and call this function as part of
    # the loop, be sure to add a check to the top of the loop to
    # ensure you haven't just tried to derefernece the deleted entry.
    # **WARNING**

    def check_dsc_against_db(self, file):
        """Locate and verify every file referenced by the .dsc.

        For each entry in dsc_files, find the file in incoming, the pool,
        or the queue directories, and check its md5sum/size against the
        .dsc.  Side effects: may delete entries from self.pkg.files (see
        WARNING above) and sets self.pkg.orig_tar_gz / orig_tar_id /
        orig_tar_location.  Returns (reject_message, in_unchecked_path)
        where the second element is non-None only when the .orig.tar.gz
        was found in the Unchecked queue.
        """
        self.reject_message = ""
        files = self.pkg.files
        dsc_files = self.pkg.dsc_files
        legacy_source_untouchable = self.pkg.legacy_source_untouchable
        self.pkg.orig_tar_gz = None

        # Try and find all files mentioned in the .dsc. This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        for dsc_file in dsc_files.keys():
            found = None
            if files.has_key(dsc_file):
                actual_md5 = files[dsc_file]["md5sum"]
                actual_size = int(files[dsc_file]["size"])
                found = "%s in incoming" % (dsc_file)
                # Check the file does not already exist in the archive
                q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
                ql = q.getresult()
                # Strip out anything that isn't '%s' or '/%s$'
                # NOTE(review): removing from 'ql' while iterating it can
                # skip the element following each removal; harmless only
                # if false LIKE matches never occur back-to-back.
                for i in ql:
                    if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
                        ql.remove(i)

                # "[dak] has not broken them. [dak] has fixed a
                # brokenness. Your crappy hack exploited a bug in
                # the old dinstall.
                #
                # "(Come on! I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

                if ql:
                    # Ignore exact matches for .orig.tar.gz
                    match = 0
                    if dsc_file.endswith(".orig.tar.gz"):
                        for i in ql:
                            if files.has_key(dsc_file) and \
                               int(files[dsc_file]["size"]) == int(i[0]) and \
                               files[dsc_file]["md5sum"] == i[1]:
                                # Identical .orig.tar.gz already archived:
                                # drop it from the upload and reuse the
                                # pool copy (this is the deletion the
                                # WARNING above refers to).
                                self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
                                del files[dsc_file]
                                self.pkg.orig_tar_gz = i[2] + i[3]
                                match = 1

                    if not match:
                        self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
            elif dsc_file.endswith(".orig.tar.gz"):
                # Check in the pool
                q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
                ql = q.getresult()
                # Strip out anything that isn't '%s' or '/%s$'
                # NOTE(review): same iterate-while-removing caveat as above.
                for i in ql:
                    if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
                        ql.remove(i)

                if ql:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody. So we need to choose the right one.

                    x = ql[0]; # default to something sane in case we don't match any or have only one

                    if len(ql) > 1:
                        for i in ql:
                            old_file = i[0] + i[1]
                            # NOTE(review): bare 'utils.' here — this diff
                            # elsewhere renames dak.lib.utils to daklib.utils;
                            # confirm this module binds 'utils' accordingly.
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            old_file_fh.close()
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
                                x = i
                            else:
                                legacy_source_untouchable[i[3]] = ""

                    old_file = x[0] + x[1]
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    found = old_file
                    suite_type = x[2]
                    dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
                    # See install() in process-accepted...
                    self.pkg.orig_tar_id = x[3]
                    self.pkg.orig_tar_gz = old_file
                    if suite_type == "legacy" or suite_type == "legacy-mixed":
                        self.pkg.orig_tar_location = "legacy"
                    else:
                        self.pkg.orig_tar_location = x[4]
                else:
                    # Not there? Check the queue directories...

                    in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
                    # See process_it() in 'dak process-unchecked' for explanation of this
                    if os.path.exists(in_unchecked):
                        return (self.reject_message, in_unchecked)
                    else:
                        for dir in [ "Accepted", "New", "Byhand" ]:
                            in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
                            if os.path.exists(in_otherdir):
                                in_otherdir_fh = utils.open_file(in_otherdir)
                                actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                                in_otherdir_fh.close()
                                actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                                found = in_otherdir
                                self.pkg.orig_tar_gz = in_otherdir

                    if not found:
                        self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
                        # -1 is the "known missing" sentinel checked by callers.
                        self.pkg.orig_tar_gz = -1
                        continue
            else:
                self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
                continue
            if actual_md5 != dsc_files[dsc_file]["md5sum"]:
                self.reject("md5sum for %s doesn't match %s." % (found, file))
            if actual_size != int(dsc_files[dsc_file]["size"]):
                self.reject("size for %s doesn't match %s." % (found, file))

        return (self.reject_message, None)
+
    def do_query(self, q):
        """Run query 'q' via projectB, logging the query text and its
        wall-clock duration to stderr.  Returns the query result object."""
        sys.stderr.write("query: \"%s\" ... " % (q))
        before = time.time()
        r = self.projectB.query(q)
        time_diff = time.time()-before
        sys.stderr.write("took %.3f seconds.\n" % (time_diff))
        return r
--- /dev/null
+#!/usr/bin/env python
+
+# Utility functions
+# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
+# $Id: utils.py,v 1.73 2005-03-18 05:24:38 troup Exp $
+
+################################################################################
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
+ string, sys, tempfile, traceback
+import apt_pkg
+import database
+
+################################################################################
+
# Precompiled regexps shared by the helpers below.
re_comments = re.compile(r"\#.*")
re_no_epoch = re.compile(r"^\d+\:")
re_no_revision = re.compile(r"-[^-]+$")
re_arch_from_filename = re.compile(r"/binary-[^/]+/")
re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$")
re_issource = re.compile (r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")

# RFC822-style field parsing used by parse_changes().
re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
re_multi_line_field = re.compile(r"^\s(.*)")
# Filenames safe to pass to a shell (see check_signature).
re_taint_free = re.compile(r"^[-+~/\.\w]+$")

re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")

# Python 2 string exceptions ("raise some_string, arg").  String exceptions
# are gone in later Pythons; new code should subclass Error below instead.
changes_parse_error_exc = "Can't parse line in .changes file"
invalid_dsc_format_exc = "Invalid .dsc file"
nk_format_exc = "Unknown Format: in .changes file"
no_files_exc = "No Files: field in .dsc or .changes file."
cant_open_exc = "Can't open file"
unknown_hostname_exc = "Unknown hostname"
cant_overwrite_exc = "Permission denied; can't overwrite existent file."
file_exists_exc = "Destination file exists"
sendmail_failed_exc = "Sendmail invocation failed"
tried_too_hard_exc = "Tried too hard to find a free filename."

# Fallback configuration paths when no host-specific override is set.
default_config = "/etc/dak/dak.conf"
default_apt_config = "/etc/dak/apt.conf"
+
+################################################################################
+
class Error(Exception):
    """Base class for exceptions in this module."""
    # Deliberately empty: exists only as a common ancestor for dak errors.
    pass
+
class ParseMaintError(Error):
    """Exception raised for errors in parsing a maintainer field.

    Attributes:
        message -- explanation of the error
    """

    def __init__(self, message):
        # Populate self.args as well so str()/unpacking behave like a
        # normal Exception.
        self.args = message,
        self.message = message
+
+################################################################################
+
def open_file(filename, mode='r'):
    """Open 'filename' in 'mode' and return the file object.

    Raises the (string) exception cant_open_exc, with the filename as
    argument, instead of letting the raw IOError escape.
    """
    try:
        f = open(filename, mode)
    except IOError:
        raise cant_open_exc, filename
    return f
+
+################################################################################
+
+def our_raw_input(prompt=""):
+ if prompt:
+ sys.stdout.write(prompt)
+ sys.stdout.flush()
+ try:
+ ret = raw_input()
+ return ret
+ except EOFError:
+ sys.stderr.write("\nUser interrupt (^D).\n")
+ raise SystemExit
+
+################################################################################
+
def str_isnum(s):
    """Return 1 if every character of 's' is an ASCII digit, else 0.

    An empty string vacuously counts as numeric (returns 1).
    """
    if any(ch not in string.digits for ch in s):
        return 0
    return 1
+
+################################################################################
+
def extract_component_from_section(section):
    """Split a Section field into (section, component).

    Handles the historical non-US component layouts; consults the global
    'Cnf' configuration to decide whether e.g. "non-US/libs" is a real
    component.  An empty component defaults to "main" (or "non-US/main").
    NOTE(review): relies on a module-global 'Cnf' being set up by the
    caller before use.
    """
    component = ""

    if section.find('/') != -1:
        component = section.split('/')[0]
        if component.lower() == "non-us" and section.find('/') != -1:
            s = component + '/' + section.split('/')[1]
            if Cnf.has_key("Component::%s" % s): # Avoid e.g. non-US/libs
                component = s

    if section.lower() == "non-us":
        component = "non-US/main"

    # non-US prefix is case insensitive
    if component.lower()[:6] == "non-us":
        component = "non-US"+component[6:]

    # Expand default component
    if component == "":
        if Cnf.has_key("Component::%s" % section):
            component = section
        else:
            component = "main"
    elif component == "non-US":
        component = "non-US/main"

    return (section, component)
+
+################################################################################
+
def parse_changes(filename, signing_rules=0):
    """Parses a changes file and returns a dictionary where each field is a
key. The mandatory first argument is the filename of the .changes
file.

signing_rules is an optional argument:

  o If signing_rules == -1, no signature is required.
  o If signing_rules == 0 (the default), a signature is required.
  o If signing_rules == 1, it turns on the same strict format checking
    as dpkg-source.

The rules for (signing_rules == 1)-mode are:

  o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
    followed by any PGP header data and must end with a blank line.

  o The data section must end with a blank line and must be followed by
    "-----BEGIN PGP SIGNATURE-----".
"""

    error = ""
    changes = {}

    changes_in = open_file(filename)
    lines = changes_in.readlines()

    if not lines:
        raise changes_parse_error_exc, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
    # .dsc files...
    index = 0
    indexed_lines = {}
    for line in lines:
        index += 1
        # [:-1] strips the trailing newline.
        indexed_lines[index] = line[:-1]

    inside_signature = 0

    num_of_lines = len(indexed_lines.keys())
    index = 0
    # 'first' tracks multi-line field state: -1 = no field seen yet,
    # 1 = field just opened by a single-line match, 0 = continuing.
    first = -1
    while index < num_of_lines:
        index += 1
        line = indexed_lines[index]
        if line == "":
            if signing_rules == 1:
                # Strict mode: a blank line must be immediately followed
                # by the PGP signature block.
                index += 1
                if index > num_of_lines:
                    raise invalid_dsc_format_exc, index
                line = indexed_lines[index]
                if not line.startswith("-----BEGIN PGP SIGNATURE"):
                    raise invalid_dsc_format_exc, index
                inside_signature = 0
                break
            else:
                continue
        if line.startswith("-----BEGIN PGP SIGNATURE"):
            break
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            inside_signature = 1
            if signing_rules == 1:
                # Skip the PGP header data up to its terminating blank line.
                while index < num_of_lines and line != "":
                    index += 1
                    line = indexed_lines[index]
            continue
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
            continue
        slf = re_single_line_field.match(line)
        if slf:
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            first = 1
            continue
        # " ." is the RFC822-style encoding of a blank line in a
        # multi-line field (e.g. Description/Changes).
        if line == " .":
            changes[field] += '\n'
            continue
        mlf = re_multi_line_field.match(line)
        if mlf:
            if first == -1:
                raise changes_parse_error_exc, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            first = 0
            changes[field] += mlf.groups()[0] + '\n'
            continue
        # Anything unparseable is accumulated and raised at the end.
        error += line

    if signing_rules == 1 and inside_signature:
        raise invalid_dsc_format_exc, index

    changes_in.close()
    changes["filecontents"] = "".join(lines)

    if error:
        raise changes_parse_error_exc, error

    return changes
+
+################################################################################
+
+# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
+
def build_file_list(changes, is_a_dsc=0):
    """Parse the Files: field of a parsed .changes/.dsc dictionary.

    Returns a dict mapping filename -> {md5sum, size, section, priority,
    component}.  For a .dsc (is_a_dsc true) each entry has only
    md5/size/name, and no Format range check is applied.  Raises the
    module's string exceptions on missing/unknown Files or Format fields.
    """
    files = {}

    # Make sure we have a Files: field to parse...
    if not changes.has_key("files"):
        raise no_files_exc

    # Make sure we recognise the format of the Files: field
    format = changes.get("format", "")
    if format != "":
        format = float(format)
        if not is_a_dsc and (format < 1.5 or format > 2.0):
            raise nk_format_exc, format

    # Parse each entry/line:
    for i in changes["files"].split('\n'):
        if not i:
            break
        s = i.split()
        section = priority = ""
        try:
            if is_a_dsc:
                (md5, size, name) = s
            else:
                (md5, size, section, priority, name) = s
        except ValueError:
            raise changes_parse_error_exc, i

        if section == "":
            section = "-"
        if priority == "":
            priority = "-"

        (section, component) = extract_component_from_section(section)

        files[name] = Dict(md5sum=md5, size=size, section=section,
                           priority=priority, component=component)

    return files
+
+################################################################################
+
def force_to_utf8(s):
    """Forces a string to UTF-8. If the string isn't already UTF-8,
it's assumed to be ISO-8859-1."""
    try:
        # Valid UTF-8 already?  Then return the original bytes untouched.
        unicode(s, 'utf-8')
        return s
    except UnicodeError:
        latin1_s = unicode(s,'iso8859-1')
        return latin1_s.encode('utf-8')
+
def rfc2047_encode(s):
    """Encodes a (header) string per RFC2047 if necessary. If the
string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
    try:
        # codecs.lookup() returns (encoder, decoder, reader, writer) on
        # this Python; [1](s) attempts an ASCII decode.  Pure ASCII needs
        # no RFC2047 encoding at all.
        codecs.lookup('ascii')[1](s)
        return s
    except UnicodeError:
        pass
    try:
        codecs.lookup('utf-8')[1](s)
        # 998 = maximum RFC2822 line length for the encoded header.
        h = email.Header.Header(s, 'utf-8', 998)
        return str(h)
    except UnicodeError:
        h = email.Header.Header(s, 'iso-8859-1', 998)
        return str(h)
+
+################################################################################
+
+# <Culus> 'The standard sucks, but my tool is supposed to interoperate
+# with it. I know - I'll fix the suckage and make things
+# incompatible!'
+
def fix_maintainer (maintainer):
    """Parses a Maintainer or Changed-By field and returns:
  (1) an RFC822 compatible version,
  (2) an RFC2047 compatible version,
  (3) the name
  (4) the email

The name is forced to UTF-8 for both (1) and (3). If the name field
contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
switched to 'email (name)' format."""
    maintainer = maintainer.strip()
    if not maintainer:
        return ('', '', '', '')

    # NOTE: local 'email' shadows the imported email package within this
    # function; email.Header is only used by rfc2047_encode() above.
    if maintainer.find("<") == -1:
        # Bare address, no name part.
        email = maintainer
        name = ""
    elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
        email = maintainer[1:-1]
        name = ""
    else:
        m = re_parse_maintainer.match(maintainer)
        if not m:
            raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
        name = m.group(1)
        email = m.group(2)

    # Get an RFC2047 compliant version of the name
    rfc2047_name = rfc2047_encode(name)

    # Force the name to be UTF-8
    name = force_to_utf8(name)

    if name.find(',') != -1 or name.find('.') != -1:
        rfc822_maint = "%s (%s)" % (email, name)
        rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
    else:
        rfc822_maint = "%s <%s>" % (name, email)
        rfc2047_maint = "%s <%s>" % (rfc2047_name, email)

    # buildd_* pseudo-addresses are allowed to lack an '@'.
    if email.find("@") == -1 and email.find("buildd_") != 0:
        raise ParseMaintError, "No @ found in email address part."

    return (rfc822_maint, rfc2047_maint, name, email)
+
+################################################################################
+
# sendmail wrapper, takes _either_ a message string or a file as arguments
def send_mail (message, filename=""):
    """Pipe 'message' (or the contents of 'filename') into the configured
    sendmail command.  Raises sendmail_failed_exc on a non-zero exit.
    """
    # If we've been passed a string dump it into a temporary file
    # NOTE(review): tempfile.mktemp() is race-prone (name is generated
    # before the O_EXCL open); the 0700 O_EXCL open mitigates but
    # tempfile.mkstemp() would be the safe replacement.
    if message:
        filename = tempfile.mktemp()
        fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
        os.write (fd, message)
        os.close (fd)

    # Invoke sendmail
    # NOTE(review): 'filename' is interpolated into a shell command line;
    # safe here only because it is either our own tempfile or a
    # caller-controlled path.
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    if (result != 0):
        raise sendmail_failed_exc, output

    # Clean up any temporary files
    if message:
        os.unlink (filename)
+
+################################################################################
+
def poolify (source, component):
    """Return the pool sub-directory for 'source' in 'component',
    e.g. ("dpkg", "main") -> "main/d/dpkg/" and lib* packages get a
    four-character prefix directory ("libfoo" -> ".../libf/libfoo/").
    """
    if component:
        component += '/'
    # FIXME: this is nasty
    component = component.lower().replace("non-us/", "non-US/")
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    else:
        return component + source[:1] + '/' + source + '/'
+
+################################################################################
+
def move (src, dest, overwrite = 0, perms = 0664):
    """Move 'src' to 'dest' (a file or directory), creating missing
    destination directories (mode 02775) and chmod'ing the result to
    'perms'.  Refuses to overwrite an existing file unless 'overwrite'
    is set; aborts the program via fubar() on conflict.
    """
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Temporarily clear the umask so the 02775 mode is applied exactly.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
        os.umask(umask)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        if not overwrite:
            fubar("Can't move %s to %s - file already exists." % (src, dest))
        else:
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    # copy2 + unlink rather than rename so moves work across filesystems.
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
    os.unlink(src)
+
def copy (src, dest, overwrite = 0, perms = 0664):
    """Copy 'src' to 'dest' (a file or directory), creating missing
    destination directories (mode 02775) and chmod'ing the result to
    'perms'.  Unlike move(), conflicts raise the module's string
    exceptions (file_exists_exc / cant_overwrite_exc) instead of
    aborting the program.
    """
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Temporarily clear the umask so the 02775 mode is applied exactly.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
        os.umask(umask)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        if not overwrite:
            raise file_exists_exc
        else:
            if not os.access(dest, os.W_OK):
                raise cant_overwrite_exc
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
+
+################################################################################
+
def where_am_i ():
    """Return this host's database name: the per-host
    Config::<fqdn>::DatabaseHostname setting if present, otherwise the
    resolved FQDN itself."""
    res = socket.gethostbyaddr(socket.gethostname())
    database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
    if database_hostname:
        return database_hostname
    else:
        return res[0]
+
def which_conf_file ():
    """Return the dak config path: the per-host Config::<fqdn>::DakConfig
    override if set, otherwise default_config."""
    res = socket.gethostbyaddr(socket.gethostname())
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]
    else:
        return default_config
+
def which_apt_conf_file ():
    """Return the apt config path: the per-host Config::<fqdn>::AptConfig
    override if set, otherwise default_apt_config."""
    res = socket.gethostbyaddr(socket.gethostname())
    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]
    else:
        return default_apt_config
+
+################################################################################
+
+# Escape characters which have meaning to SQL's regex comparison operator ('~')
+# (woefully incomplete)
+
def regex_safe(s):
    """Escape characters which have meaning to SQL's regex comparison
    operator ('~').  (Woefully incomplete: only '+' and '.'.)
    """
    return s.replace('+', '\\\\+').replace('.', '\\\\.')
+
+################################################################################
+
# Perform a substitution on a template file
def TemplateSubst(map, filename):
    """Read 'filename' and replace every occurrence of each key of 'map'
    with its value.  Plain text replacement, applied in dict-key order.
    NOTE: parameters 'map' and local 'file' shadow builtins.
    """
    file = open_file(filename)
    template = file.read()
    for x in map.keys():
        template = template.replace(x,map[x])
    file.close()
    return template
+
+################################################################################
+
def fubar(msg, exit_code=1):
    """Print 'msg' as a fatal error ("E: ...") and exit the program."""
    sys.stderr.write("E: %s\n" % (msg))
    sys.exit(exit_code)
+
def warn(msg):
    """Print 'msg' as a non-fatal warning ("W: ...") on stderr."""
    sys.stderr.write("W: %s\n" % (msg))
+
+################################################################################
+
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
def whoami ():
    # GECOS field (index 4), first comma-separated component, sans '.'.
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
+
+################################################################################
+
def size_type(c):
    """Render a byte count 'c' as a short human-readable string.

    Scales by 1024 into " KB" / " MB" once the value exceeds 10240 in
    the current unit; at most two divisions are applied.
    """
    for unit in (" B", " KB", " MB"):
        if c <= 10240 or unit == " MB":
            break
        c = c / 1024
    return ("%d%s" % (c, unit))
+
+################################################################################
+
def cc_fix_changes(changes):
    """Rewrite changes["architecture"] in place: the space-separated
    Architecture string becomes a dict mapping each arch to 1.  If the
    field was absent, an empty dict is installed.
    """
    arch_line = changes.get("architecture", "")
    if arch_line:
        del changes["architecture"]
    changes["architecture"] = {}
    for arch in arch_line.split():
        changes["architecture"][arch] = 1
+
# Sort by source name, source version, 'have source', and then by filename
def changes_compare (a, b):
    """cmp()-style comparator for two .changes *filenames*.

    Parses both files; an unparseable file sorts first/last (the bare
    excepts are deliberate — sorting must never raise).
    """
    try:
        a_changes = parse_changes(a)
    except:
        return -1

    try:
        b_changes = parse_changes(b)
    except:
        return 1

    # Normalise the architecture fields into dicts for the source check.
    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)
    if q:
        return q

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)
    if q:
        return q

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
        return -1
    elif b_has_source and not a_has_source:
        return 1

    # Fall back to sort by filename
    return cmp(a, b)
+
+################################################################################
+
def find_next_free(dest, too_many=100):
    """Return 'dest' if it doesn't exist, otherwise the first
    'dest.<N>' variant tried (N = 0, 1, ...).  Gives up after
    'too_many' attempts by raising tried_too_hard_exc.
    """
    attempt = 0
    candidate = dest
    while os.path.exists(candidate) and attempt < too_many:
        candidate = "%s.%s" % (dest, attempt)
        attempt += 1
    if attempt >= too_many:
        raise tried_too_hard_exc
    return candidate
+
+################################################################################
+
def result_join(original, sep='\t'):
    """Join the fields of a (database) result row into a single string.

    NULL columns (None) are rendered as empty strings.

    original -- any sequence of string-or-None values (e.g. a pg row)
    sep      -- separator string (default: TAB)
    """
    # Fixes from review: the old body used the Python-2-only xrange(),
    # shadowed the builtin 'list', and compared with '== None'.
    fields = []
    for value in original:
        if value is None:
            fields.append("")
        else:
            fields.append(value)
    return sep.join(fields)
+
+################################################################################
+
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Return 'str' with 'prefix' prepended to each stripped line.

    Blank lines are dropped unless 'include_blank_lines' is true.
    The result carries no trailing newline.
    """
    kept = []
    for raw_line in str.split('\n'):
        stripped = raw_line.strip()
        if stripped or include_blank_lines:
            kept.append("%s%s" % (prefix, stripped))
    return "\n".join(kept)
+
+################################################################################
+
def validate_changes_file_arg(filename, require_changes=1):
    """'filename' is either a .changes or .dak file. If 'filename' is a
.dak file, it's changed to be the corresponding .changes file. The
function then checks if the .changes file a) exists and b) is
readable and returns the .changes filename if so. If there's a
problem, the next action depends on the option 'require_changes'
argument:

  o If 'require_changes' == -1, errors are ignored and the .changes
    filename is returned.
  o If 'require_changes' == 0, a warning is given and 'None' is returned.
  o If 'require_changes' == 1, a fatal error is raised.
"""
    error = None

    orig_filename = filename
    if filename.endswith(".dak"):
        # ".dak" is 4 characters; the old code stripped 6 (a leftover
        # from the ".katie" suffix rename), turning "foo_1.0_i386.dak"
        # into the bogus "foo_1.0_i3.changes".
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    else:
        if not os.access(filename,os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
            else:
                error = "file not found"

    if error:
        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
            return None
        else: # We only care about the .dak file
            return filename
    else:
        return filename
+
+################################################################################
+
def real_arch(arch):
    """Return true for a concrete architecture, i.e. anything other than
    the pseudo-architectures "source" and "all"."""
    return arch not in ("source", "all")
+
+################################################################################
+
def join_with_commas_and(list):
    """Join a list for prose: "nothing", "a", "a and b", "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return ", ".join(list[:-1]) + " and " + list[-1]
+
+################################################################################
+
def pp_deps(deps):
    """Pretty-print a dependency list.

    Each atom is a (package, version, constraint) tuple; atoms with a
    constraint render as "pkg (op version)".  Alternatives are joined
    with " |".
    """
    rendered = []
    for (pkg, version, constraint) in deps:
        if constraint:
            rendered.append("%s (%s %s)" % (pkg, constraint, version))
        else:
            rendered.append(pkg)
    return " |".join(rendered)
+
+################################################################################
+
def get_conf():
    """Accessor for the module-global apt_pkg configuration object 'Cnf'
    (set up elsewhere before use)."""
    return Cnf
+
+################################################################################
+
# Handle -a, -c and -s arguments; returns them as SQL constraints
def parse_args(Options):
    """Turn the -s/-c/-a option values into SQL constraint fragments.

    Returns (con_suites, con_architectures, con_components, check_source)
    where each con_* is either "" or an "AND ... IN (...)" clause and
    check_source flags whether 'source' was requested as an architecture.
    Aborts via fubar() when an option is given but no value is valid.
    """
    # Review fix: these lookups used the pre-rename 'dak.lib.database'
    # module path; this module imports it as 'database' (see the file's
    # import block), so the old calls raised NameError at runtime.
    # Process suite
    if Options["Suite"]:
        suite_ids_list = []
        for suite in split_args(Options["Suite"]):
            suite_id = database.get_suite_id(suite)
            if suite_id == -1:
                warn("suite '%s' not recognised." % (suite))
            else:
                suite_ids_list.append(suite_id)
        if suite_ids_list:
            con_suites = "AND su.id IN (%s)" % ", ".join(map(str, suite_ids_list))
        else:
            fubar("No valid suite given.")
    else:
        con_suites = ""

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for component in split_args(Options["Component"]):
            component_id = database.get_component_id(component)
            if component_id == -1:
                warn("component '%s' not recognised." % (component))
            else:
                component_ids_list.append(component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join(map(str, component_ids_list))
        else:
            fubar("No valid component given.")
    else:
        con_components = ""

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        arch_ids_list = []
        check_source = 0
        for architecture in split_args(Options["Architecture"]):
            if architecture == "source":
                check_source = 1
            else:
                architecture_id = database.get_architecture_id(architecture)
                if architecture_id == -1:
                    warn("architecture '%s' not recognised." % (architecture))
                else:
                    arch_ids_list.append(architecture_id)
        if arch_ids_list:
            con_architectures = "AND a.id IN (%s)" % ", ".join(map(str, arch_ids_list))
        else:
            if not check_source:
                fubar("No valid architecture given.")
    else:
        check_source = 1

    return (con_suites, con_architectures, con_components, check_source)
+
+################################################################################
+
# Inspired(tm) by Bryn Keller's print_exc_plus (See
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)

def print_exc():
    """Print the current traceback plus, for every frame, a dump of its
    local variables — an extended traceback for post-mortem debugging."""
    # Walk to the innermost frame of the active exception.
    tb = sys.exc_info()[2]
    while tb.tb_next:
        tb = tb.tb_next
    stack = []
    frame = tb.tb_frame
    while frame:
        stack.append(frame)
        frame = frame.f_back
    # Oldest frame first, matching normal traceback order.
    stack.reverse()
    traceback.print_exc()
    for frame in stack:
        print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
                                               frame.f_code.co_filename,
                                               frame.f_lineno)
        for key, value in frame.f_locals.items():
            print "\t%20s = " % key,
            try:
                print value
            except:
                # repr()/str() of a local may itself raise; never let the
                # diagnostic printer die.
                print "<unable to print>"
+
+################################################################################
+
def try_with_debug(function):
    """Call 'function' and, on any exception other than SystemExit,
    dump the extended traceback via print_exc().  SystemExit is
    re-raised so normal program exits still work."""
    try:
        function()
    except SystemExit:
        raise
    except:
        print_exc()
+
+################################################################################
+
# Function for use in sorting lists of architectures.
def arch_compare_sw(a, b):
    """cmp()-style comparator: sorts normally except that 'source'
    dominates (sorts before) all other architectures."""
    if a == b == "source":
        return 0
    if a == "source":
        return -1
    if b == "source":
        return 1
    return cmp (a, b)
+
+################################################################################
+
# Split command line arguments which can be separated by either commas
# or whitespace.
def split_args(s, dwim=1):
    """Split the argument string 's' on commas (if any are present) or
    otherwise on whitespace.

    With 'dwim' set, a trailing comma is treated as a fatal user error:
    it usually means someone wrote 'dak ls -a i386, m68k foo' and the
    stray 'm68k' would silently become a separate argument.
    """
    if "," not in s:
        return s.split()
    if dwim and s.endswith(","):
        fubar("split_args: found trailing comma, spurious space maybe?")
    return s.split(",")
+
+################################################################################
+
+def Dict(**dict): return dict
+
+########################################
+
# Our very own version of commands.getouputstatus(), hacked to support
# gpgv's status fd.
def gpgv_get_status_output(cmd, status_read, status_write):
    """Run shell command 'cmd' (a gpgv invocation) and return
    (output, status, exit_status), where 'output' is the merged
    stdout+stderr, 'status' is everything gpgv wrote to its --status-fd
    (the 'status_write' end of the pipe, which is deliberately kept open
    in the child), and 'exit_status' is the raw waitpid() status.
    """
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
    pid = os.fork()
    if pid == 0:
        # Child
        # Rebuild fds 0/1/2 from the pipes: os.dup() returns the lowest
        # free descriptor, so closing then dup'ing assigns exactly 0, 1, 2.
        os.close(0)
        os.close(1)
        os.dup(p2cread)
        os.dup(c2pwrite)
        os.close(2)
        os.dup(errin)
        # Close everything else EXCEPT status_write, which gpgv's
        # --status-fd option writes to.
        for i in range(3, 256):
            if i != status_write:
                try:
                    os.close(i)
                except:
                    pass
        try:
            os.execvp(cmd[0], cmd)
        finally:
            os._exit(1)

    # Parent
    os.close(p2cread)
    # NOTE(review): these dup2 calls overwrite the *write* ends with the
    # read ends so the select() loop below can poll c2pwrite/errin as
    # read fds — confusing but intentional (inherited from the
    # commands-module hack this is based on).
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    while 1:
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
        more_data = []
        for fd in i:
            r = os.read(fd, 8196)
            if len(r) > 0:
                more_data.append(fd)
                if fd == c2pwrite or fd == errin:
                    output += r
                elif fd == status_read:
                    status += r
                else:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
        if not more_data:
            # All pipes hit EOF: reap the child and close everything.
            pid, exit_status = os.waitpid(pid, 0)
            try:
                os.close(status_write)
                os.close(status_read)
                os.close(c2pread)
                os.close(c2pwrite)
                os.close(p2cwrite)
                os.close(errin)
                os.close(errout)
            except:
                pass
            break

    return output, status, exit_status
+
+############################################################
+
+
def check_signature (sig_filename, reject, data_filename="", keyrings=None):
    """Check the signature of a file and return the fingerprint if the
signature is valid or 'None' if it's not. The first argument is the
filename whose signature should be checked. The second argument is a
reject function and is called when an error is found. The reject()
function must allow for two arguments: the first is the error message,
the second is an optional prefix string. It's possible for reject()
to be called more than once during an invocation of check_signature().
The third argument is optional and is the name of the files the
detached signature applies to. The fourth argument is optional and is
a *list* of keyrings to use.
"""

    # Ensure the filename contains no shell meta-characters or other badness
    # (the filenames are interpolated into a gpgv command line below).
    if not re_taint_free.match(sig_filename):
        reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return None

    if data_filename and not re_taint_free.match(data_filename):
        reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return None

    # Fall back to the configured default keyrings when none were supplied.
    if not keyrings:
        keyrings = (Cnf["Dinstall::PGPKeyring"], Cnf["Dinstall::GPGKeyring"])

    # Build the command line
    status_read, status_write = os.pipe();
    cmd = "gpgv --status-fd %s" % (status_write)
    for keyring in keyrings:
        cmd += " --keyring %s" % (keyring)
    cmd += " %s %s" % (sig_filename, data_filename)
    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    # Each status line looks like "[GNUPG:] KEYWORD args...".  Collect the
    # keywords into a dict (keyword -> arg list) for inspection below.
    keywords = {}
    bad = internal_error = ""
    for line in status.split('\n'):
        line = line.strip()
        if line == "":
            continue
        split = line.split()
        if len(split) < 2:
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
            continue
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
            continue
        args = split[2:]
        # NODATA and SIGEXPIRED may legitimately appear more than once;
        # any other duplicated token is treated as an internal error.
        if keywords.has_key(keyword) and (keyword != "NODATA" and keyword != "SIGEXPIRED"):
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            continue
        else:
            keywords[keyword] = args

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        reject("internal error while performing signature check on %s." % (sig_filename))
        reject(internal_error, "")
        reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
        return None

    # Now check for obviously bad things in the processed output
    # (all checks are run so every problem is reported, not just the first).
    if keywords.has_key("SIGEXPIRED"):
        reject("The key used to sign %s has expired." % (sig_filename))
        bad = 1
    if keywords.has_key("KEYREVOKED"):
        reject("The key used to sign %s has been revoked." % (sig_filename))
        bad = 1
    if keywords.has_key("BADSIG"):
        reject("bad signature on %s." % (sig_filename))
        bad = 1
    # ERRSIG accompanied by NO_PUBKEY just means the key is unknown;
    # that case gets its own, more helpful message below.
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        reject("failed to check signature on %s." % (sig_filename))
        bad = 1
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        if len(args) >= 1:
            key = args[0]
        reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
        bad = 1
    if keywords.has_key("BADARMOR"):
        reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
        bad = 1
    if keywords.has_key("NODATA"):
        reject("no signature found in %s." % (sig_filename))
        bad = 1

    if bad:
        return None

    # Next check gpgv exited with a zero return code
    if exit_status:
        reject("gpgv failed while checking %s." % (sig_filename))
        # Prefer the status-fd output for diagnostics; fall back to stdout/err.
        if status.strip():
            reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
        else:
            reject(prefix_multi_line_string(output, " [GPG output:] "), "")
        return None

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
        bad = 1
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            reject("internal error while checking signature on %s." % (sig_filename))
            bad = 1
        else:
            # First VALIDSIG argument is the fingerprint of the signing key.
            fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
        bad = 1
    if not keywords.has_key("SIG_ID"):
        reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
        bad = 1

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
            bad = 1

    if bad:
        return None
    else:
        return fingerprint
+
+################################################################################
+
+# Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
+
def wrap(paragraph, max_length, prefix=""):
    """Word-wrap 'paragraph' to lines of at most 'max_length' characters.

Every line after a break is preceded by 'prefix'.  Words longer than
'max_length' are placed on a line of their own rather than split.
Returns the wrapped string (no trailing newline after the last word)."""
    line = ""
    s = ""
    have_started = 0
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Over-long word: flush any pending line, then emit the word
            # on a line of its own.
            if have_started:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            # Bug fix: reset the accumulator.  Previously 'line' was left
            # holding the already-flushed text (and 'have_started' stayed
            # set), so that text was emitted a second time later on.
            line = ""
            have_started = 0
        else:
            if have_started:
                new_length = len(line) + word_size + 1
                if new_length > max_length:
                    # Word doesn't fit; break the line here.
                    s += line + '\n' + prefix
                    line = word
                else:
                    line += ' ' + word
            else:
                line = word
                have_started = 1

    # Flush whatever is still pending.
    if have_started:
        s += line

    return s
+
+################################################################################
+
+# Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
+# Returns fixed 'src'
def clean_symlink (src, dest, root):
    """Relativize the absolute symlink 'src' -> 'dest' with respect to
'root' and return the fixed 'src' path."""
    # Strip the first occurrence of the root from both endpoints.
    stripped_src = src.replace(root, '', 1)
    dest_dir = os.path.dirname(dest.replace(root, '', 1))
    # One "../" per path component of the link's directory.
    depth = len(dest_dir.split('/'))
    return ('../' * depth) + stripped_src
+
+################################################################################
+
def temp_filename(directory=None, dotprefix=None, perms=0o700):
    """Return a secure and unique filename by pre-creating it.
If 'directory' is non-null, it will be the directory the file is pre-created in.
If 'dotprefix' is non-null, the filename will be prefixed with a '.'."""

    # Redirecting tempfile's directory means temporarily mutating the
    # process-global tempfile.tempdir; restore it in a 'finally' so an
    # exception from os.open() (e.g. EEXIST/EACCES) can't leak the change.
    old_tempdir = tempfile.tempdir
    if directory:
        tempfile.tempdir = directory
    try:
        filename = tempfile.mktemp()

        if dotprefix:
            filename = "%s/.%s" % (os.path.dirname(filename), os.path.basename(filename))
        # O_EXCL guarantees we (and not a racing process) created the file,
        # closing the classic mktemp() race.
        fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, perms)
        os.close(fd)
    finally:
        if directory:
            tempfile.tempdir = old_tempdir

    return filename
+
+################################################################################
+
# Module initialisation: build the global apt configuration object 'Cnf'.
apt_pkg.init()

Cnf = apt_pkg.newConfiguration()
# Load the compiled-in default configuration first ...
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# ... then let the site-specific configuration file (if different)
# override those defaults.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
+
+################################################################################