2008-05-02 Joerg Jaspert <joerg@debian.org>
+ * config/debian/extensions.py: used reindent.py from the Python
+   2.x source examples to make all dak code use the same
+   indentation style instead of a mix of 4 spaces / 1 tab (an
+   illustrative sketch of the invocation follows the file list
+   below).
+ * dak/check_archive.py: likewise
+ * dak/check_overrides.py: likewise
+ * dak/check_proposed_updates.py: likewise
+ * dak/clean_proposed_updates.py: likewise
+ * dak/clean_queues.py: likewise
+ * dak/clean_suites.py: likewise
+ * dak/compare_suites.py: likewise
+ * dak/control_overrides.py: likewise
+ * dak/control_suite.py: likewise
+ * dak/cruft_report.py: likewise
+ * dak/dak.py: likewise
+ * dak/decode_dot_dak.py: likewise
+ * dak/examine_package.py: likewise
+ * dak/find_null_maintainers.py: likewise
+ * dak/generate_index_diffs.py: likewise
+ * dak/generate_releases.py: likewise
+ * dak/import_archive.py: likewise
+ * dak/import_keyring.py: likewise
+ * dak/import_ldap_fingerprints.py: likewise
+ * dak/import_users_from_passwd.py: likewise
+ * dak/init_db.py: likewise
+ * dak/init_dirs.py: likewise
+ * dak/ls.py: likewise
+ * dak/make_maintainers.py: likewise
+ * dak/make_overrides.py: likewise
+ * dak/make_suite_file_list.py: likewise
+ * dak/mirror_split.py: likewise
+ * dak/new_security_install.py: likewise
+ * dak/override.py: likewise
+ * dak/poolize.py: likewise
+ * dak/process_accepted.py: likewise
+ * dak/process_new.py: likewise
+ * dak/process_unchecked.py: likewise
+ * dak/queue_report.py: likewise
+ * dak/reject_proposed_updates.py: likewise
+ * dak/rm.py: likewise
+ * dak/security_install.py: likewise
+ * dak/show_new.py: likewise
+ * dak/split_done.py: likewise
+ * dak/stats.py: likewise
+ * dak/symlink_dists.py: likewise
+ * dak/test/001/test.py: likewise
+ * dak/test/002/test.py: likewise
+ * dak/transitions.py: likewise
+ * daklib/extensions.py: likewise
+ * daklib/logging.py: likewise
+ * daklib/queue.py: likewise
+ * daklib/utils.py: likewise
+ * scripts/debian/insert_missing_changedby.py: likewise
+
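  An illustrative sketch of the reindent invocation mentioned above; it
  assumes reindent.py has been copied from the CPython Tools/scripts
  examples into the top of the dak checkout and is not the literal command
  that was run:

      # -r recurses into subdirectories, -n skips the .bak backup files,
      # -v prints every file that gets rewritten.
      import os
      for tree in ("config/debian", "dak", "daklib", "scripts/debian"):
          os.system("python reindent.py -r -n -v %s" % (tree))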
	* dak/process_new.py (recheck): Make the path check more robust,
	  so we no longer have to keep separate process_new trees for the
	  security archive and the normal archive.
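  A purely illustrative note, not the actual dak change: one way such a
  path check can be made independent of which tree the queue lives in is
  to resolve candidate files against the configured queue directory, for
  example:

      import os

      def in_queue_dir(filename, queue_dir):
          # Hypothetical helper, for illustration only: accept a file only
          # if it really resolves to a path below the configured queue
          # directory, whichever archive tree that is.
          root = os.path.realpath(queue_dir)
          full = os.path.realpath(os.path.join(root, os.path.basename(filename)))
          return full.startswith(root + os.sep) and os.path.exists(full)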
if "source" not in changes["architecture"] or "unstable" not in changes["distribution"]:
return
- # Also only check if there is a file defined (and existant) with
+ # Also only check if there is a file defined (and existant) with
# checks.
transpath = Cnf.get("Dinstall::Reject::ReleaseTransitions", "")
if transpath == "" or not os.path.exists(transpath):
return
-
+
# Parse the yaml file
sourcefile = file(transpath, 'r')
sourcecontent = sourcefile.read()
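  A hedged sketch, not part of the hunk above: once the transitions file
  has been read, its contents would typically be handed to PyYAML and
  walked entry by entry. The field names used here are assumptions for
  illustration, not taken from this diff:

      import yaml

      transitions = yaml.safe_load(sourcecontent) or {}
      for trans, t in transitions.items():
          # 'source' and 'new' are hypothetical keys, shown only to
          # illustrate the shape of the per-transition records.
          source = t.get("source")
          expected = t.get("new")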
if fpr == "5906F687BD03ACAD0D8E602EFCF37657" or uid == "iwj":
reject("Upload blocked due to hijack attempt 2008/03/19")
- # NB: 1.15.0, 1.15.2 signed by this key targetted at unstable
- # have been made available in the wild, and should remain
- # blocked until Debian's dpkg has revved past those version
- # numbers
+ # NB: 1.15.0, 1.15.2 signed by this key targetted at unstable
+ # have been made available in the wild, and should remain
+ # blocked until Debian's dpkg has revved past those version
+ # numbers
oldfn()
print "Missing files:"
db_files.clear()
for i in ql:
- filename = os.path.abspath(i[0] + i[1])
+ filename = os.path.abspath(i[0] + i[1])
db_files[filename] = ""
if os.access(filename, os.R_OK) == 0:
- if i[2]:
+ if i[2]:
print "(last used: %s) %s" % (i[2], filename)
- else:
+ else:
print "%s" % (filename)
-
+
filename = Cnf["Dir::Override"]+'override.unreferenced'
if os.path.exists(filename):
print "Checking file md5sums & sizes..."
for i in ql:
- filename = os.path.abspath(i[0] + i[1])
+ filename = os.path.abspath(i[0] + i[1])
db_md5sum = i[2]
db_size = int(i[3])
try:
db_files.clear()
count = 0
for i in ql:
- filename = os.path.abspath(i[0] + i[1])
+ filename = os.path.abspath(i[0] + i[1])
if os.access(filename, os.R_OK):
file = daklib.utils.open_file(filename)
current_file = filename
# q = projectB.query("BEGIN WORK")
for i in q_files:
- filename = os.path.normpath(i[0] + i[1])
+ filename = os.path.normpath(i[0] + i[1])
# file_id = i[2]
if os.access(filename, os.R_OK) == 0:
daklib.utils.warn("%s: doesn't exist." % (filename))
Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Check-Archive::Options::Help")]
for i in [ "help" ]:
- if not Cnf.has_key("Check-Archive::Options::%s" % (i)):
- Cnf["Check-Archive::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Check-Archive::Options::%s" % (i)):
+ Cnf["Check-Archive::Options::%s" % (i)] = ""
args = apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
Options = Cnf.SubTree("Check-Archive::Options")
if Options["Help"]:
- usage()
+ usage()
if len(args) < 1:
daklib.utils.warn("dak check-archive requires at least one argument")
if not src_packages.has_key(package) or src_packages[package]:
continue
src_packages[package] = 1
-
+
Logger.log(["add missing override", osuite, component,
type, package, "source", sections[i[2]], i[3]])
if not Options["No-Action"]:
suiteids = []
for i in q.getresult():
suiteids.append(i[0])
-
+
if len(suiteids) != len(suites) or len(suiteids) < 1:
daklib.utils.fubar("Couldn't find id's of all suites: %s" % suites)
if __name__ == '__main__':
main()
-
################################################################################
# | > amd64 is more mature than even some released architectures
-# |
+# |
# | This might be true of the architecture, unfortunately it seems to be the
# | exact opposite for most of the people involved with it.
-#
+#
# <1089213290.24029.6.camel@descent.netsplit.com>
################################################################################
('v',"verbose","Check-Proposed-Updates::Options::Verbose"),
('h',"help","Check-Proposed-Updates::Options::Help")]
for i in [ "debug", "quiet", "verbose", "help" ]:
- if not Cnf.has_key("Check-Proposed-Updates::Options::%s" % (i)):
- Cnf["Check-Proposed-Updates::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Check-Proposed-Updates::Options::%s" % (i)):
+ Cnf["Check-Proposed-Updates::Options::%s" % (i)] = ""
arguments = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
Options = Cnf.SubTree("Check-Proposed-Updates::Options")
('s', "suite", "Clean-Proposed-Updates::Options::Suite", "HasArg"),
('n', "no-action", "Clean-Proposed-Updates::Options::No-Action"),]
for i in [ "debug", "verbose", "help", "no-action" ]:
- if not Cnf.has_key("Clean-Proposed-Updates::Options::%s" % (i)):
- Cnf["Clean-Proposed-Updates::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Clean-Proposed-Updates::Options::%s" % (i)):
+ Cnf["Clean-Proposed-Updates::Options::%s" % (i)] = ""
# suite defaults to proposed-updates
if not Cnf.has_key("Clean-Proposed-Updates::Options::Suite"):
if __name__ == '__main__':
main()
-
Cnf = daklib.utils.get_conf()
for i in ["Help", "Incoming", "No-Action", "Verbose" ]:
- if not Cnf.has_key("Clean-Queues::Options::%s" % (i)):
- Cnf["Clean-Queues::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Clean-Queues::Options::%s" % (i)):
+ Cnf["Clean-Queues::Options::%s" % (i)] = ""
if not Cnf.has_key("Clean-Queues::Options::Days"):
- Cnf["Clean-Queues::Options::Days"] = "14"
+ Cnf["Clean-Queues::Options::Days"] = "14"
Arguments = [('h',"help","Clean-Queues::Options::Help"),
('d',"days","Clean-Queues::Options::Days", "IntLevel"),
Options = Cnf.SubTree("Clean-Queues::Options")
if Options["Help"]:
- usage()
+ usage()
init()
Cnf = daklib.utils.get_conf()
for i in ["Help", "No-Action" ]:
- if not Cnf.has_key("Clean-Suites::Options::%s" % (i)):
- Cnf["Clean-Suites::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Clean-Suites::Options::%s" % (i)):
+ Cnf["Clean-Suites::Options::%s" % (i)] = ""
Arguments = [('h',"help","Clean-Suites::Options::Help"),
('n',"no-action","Clean-Suites::Options::No-Action")]
if __name__ == '__main__':
main()
-
Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Compare-Suites::Options::Help")]
for i in [ "help" ]:
- if not Cnf.has_key("Compare-Suites::Options::%s" % (i)):
- Cnf["Compare-Suites::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Compare-Suites::Options::%s" % (i)):
+ Cnf["Compare-Suites::Options::%s" % (i)] = ""
apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
Options = Cnf.SubTree("Compare-Suites::Options")
if Options["Help"]:
- usage()
+ usage()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
daklib.database.init(Cnf, projectB)
if __name__ == '__main__':
main()
-
################################################################################
# On 30 Nov 1998, James Troup wrote:
-#
+#
# > James Troup<2> <troup2@debian.org>
-# >
+# >
# > James is a clone of James; he's going to take over the world.
# > After he gets some sleep.
-#
+#
# Could you clone other things too? Sheep? Llamas? Giant mutant turnips?
-#
+#
# Your clone will need some help to take over the world, maybe clone up an
# army of penguins and threaten to unleash them on the world, forcing
# governments to sway to the new James' will!
-#
+#
# Yes, I can envision a day when James' duplicate decides to take a horrific
# vengance on the James that spawned him and unleashes his fury in the form
# of thousands upon thousands of chickens that look just like Captin Blue
# Eye! Oh the horror.
-#
+#
# Now you'll have to were name tags to people can tell you apart, unless of
# course the new clone is truely evil in which case he should be easy to
# identify!
-#
+#
# Jason
# Chicken. Black. Helicopters.
# Be afraid.
if action == "add" or old_priority_id == priority_id and \
old_section_id == section_id and \
((old_maintainer_override == maintainer_override) or \
- (old_maintainer_override == "" and maintainer_override == None)):
+ (old_maintainer_override == "" and maintainer_override == None)):
# If it's unchanged or we're in 'add only' mode, ignore it
c_skipped += 1
continue
# Default arguments
for i in [ "add", "help", "list", "quiet", "set" ]:
- if not Cnf.has_key("Control-Overrides::Options::%s" % (i)):
- Cnf["Control-Overrides::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Control-Overrides::Options::%s" % (i)):
+ Cnf["Control-Overrides::Options::%s" % (i)] = ""
if not Cnf.has_key("Control-Overrides::Options::Component"):
- Cnf["Control-Overrides::Options::Component"] = "main"
+ Cnf["Control-Overrides::Options::Component"] = "main"
if not Cnf.has_key("Control-Overrides::Options::Suite"):
- Cnf["Control-Overrides::Options::Suite"] = "unstable"
+ Cnf["Control-Overrides::Options::Suite"] = "unstable"
if not Cnf.has_key("Control-Overrides::Options::Type"):
- Cnf["Control-Overrides::Options::Type"] = "deb"
+ Cnf["Control-Overrides::Options::Type"] = "deb"
file_list = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
list(suite, component, type)
else:
if Cnf.has_key("Suite::%s::Untouchable" % suite) and Cnf["Suite::%s::Untouchable" % suite] != 0:
- daklib.utils.fubar("%s: suite is untouchable" % suite)
+ daklib.utils.fubar("%s: suite is untouchable" % suite)
Logger = daklib.logging.Logger(Cnf, "control-overrides")
if file_list:
if __name__ == '__main__':
main()
-
('s',"set", "Control-Suite::Options::Set", "HasArg")]
for i in ["add", "help", "list", "remove", "set", "version" ]:
- if not Cnf.has_key("Control-Suite::Options::%s" % (i)):
- Cnf["Control-Suite::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Control-Suite::Options::%s" % (i)):
+ Cnf["Control-Suite::Options::%s" % (i)] = ""
file_list = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
Options = Cnf.SubTree("Control-Suite::Options")
if Options["Help"]:
- usage()
+ usage()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"],int(Cnf["DB::Port"]))
if __name__ == '__main__':
main()
-
nviu_to_remove = []
print "Newer version in unstable"
print "-------------------------"
- print
+ print
for i in ql:
(source, experimental_version, unstable_version) = i
print " o %s (%s, %s)" % (source, experimental_version, unstable_version)
def do_dubious_nbs(dubious_nbs):
print "Dubious NBS"
print "-----------"
- print
+ print
dubious_nbs_keys = dubious_nbs.keys()
dubious_nbs_keys.sort()
packages.sort()
print " o %s: %s" % (version, ", ".join(packages))
- print
+ print
################################################################################
('m',"mode","Cruft-Report::Options::Mode", "HasArg"),
('s',"suite","Cruft-Report::Options::Suite","HasArg")]
for i in [ "help" ]:
- if not Cnf.has_key("Cruft-Report::Options::%s" % (i)):
- Cnf["Cruft-Report::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Cruft-Report::Options::%s" % (i)):
+ Cnf["Cruft-Report::Options::%s" % (i)] = ""
Cnf["Cruft-Report::Options::Suite"] = Cnf["Dinstall::DefaultSuite"]
if not Cnf.has_key("Cruft-Report::Options::Mode"):
Options = Cnf.SubTree("Cruft-Report::Options")
if Options["Help"]:
- usage()
+ usage()
# Set up checks based on mode
if Options["Mode"] == "daily":
duplicate_bins[key].append(package)
packages.close()
os.unlink(temp_filename)
-
+
if "obsolete source" in checks:
do_obsolete_source(duplicate_bins, bin2source)
binaries = bin_not_built[source].keys()
binaries.sort()
print " o %s: %s" % (source, ", ".join(binaries))
- print
+ print
if "bms" in checks:
print "Built from multiple source packages"
print "-----------------------------------"
- print
+ print
keys = duplicate_bins.keys()
keys.sort()
for key in keys:
(source_a, source_b) = key.split("_")
print " o %s & %s => %s" % (source_a, source_b, ", ".join(duplicate_bins[key]))
- print
+ print
if "anais" in checks:
print "Architecture Not Allowed In Source"
print "----------------------------------"
print anais_output
- print
+ print
if "dubious nbs" in checks:
do_dubious_nbs(dubious_nbs)
class UserExtension:
def __init__(self, user_extension = None):
if user_extension:
- m = imp.load_source("dak_userext", user_extension)
- d = m.__dict__
+ m = imp.load_source("dak_userext", user_extension)
+ d = m.__dict__
else:
m, d = None, {}
- self.__dict__["_module"] = m
- self.__dict__["_d"] = d
+ self.__dict__["_module"] = m
+ self.__dict__["_d"] = d
def __getattr__(self, a):
if a in self.__dict__: return self.__dict__[a]
return self._d.get(a, None)
def __setattr__(self, a, v):
- self._d[a] = v
+ self._d[a] = v
################################################################################
class UserExtension:
def __init__(self, user_extension = None):
if user_extension:
- m = imp.load_source("dak_userext", user_extension)
- d = m.__dict__
+ m = imp.load_source("dak_userext", user_extension)
+ d = m.__dict__
else:
m, d = None, {}
- self.__dict__["_module"] = m
- self.__dict__["_d"] = d
+ self.__dict__["_module"] = m
+ self.__dict__["_d"] = d
def __getattr__(self, a):
if a in self.__dict__: return self.__dict__[a]
return self._d.get(a, None)
def __setattr__(self, a, v):
- self._d[a] = v
+ self._d[a] = v
################################################################################
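  A minimal usage sketch for the UserExtension proxy reindented above; the
  path and hook name below are hypothetical and only illustrate how the
  attribute fallback behaves:

      # Attribute lookups resolve into the loaded module's dict and fall
      # back to None when the module does not define the hook.
      userext = UserExtension("/srv/dak/config/extensions.py")
      if userext.check_transition is not None:
          userext.check_transition()

      bare = UserExtension()   # no module given: every attribute reads as None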
"Produce a report on NEW and BYHAND packages"),
("show-new",
"Output html for packages in NEW"),
-
+
("rm",
"Remove packages from suites"),
-
+
("process-new",
"Process NEW and BYHAND packages"),
("process-unchecked",
"Process packages in queue/unchecked"),
("process-accepted",
"Install packages into the pool"),
-
+
("make-suite-file-list",
"Generate lists of packages per suite for apt-ftparchive"),
("generate-releases",
"Generate compatability symlinks from dists/ into pool/"),
]
return functionality
-
+
################################################################################
def usage(functionality, exit_code=0):
functionality = init()
modules = [ command for (command, _) in functionality ]
-
+
if len(sys.argv) == 0:
daklib.utils.fubar("err, argc == 0? how is that possible?")
elif (len(sys.argv) == 1
import sys
import apt_pkg
-import daklib.queue
+import daklib.queue
import daklib.utils
################################################################################
Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Decode-Dot-Dak::Options::Help")]
for i in [ "help" ]:
- if not Cnf.has_key("Decode-Dot-Dak::Options::%s" % (i)):
- Cnf["Decode-Dot-Dak::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Decode-Dot-Dak::Options::%s" % (i)):
+ Cnf["Decode-Dot-Dak::Options::%s" % (i)] = ""
apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
Options = Cnf.SubTree("Decode-Dot-Dak::Options")
if Options["Help"]:
- usage()
+ usage()
k = daklib.queue.Upload(Cnf)
for arg in sys.argv[1:]:
arg = daklib.utils.validate_changes_file_arg(arg,require_changes=-1)
k.pkg.changes_file = arg
print "%s:" % (arg)
- k.init_vars()
+ k.init_vars()
k.update_vars()
changes = k.pkg.changes
if __name__ == '__main__':
main()
-
# probably xml.sax.saxutils would work as well
def html_escape(s):
- return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
+ return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
def escape_if_needed(s):
- if use_html:
- return re_html_escaping.sub(html_escaping.get, s)
- else:
- return s
-
+ if use_html:
+ return re_html_escaping.sub(html_escaping.get, s)
+ else:
+ return s
+
def headline(s, level=2):
- if use_html:
- print "<h%d>%s</h%d>" % (level, html_escape(s), level)
- else:
- print "---- %s ----" % (s)
+ if use_html:
+ print "<h%d>%s</h%d>" % (level, html_escape(s), level)
+ else:
+ print "---- %s ----" % (s)
# Colour definitions, 'end' isn't really for use
'maintainer': ('<span style="color: green">',"</span>")}
def colour_output(s, colour):
- if use_html:
- return ("%s%s%s" % (html_colours[colour][0], html_escape(s), html_colours[colour][1]))
- else:
- return ("%s%s%s" % (ansi_colours[colour], s, ansi_colours['end']))
+ if use_html:
+ return ("%s%s%s" % (html_colours[colour][0], html_escape(s), html_colours[colour][1]))
+ else:
+ return ("%s%s%s" % (ansi_colours[colour], s, ansi_colours['end']))
def print_escaped_text(s):
- if use_html:
- print "<pre>%s</pre>" % (s)
- else:
- print s
+ if use_html:
+ print "<pre>%s</pre>" % (s)
+ else:
+ print s
def print_formatted_text(s):
- if use_html:
- print "<pre>%s</pre>" % (html_escape(s))
- else:
- print s
+ if use_html:
+ print "<pre>%s</pre>" % (html_escape(s))
+ else:
+ print s
################################################################################
def get_depends_parts(depend) :
v_match = re_version.match(depend)
if v_match:
- d_parts = { 'name' : v_match.group(1), 'version' : v_match.group(2) }
+ d_parts = { 'name' : v_match.group(1), 'version' : v_match.group(2) }
else :
- d_parts = { 'name' : depend , 'version' : '' }
+ d_parts = { 'name' : depend , 'version' : '' }
return d_parts
def get_or_list(depend) :
dep_list = get_comma_list(d_str)
d = 0
while d < len(dep_list):
- # put depends into their own list
- depends_tree.append([dep_list[d]])
- d += 1
+ # put depends into their own list
+ depends_tree.append([dep_list[d]])
+ d += 1
d = 0
while d < len(depends_tree):
- k = 0
- # split up Or'd depends into a multi-item list
- depends_tree[d] = get_or_list(depends_tree[d][0])
- while k < len(depends_tree[d]):
- # split depends into {package, version relation}
- depends_tree[d][k] = get_depends_parts(depends_tree[d][k])
- k += 1
- d += 1
+ k = 0
+ # split up Or'd depends into a multi-item list
+ depends_tree[d] = get_or_list(depends_tree[d][0])
+ while k < len(depends_tree[d]):
+ # split depends into {package, version relation}
+ depends_tree[d][k] = get_depends_parts(depends_tree[d][k])
+ k += 1
+ d += 1
return depends_tree
def read_control (filename):
deb_file = daklib.utils.open_file(filename)
try:
- extracts = apt_inst.debExtractControl(deb_file)
- control = apt_pkg.ParseSection(extracts)
+ extracts = apt_inst.debExtractControl(deb_file)
+ control = apt_pkg.ParseSection(extracts)
except:
- print_formatted_text("can't parse control info")
- # TV-COMMENT: this will raise exceptions in two lines
- control = ''
+ print_formatted_text("can't parse control info")
+ # TV-COMMENT: this will raise exceptions in two lines
+ control = ''
deb_file.close()
control_keys = control.keys()
if control.has_key("Depends"):
- depends_str = control.Find("Depends")
- # create list of dependancy lists
- depends = split_depends(depends_str)
+ depends_str = control.Find("Depends")
+ # create list of dependancy lists
+ depends = split_depends(depends_str)
if control.has_key("Recommends"):
- recommends_str = control.Find("Recommends")
- recommends = split_depends(recommends_str)
+ recommends_str = control.Find("Recommends")
+ recommends = split_depends(recommends_str)
if control.has_key("Section"):
- section_str = control.Find("Section")
-
- c_match = re_contrib.search(section_str)
- nf_match = re_nonfree.search(section_str)
- if c_match :
- # contrib colour
- section = colour_output(section_str, 'contrib')
- elif nf_match :
- # non-free colour
- section = colour_output(section_str, 'nonfree')
- else :
- # main
- section = colour_output(section_str, 'main')
+ section_str = control.Find("Section")
+
+ c_match = re_contrib.search(section_str)
+ nf_match = re_nonfree.search(section_str)
+ if c_match :
+ # contrib colour
+ section = colour_output(section_str, 'contrib')
+ elif nf_match :
+ # non-free colour
+ section = colour_output(section_str, 'nonfree')
+ else :
+ # main
+ section = colour_output(section_str, 'main')
if control.has_key("Architecture"):
- arch_str = control.Find("Architecture")
- arch = colour_output(arch_str, 'arch')
+ arch_str = control.Find("Architecture")
+ arch = colour_output(arch_str, 'arch')
if control.has_key("Maintainer"):
- maintainer = control.Find("Maintainer")
- localhost = re_localhost.search(maintainer)
- if localhost:
- #highlight bad email
- maintainer = colour_output(maintainer, 'maintainer')
- else:
- maintainer = escape_if_needed(maintainer)
+ maintainer = control.Find("Maintainer")
+ localhost = re_localhost.search(maintainer)
+ if localhost:
+ #highlight bad email
+ maintainer = colour_output(maintainer, 'maintainer')
+ else:
+ maintainer = escape_if_needed(maintainer)
return (control, control_keys, section, depends, recommends, arch, maintainer)
dsc_file = daklib.utils.open_file(dsc_filename)
try:
- dsc = daklib.utils.parse_changes(dsc_filename)
+ dsc = daklib.utils.parse_changes(dsc_filename)
except:
- print_formatted_text("can't parse control info")
+ print_formatted_text("can't parse control info")
dsc_file.close()
filecontents = escape_if_needed(strip_pgp_signature(dsc_filename))
if dsc.has_key("build-depends"):
- builddep = split_depends(dsc["build-depends"])
- builddepstr = create_depends_string(builddep)
- filecontents = re_builddep.sub("Build-Depends: "+builddepstr, filecontents)
+ builddep = split_depends(dsc["build-depends"])
+ builddepstr = create_depends_string(builddep)
+ filecontents = re_builddep.sub("Build-Depends: "+builddepstr, filecontents)
if dsc.has_key("build-depends-indep"):
- builddepindstr = create_depends_string(split_depends(dsc["build-depends-indep"]))
- filecontents = re_builddepind.sub("Build-Depends-Indep: "+builddepindstr, filecontents)
+ builddepindstr = create_depends_string(split_depends(dsc["build-depends-indep"]))
+ filecontents = re_builddepind.sub("Build-Depends-Indep: "+builddepindstr, filecontents)
if dsc.has_key("architecture") :
- if (dsc["architecture"] != "any"):
- newarch = colour_output(dsc["architecture"], 'arch')
- filecontents = re_arch.sub("Architecture: " + newarch, filecontents)
+ if (dsc["architecture"] != "any"):
+ newarch = colour_output(dsc["architecture"], 'arch')
+ filecontents = re_arch.sub("Architecture: " + newarch, filecontents)
return filecontents
result = ""
comma_count = 1
for l in depends_tree:
- if (comma_count >= 2):
- result += ", "
- or_count = 1
- for d in l:
- if (or_count >= 2 ):
- result += " | "
- # doesn't do version lookup yet.
-
- q = projectB.query("SELECT DISTINCT(b.package), b.version, c.name, su.suite_name FROM binaries b, files fi, location l, component c, bin_associations ba, suite su WHERE b.package='%s' AND b.file = fi.id AND fi.location = l.id AND l.component = c.id AND ba.bin=b.id AND ba.suite = su.id AND su.suite_name='%s' ORDER BY b.version desc" % (d['name'], suite))
- ql = q.getresult()
- if ql:
- i = ql[0]
-
- adepends = d['name']
- if d['version'] != '' :
- adepends += " (%s)" % (d['version'])
-
- if i[2] == "contrib":
- result += colour_output(adepends, "contrib")
- elif i[2] == "non-free":
- result += colour_output(adepends, "nonfree")
- else :
- result += colour_output(adepends, "main")
- else:
- adepends = d['name']
- if d['version'] != '' :
- adepends += " (%s)" % (d['version'])
- result += colour_output(adepends, "bold")
- or_count += 1
- comma_count += 1
+ if (comma_count >= 2):
+ result += ", "
+ or_count = 1
+ for d in l:
+ if (or_count >= 2 ):
+ result += " | "
+ # doesn't do version lookup yet.
+
+ q = projectB.query("SELECT DISTINCT(b.package), b.version, c.name, su.suite_name FROM binaries b, files fi, location l, component c, bin_associations ba, suite su WHERE b.package='%s' AND b.file = fi.id AND fi.location = l.id AND l.component = c.id AND ba.bin=b.id AND ba.suite = su.id AND su.suite_name='%s' ORDER BY b.version desc" % (d['name'], suite))
+ ql = q.getresult()
+ if ql:
+ i = ql[0]
+
+ adepends = d['name']
+ if d['version'] != '' :
+ adepends += " (%s)" % (d['version'])
+
+ if i[2] == "contrib":
+ result += colour_output(adepends, "contrib")
+ elif i[2] == "non-free":
+ result += colour_output(adepends, "nonfree")
+ else :
+ result += colour_output(adepends, "main")
+ else:
+ adepends = d['name']
+ if d['version'] != '' :
+ adepends += " (%s)" % (d['version'])
+ result += colour_output(adepends, "bold")
+ or_count += 1
+ comma_count += 1
return result
def output_deb_info(filename):
to_print = ""
if control == '':
- print_formatted_text("no control info")
+ print_formatted_text("no control info")
else:
- for key in control_keys :
- output = " " + key + ": "
- if key == 'Depends':
- output += create_depends_string(depends)
- elif key == 'Recommends':
- output += create_depends_string(recommends)
- elif key == 'Section':
- output += section
- elif key == 'Architecture':
- output += arch
- elif key == 'Maintainer':
- output += maintainer
- elif key == 'Description':
- desc = control.Find(key)
- desc = re_newlinespace.sub('\n ', desc)
- output += escape_if_needed(desc)
- else:
- output += escape_if_needed(control.Find(key))
+ for key in control_keys :
+ output = " " + key + ": "
+ if key == 'Depends':
+ output += create_depends_string(depends)
+ elif key == 'Recommends':
+ output += create_depends_string(recommends)
+ elif key == 'Section':
+ output += section
+ elif key == 'Architecture':
+ output += arch
+ elif key == 'Maintainer':
+ output += maintainer
+ elif key == 'Description':
+ desc = control.Find(key)
+ desc = re_newlinespace.sub('\n ', desc)
+ output += escape_if_needed(desc)
+ else:
+ output += escape_if_needed(control.Find(key))
to_print += output + '\n'
print_escaped_text(to_print)
if printed_copyrights.has_key(copyrightmd5) and printed_copyrights[copyrightmd5] != "%s (%s)" % (package, deb_filename):
print_formatted_text( "NOTE: Copyright is the same as %s.\n" % \
- (printed_copyrights[copyrightmd5]))
+ (printed_copyrights[copyrightmd5]))
else:
- printed_copyrights[copyrightmd5] = "%s (%s)" % (package, deb_filename)
+ printed_copyrights[copyrightmd5] = "%s (%s)" % (package, deb_filename)
print_formatted_text(copyright)
filename = os.path.basename(deb_filename)
if filename.endswith(".udeb"):
- is_a_udeb = 1
+ is_a_udeb = 1
else:
- is_a_udeb = 0
+ is_a_udeb = 0
headline("control file for %s" % (filename))
#do_command ("dpkg -I", deb_filename)
output_deb_info(deb_filename)
if is_a_udeb:
- headline("skipping lintian check for udeb")
- print
+ headline("skipping lintian check for udeb")
+ print
else:
- headline("lintian check for %s" % (filename))
+ headline("lintian check for %s" % (filename))
do_lintian(deb_filename)
headline("contents of %s" % (filename))
do_command ("dpkg -c", deb_filename)
if is_a_udeb:
- headline("skipping copyright for udeb")
+ headline("skipping copyright for udeb")
else:
- headline("copyright of %s" % (filename))
+ headline("copyright of %s" % (filename))
print_copyright(deb_filename)
headline("file listing of %s" % (filename))
if line.startswith("-----END PGP SIGNATURE"):
inside_signature = 0
continue
- contents += line
+ contents += line
file.close()
return contents
changes = daklib.utils.parse_changes (changes_filename)
files = daklib.utils.build_file_list(changes)
for file in files.keys():
- if file.endswith(".deb") or file.endswith(".udeb"):
- check_deb(file)
+ if file.endswith(".deb") or file.endswith(".udeb"):
+ check_deb(file)
if file.endswith(".dsc"):
check_dsc(file)
# else: => byhand
('H',"html-output","Examine-Package::Options::Html-Output"),
]
for i in [ "Help", "Html-Output", "partial-html" ]:
- if not Cnf.has_key("Examine-Package::Options::%s" % (i)):
- Cnf["Examine-Package::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Examine-Package::Options::%s" % (i)):
+ Cnf["Examine-Package::Options::%s" % (i)] = ""
args = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
Options = Cnf.SubTree("Examine-Package::Options")
if Options["Help"]:
- usage()
+ usage()
stdout_fd = sys.stdout
for file in args:
try:
- if not Options["Html-Output"]:
- # Pipe output for each argument through less
- less_fd = os.popen("less -R -", 'w', 0)
- # -R added to display raw control chars for colour
- sys.stdout = less_fd
+ if not Options["Html-Output"]:
+ # Pipe output for each argument through less
+ less_fd = os.popen("less -R -", 'w', 0)
+ # -R added to display raw control chars for colour
+ sys.stdout = less_fd
try:
if file.endswith(".changes"):
check_changes(file)
else:
daklib.utils.fubar("Unrecognised file type: '%s'." % (file))
finally:
- if not Options["Html-Output"]:
- # Reset stdout here so future less invocations aren't FUBAR
- less_fd.close()
- sys.stdout = stdout_fd
+ if not Options["Html-Output"]:
+ # Reset stdout here so future less invocations aren't FUBAR
+ less_fd.close()
+ sys.stdout = stdout_fd
except IOError, e:
if errno.errorcode[e.errno] == 'EPIPE':
daklib.utils.warn("[examine-package] Caught EPIPE; skipping.")
if __name__ == '__main__':
main()
-
Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Find-Null-Maintainers::Options::Help")]
for i in [ "help" ]:
- if not Cnf.has_key("Find-Null-Maintainers::Options::%s" % (i)):
- Cnf["Find-Null-Maintainers::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Find-Null-Maintainers::Options::%s" % (i)):
+ Cnf["Find-Null-Maintainers::Options::%s" % (i)] = ""
apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
Options = Cnf.SubTree("Find-Null-Maintainers::Options")
if Options["Help"]:
- usage()
+ usage()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
self.filesizesha1 = None
if readpath:
- try:
- f = open(readpath + "/Index")
- x = f.readline()
+ try:
+ f = open(readpath + "/Index")
+ x = f.readline()
- def read_hashs(ind, f, self, x=x):
- while 1:
- x = f.readline()
- if not x or x[0] != " ": break
+ def read_hashs(ind, f, self, x=x):
+ while 1:
+ x = f.readline()
+ if not x or x[0] != " ": break
+ l = x.split()
+ if not self.history.has_key(l[2]):
+ self.history[l[2]] = [None,None]
+ self.history_order.append(l[2])
+ self.history[l[2]][ind] = (l[0], int(l[1]))
+ return x
+
+ while x:
l = x.split()
- if not self.history.has_key(l[2]):
- self.history[l[2]] = [None,None]
- self.history_order.append(l[2])
- self.history[l[2]][ind] = (l[0], int(l[1]))
- return x
- while x:
- l = x.split()
-
- if len(l) == 0:
- x = f.readline()
- continue
+ if len(l) == 0:
+ x = f.readline()
+ continue
- if l[0] == "SHA1-History:":
- x = read_hashs(0,f,self)
- continue
+ if l[0] == "SHA1-History:":
+ x = read_hashs(0,f,self)
+ continue
- if l[0] == "SHA1-Patches:":
- x = read_hashs(1,f,self)
- continue
+ if l[0] == "SHA1-Patches:":
+ x = read_hashs(1,f,self)
+ continue
- if l[0] == "Canonical-Name:" or l[0]=="Canonical-Path:":
- self.can_path = l[1]
+ if l[0] == "Canonical-Name:" or l[0]=="Canonical-Path:":
+ self.can_path = l[1]
- if l[0] == "SHA1-Current:" and len(l) == 3:
- self.filesizesha1 = (l[1], int(l[2]))
+ if l[0] == "SHA1-Current:" and len(l) == 3:
+ self.filesizesha1 = (l[1], int(l[2]))
- x = f.readline()
+ x = f.readline()
- except IOError:
- 0
+ except IOError:
+ 0
def dump(self, out=sys.stdout):
if self.can_path:
out.write("Canonical-Path: %s\n" % (self.can_path))
-
+
if self.filesizesha1:
out.write("SHA1-Current: %s %7d\n" % (self.filesizesha1))
tryunlink("%s/%s.gz" % (self.readpath, h))
del hs[h]
l = l[cnt-self.max:]
- self.history_order = l[:]
+ self.history_order = l[:]
out.write("SHA1-History:\n")
for h in l:
return (sha1sum, size)
def genchanges(Options, outdir, oldfile, origfile, maxdiffs = 14):
- if Options.has_key("NoAct"):
+ if Options.has_key("NoAct"):
return
patchname = Options["PatchName"]
print "%s: unchanged" % (origfile)
else:
if not os.path.isdir(outdir): os.mkdir(outdir)
- w = os.popen("diff --ed - %s | gzip -c -9 > %s.gz" %
+ w = os.popen("diff --ed - %s | gzip -c -9 > %s.gz" %
(newfile, difffile), "w")
pipe_file(oldf, w)
oldf.close()
('r', "rootdir", "Generate-Index-Diffs::Options::RootDir", "hasArg"),
('d', "tmpdir", "Generate-Index-Diffs::Options::TempDir", "hasArg"),
('m', "maxdiffs", "Generate-Index-Diffs::Options::MaxDiffs", "hasArg"),
- ('n', "n-act", "Generate-Index-Diffs::Options::NoAct"),
+ ('n', "n-act", "Generate-Index-Diffs::Options::NoAct"),
]
suites = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
Options = Cnf.SubTree("Generate-Index-Diffs::Options")
cl = compress.split()
uncompress = ("." not in cl)
for mode in compress.split():
- if mode == ".":
- result.append(file)
- elif mode == "gzip":
- if uncompress:
- result.append("<zcat/.gz>" + file)
- uncompress = 0
- result.append(file + ".gz")
- elif mode == "bzip2":
- if uncompress:
- result.append("<bzcat/.bz2>" + file)
- uncompress = 0
- result.append(file + ".bz2")
+ if mode == ".":
+ result.append(file)
+ elif mode == "gzip":
+ if uncompress:
+ result.append("<zcat/.gz>" + file)
+ uncompress = 0
+ result.append(file + ".gz")
+ elif mode == "bzip2":
+ if uncompress:
+ result.append("<bzcat/.bz2>" + file)
+ uncompress = 0
+ result.append(file + ".bz2")
return result
def create_temp_file (cmd):
r = r[0]
size = 0
while 1:
- x = r.readline()
- if not x:
- r.close()
- del x,r
- break
- f.write(x)
- size += len(x)
+ x = r.readline()
+ if not x:
+ r.close()
+ del x,r
+ break
+ f.write(x)
+ size += len(x)
f.flush()
f.seek(0)
return (size, f)
path = Cnf["Dir::Root"] + tree + "/"
for name in files:
try:
- if name[0] == "<":
- j = name.index("/")
- k = name.index(">")
- (cat, ext, name) = (name[1:j], name[j+1:k], name[k+1:])
- (size, file_handle) = create_temp_file("%s %s%s%s" %
- (cat, path, name, ext))
- else:
- size = os.stat(path + name)[stat.ST_SIZE]
- file_handle = daklib.utils.open_file(path + name)
+ if name[0] == "<":
+ j = name.index("/")
+ k = name.index(">")
+ (cat, ext, name) = (name[1:j], name[j+1:k], name[k+1:])
+ (size, file_handle) = create_temp_file("%s %s%s%s" %
+ (cat, path, name, ext))
+ else:
+ size = os.stat(path + name)[stat.ST_SIZE]
+ file_handle = daklib.utils.open_file(path + name)
except daklib.utils.cant_open_exc:
print "ALERT: Couldn't open " + path + name
else:
- hash = hashop(file_handle)
- file_handle.close()
- out.write(" %s %8d %s\n" % (hash, size, name))
+ hash = hashop(file_handle)
+ file_handle.close()
+ out.write(" %s %8d %s\n" % (hash, size, name))
def print_md5_files (tree, files):
print_md5sha_files (tree, files, apt_pkg.md5sum)
Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Generate-Releases::Options::Help"),
- ('a',"apt-conf","Generate-Releases::Options::Apt-Conf", "HasArg"),
- ('f',"force-touch","Generate-Releases::Options::Force-Touch"),
- ]
+ ('a',"apt-conf","Generate-Releases::Options::Apt-Conf", "HasArg"),
+ ('f',"force-touch","Generate-Releases::Options::Force-Touch"),
+ ]
for i in [ "help", "apt-conf", "force-touch" ]:
- if not Cnf.has_key("Generate-Releases::Options::%s" % (i)):
- Cnf["Generate-Releases::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Generate-Releases::Options::%s" % (i)):
+ Cnf["Generate-Releases::Options::%s" % (i)] = ""
suites = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
Options = Cnf.SubTree("Generate-Releases::Options")
if Options["Help"]:
- usage()
+ usage()
if not Options["Apt-Conf"]:
Options["Apt-Conf"] = daklib.utils.which_apt_conf_file()
for suite in suites:
print "Processing: " + suite
- SuiteBlock = Cnf.SubTree("Suite::" + suite)
+ SuiteBlock = Cnf.SubTree("Suite::" + suite)
- if SuiteBlock.has_key("Untouchable") and not Options["Force-Touch"]:
+ if SuiteBlock.has_key("Untouchable") and not Options["Force-Touch"]:
print "Skipping: " + suite + " (untouchable)"
continue
- suite = suite.lower()
+ suite = suite.lower()
- origin = SuiteBlock["Origin"]
- label = SuiteBlock.get("Label", origin)
- codename = SuiteBlock.get("CodeName", "")
+ origin = SuiteBlock["Origin"]
+ label = SuiteBlock.get("Label", origin)
+ codename = SuiteBlock.get("CodeName", "")
- version = ""
- description = ""
+ version = ""
+ description = ""
- q = projectB.query("SELECT version, description FROM suite WHERE suite_name = '%s'" % (suite))
- qs = q.getresult()
- if len(qs) == 1:
- if qs[0][0] != "-": version = qs[0][0]
- if qs[0][1]: description = qs[0][1]
+ q = projectB.query("SELECT version, description FROM suite WHERE suite_name = '%s'" % (suite))
+ qs = q.getresult()
+ if len(qs) == 1:
+ if qs[0][0] != "-": version = qs[0][0]
+ if qs[0][1]: description = qs[0][1]
- if SuiteBlock.has_key("NotAutomatic"):
- notautomatic = "yes"
- else:
- notautomatic = ""
+ if SuiteBlock.has_key("NotAutomatic"):
+ notautomatic = "yes"
+ else:
+ notautomatic = ""
- if SuiteBlock.has_key("Components"):
- components = SuiteBlock.ValueList("Components")
- else:
- components = []
+ if SuiteBlock.has_key("Components"):
+ components = SuiteBlock.ValueList("Components")
+ else:
+ components = []
suite_suffix = Cnf.Find("Dinstall::SuiteSuffix")
if components and suite_suffix:
else:
longsuite = suite
- tree = SuiteBlock.get("Tree", "dists/%s" % (longsuite))
+ tree = SuiteBlock.get("Tree", "dists/%s" % (longsuite))
- if AptCnf.has_key("tree::%s" % (tree)):
- pass
- elif AptCnf.has_key("bindirectory::%s" % (tree)):
- pass
- else:
+ if AptCnf.has_key("tree::%s" % (tree)):
+ pass
+ elif AptCnf.has_key("bindirectory::%s" % (tree)):
+ pass
+ else:
aptcnf_filename = os.path.basename(daklib.utils.which_apt_conf_file())
- print "ALERT: suite %s not in %s, nor untouchable!" % (suite, aptcnf_filename)
- continue
-
- print Cnf["Dir::Root"] + tree + "/Release"
- out = open(Cnf["Dir::Root"] + tree + "/Release", "w")
-
- out.write("Origin: %s\n" % (origin))
- out.write("Label: %s\n" % (label))
- out.write("Suite: %s\n" % (suite))
- if version != "":
- out.write("Version: %s\n" % (version))
- if codename != "":
- out.write("Codename: %s\n" % (codename))
- out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))
- if notautomatic != "":
- out.write("NotAutomatic: %s\n" % (notautomatic))
- out.write("Architectures: %s\n" % (" ".join(filter(daklib.utils.real_arch, SuiteBlock.ValueList("Architectures")))))
- if components:
+ print "ALERT: suite %s not in %s, nor untouchable!" % (suite, aptcnf_filename)
+ continue
+
+ print Cnf["Dir::Root"] + tree + "/Release"
+ out = open(Cnf["Dir::Root"] + tree + "/Release", "w")
+
+ out.write("Origin: %s\n" % (origin))
+ out.write("Label: %s\n" % (label))
+ out.write("Suite: %s\n" % (suite))
+ if version != "":
+ out.write("Version: %s\n" % (version))
+ if codename != "":
+ out.write("Codename: %s\n" % (codename))
+ out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))
+ if notautomatic != "":
+ out.write("NotAutomatic: %s\n" % (notautomatic))
+ out.write("Architectures: %s\n" % (" ".join(filter(daklib.utils.real_arch, SuiteBlock.ValueList("Architectures")))))
+ if components:
out.write("Components: %s\n" % (" ".join(components)))
- if description:
- out.write("Description: %s\n" % (description))
-
- files = []
-
- if AptCnf.has_key("tree::%s" % (tree)):
- for sec in AptCnf["tree::%s::Sections" % (tree)].split():
- for arch in AptCnf["tree::%s::Architectures" % (tree)].split():
- if arch == "source":
- filepath = "%s/%s/Sources" % (sec, arch)
- for file in compressnames("tree::%s" % (tree), "Sources", filepath):
- files.append(file)
- add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
- else:
- disks = "%s/disks-%s" % (sec, arch)
- diskspath = Cnf["Dir::Root"]+tree+"/"+disks
- if os.path.exists(diskspath):
- for dir in os.listdir(diskspath):
- if os.path.exists("%s/%s/md5sum.txt" % (diskspath, dir)):
- files.append("%s/%s/md5sum.txt" % (disks, dir))
-
- filepath = "%s/binary-%s/Packages" % (sec, arch)
- for file in compressnames("tree::%s" % (tree), "Packages", filepath):
- files.append(file)
- add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
-
- if arch == "source":
- rel = "%s/%s/Release" % (sec, arch)
- else:
- rel = "%s/binary-%s/Release" % (sec, arch)
- relpath = Cnf["Dir::Root"]+tree+"/"+rel
+ if description:
+ out.write("Description: %s\n" % (description))
+
+ files = []
+
+ if AptCnf.has_key("tree::%s" % (tree)):
+ for sec in AptCnf["tree::%s::Sections" % (tree)].split():
+ for arch in AptCnf["tree::%s::Architectures" % (tree)].split():
+ if arch == "source":
+ filepath = "%s/%s/Sources" % (sec, arch)
+ for file in compressnames("tree::%s" % (tree), "Sources", filepath):
+ files.append(file)
+ add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
+ else:
+ disks = "%s/disks-%s" % (sec, arch)
+ diskspath = Cnf["Dir::Root"]+tree+"/"+disks
+ if os.path.exists(diskspath):
+ for dir in os.listdir(diskspath):
+ if os.path.exists("%s/%s/md5sum.txt" % (diskspath, dir)):
+ files.append("%s/%s/md5sum.txt" % (disks, dir))
+
+ filepath = "%s/binary-%s/Packages" % (sec, arch)
+ for file in compressnames("tree::%s" % (tree), "Packages", filepath):
+ files.append(file)
+ add_tiffani(files, Cnf["Dir::Root"] + tree, filepath)
+
+ if arch == "source":
+ rel = "%s/%s/Release" % (sec, arch)
+ else:
+ rel = "%s/binary-%s/Release" % (sec, arch)
+ relpath = Cnf["Dir::Root"]+tree+"/"+rel
try:
- if os.access(relpath, os.F_OK):
- if os.stat(relpath).st_nlink > 1:
- os.unlink(relpath)
+ if os.access(relpath, os.F_OK):
+ if os.stat(relpath).st_nlink > 1:
+ os.unlink(relpath)
release = open(relpath, "w")
#release = open(longsuite.replace("/","_") + "_" + arch + "_" + sec + "_Release", "w")
except IOError:
release.close()
files.append(rel)
- if AptCnf.has_key("tree::%s/main" % (tree)):
- for dis in ["main", "contrib", "non-free"]:
- if not AptCnf.has_key("tree::%s/%s" % (tree, dis)): continue
- sec = AptCnf["tree::%s/%s::Sections" % (tree,dis)].split()[0]
- if sec != "debian-installer":
- print "ALERT: weird non debian-installer section in %s" % (tree)
-
- for arch in AptCnf["tree::%s/%s::Architectures" % (tree,dis)].split():
- if arch != "source": # always true
- for file in compressnames("tree::%s/%s" % (tree,dis),
- "Packages",
- "%s/%s/binary-%s/Packages" % (dis, sec, arch)):
- files.append(file)
- elif AptCnf.has_key("tree::%s::FakeDI" % (tree)):
- usetree = AptCnf["tree::%s::FakeDI" % (tree)]
- sec = AptCnf["tree::%s/main::Sections" % (usetree)].split()[0]
- if sec != "debian-installer":
- print "ALERT: weird non debian-installer section in %s" % (usetree)
-
- for arch in AptCnf["tree::%s/main::Architectures" % (usetree)].split():
- if arch != "source": # always true
- for file in compressnames("tree::%s/main" % (usetree), "Packages", "main/%s/binary-%s/Packages" % (sec, arch)):
- files.append(file)
-
- elif AptCnf.has_key("bindirectory::%s" % (tree)):
- for file in compressnames("bindirectory::%s" % (tree), "Packages", AptCnf["bindirectory::%s::Packages" % (tree)]):
- files.append(file.replace(tree+"/","",1))
- for file in compressnames("bindirectory::%s" % (tree), "Sources", AptCnf["bindirectory::%s::Sources" % (tree)]):
- files.append(file.replace(tree+"/","",1))
- else:
- print "ALERT: no tree/bindirectory for %s" % (tree)
-
- out.write("MD5Sum:\n")
- print_md5_files(tree, files)
- out.write("SHA1:\n")
- print_sha1_files(tree, files)
- out.write("SHA256:\n")
- print_sha256_files(tree, files)
-
- out.close()
- if Cnf.has_key("Dinstall::SigningKeyring"):
- keyring = "--secret-keyring \"%s\"" % Cnf["Dinstall::SigningKeyring"]
- if Cnf.has_key("Dinstall::SigningPubKeyring"):
- keyring += " --keyring \"%s\"" % Cnf["Dinstall::SigningPubKeyring"]
-
- arguments = "--no-options --batch --no-tty --armour"
- if Cnf.has_key("Dinstall::SigningKeyIds"):
- signkeyids = Cnf["Dinstall::SigningKeyIds"].split()
- else:
- signkeyids = [""]
-
- dest = Cnf["Dir::Root"] + tree + "/Release.gpg"
- if os.path.exists(dest):
- os.unlink(dest)
-
- for keyid in signkeyids:
- if keyid != "": defkeyid = "--default-key %s" % keyid
- else: defkeyid = ""
- os.system("gpg %s %s %s --detach-sign <%s >>%s" %
- (keyring, defkeyid, arguments,
- Cnf["Dir::Root"] + tree + "/Release", dest))
+ if AptCnf.has_key("tree::%s/main" % (tree)):
+ for dis in ["main", "contrib", "non-free"]:
+ if not AptCnf.has_key("tree::%s/%s" % (tree, dis)): continue
+ sec = AptCnf["tree::%s/%s::Sections" % (tree,dis)].split()[0]
+ if sec != "debian-installer":
+ print "ALERT: weird non debian-installer section in %s" % (tree)
+
+ for arch in AptCnf["tree::%s/%s::Architectures" % (tree,dis)].split():
+ if arch != "source": # always true
+ for file in compressnames("tree::%s/%s" % (tree,dis),
+ "Packages",
+ "%s/%s/binary-%s/Packages" % (dis, sec, arch)):
+ files.append(file)
+ elif AptCnf.has_key("tree::%s::FakeDI" % (tree)):
+ usetree = AptCnf["tree::%s::FakeDI" % (tree)]
+ sec = AptCnf["tree::%s/main::Sections" % (usetree)].split()[0]
+ if sec != "debian-installer":
+ print "ALERT: weird non debian-installer section in %s" % (usetree)
+
+ for arch in AptCnf["tree::%s/main::Architectures" % (usetree)].split():
+ if arch != "source": # always true
+ for file in compressnames("tree::%s/main" % (usetree), "Packages", "main/%s/binary-%s/Packages" % (sec, arch)):
+ files.append(file)
+
+ elif AptCnf.has_key("bindirectory::%s" % (tree)):
+ for file in compressnames("bindirectory::%s" % (tree), "Packages", AptCnf["bindirectory::%s::Packages" % (tree)]):
+ files.append(file.replace(tree+"/","",1))
+ for file in compressnames("bindirectory::%s" % (tree), "Sources", AptCnf["bindirectory::%s::Sources" % (tree)]):
+ files.append(file.replace(tree+"/","",1))
+ else:
+ print "ALERT: no tree/bindirectory for %s" % (tree)
+
+ out.write("MD5Sum:\n")
+ print_md5_files(tree, files)
+ out.write("SHA1:\n")
+ print_sha1_files(tree, files)
+ out.write("SHA256:\n")
+ print_sha256_files(tree, files)
+
+ out.close()
+ if Cnf.has_key("Dinstall::SigningKeyring"):
+ keyring = "--secret-keyring \"%s\"" % Cnf["Dinstall::SigningKeyring"]
+ if Cnf.has_key("Dinstall::SigningPubKeyring"):
+ keyring += " --keyring \"%s\"" % Cnf["Dinstall::SigningPubKeyring"]
+
+ arguments = "--no-options --batch --no-tty --armour"
+ if Cnf.has_key("Dinstall::SigningKeyIds"):
+ signkeyids = Cnf["Dinstall::SigningKeyIds"].split()
+ else:
+ signkeyids = [""]
+
+ dest = Cnf["Dir::Root"] + tree + "/Release.gpg"
+ if os.path.exists(dest):
+ os.unlink(dest)
+
+ for keyid in signkeyids:
+ if keyid != "": defkeyid = "--default-key %s" % keyid
+ else: defkeyid = ""
+ os.system("gpg %s %s %s --detach-sign <%s >>%s" %
+ (keyring, defkeyid, arguments,
+ Cnf["Dir::Root"] + tree + "/Release", dest))
#######################################################################################
if __name__ == '__main__':
main()
-
Arguments = [('a', "action", "Import-Archive::Options::Action"),
('h', "help", "Import-Archive::Options::Help")]
for i in [ "action", "help" ]:
- if not Cnf.has_key("Import-Archive::Options::%s" % (i)):
- Cnf["Import-Archive::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Import-Archive::Options::%s" % (i)):
+ Cnf["Import-Archive::Options::%s" % (i)] = ""
apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
Options = Cnf.SubTree("Import-Archive::Options")
if Options["Help"]:
- usage()
+ usage()
if not Options["Action"]:
daklib.utils.warn("""no -a/--action given; not doing anything.
q = projectB.query("SELECT id, uid, name FROM uid")
for (id, uid, name) in q.getresult():
byname[uid] = (id, name)
- byid[id] = (uid, name)
+ byid[id] = (uid, name)
return (byname, byid)
def get_fingerprint_info():
################################################################################
def get_ldap_name(entry):
- name = []
- for k in ["cn", "mn", "sn"]:
- ret = entry.get(k)
- if ret and ret[0] != "" and ret[0] != "-":
- name.append(ret[0])
- return " ".join(name)
+ name = []
+ for k in ["cn", "mn", "sn"]:
+ ret = entry.get(k)
+ if ret and ret[0] != "" and ret[0] != "-":
+ name.append(ret[0])
+ return " ".join(name)
################################################################################
class Keyring:
- gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
- " --with-colons --fingerprint --fingerprint"
- keys = {}
- fpr_lookup = {}
-
- def de_escape_gpg_str(self, str):
- esclist = re.split(r'(\\x..)', str)
- for x in range(1,len(esclist),2):
- esclist[x] = "%c" % (int(esclist[x][2:],16))
- return "".join(esclist)
-
- def __init__(self, keyring):
- k = os.popen(self.gpg_invocation % keyring, "r")
- keys = self.keys
- key = None
- fpr_lookup = self.fpr_lookup
- signingkey = False
- for line in k.xreadlines():
- field = line.split(":")
- if field[0] == "pub":
- key = field[4]
- (name, addr) = email.Utils.parseaddr(field[9])
- name = re.sub(r"\s*[(].*[)]", "", name)
- if name == "" or addr == "" or "@" not in addr:
- name = field[9]
- addr = "invalid-uid"
- name = self.de_escape_gpg_str(name)
- keys[key] = {"email": addr}
- if name != "": keys[key]["name"] = name
- keys[key]["aliases"] = [name]
- keys[key]["fingerprints"] = []
- signingkey = True
- elif key and field[0] == "sub" and len(field) >= 12:
- signingkey = ("s" in field[11])
- elif key and field[0] == "uid":
- (name, addr) = email.Utils.parseaddr(field[9])
- if name and name not in keys[key]["aliases"]:
- keys[key]["aliases"].append(name)
- elif signingkey and field[0] == "fpr":
- keys[key]["fingerprints"].append(field[9])
- fpr_lookup[field[9]] = key
-
- def generate_desired_users(self):
- if Options["Generate-Users"]:
- format = Options["Generate-Users"]
- return self.generate_users_from_keyring(format)
- if Options["Import-Ldap-Users"]:
- return self.import_users_from_ldap()
- return ({}, {})
-
- def import_users_from_ldap(self):
- LDAPDn = Cnf["Import-LDAP-Fingerprints::LDAPDn"]
- LDAPServer = Cnf["Import-LDAP-Fingerprints::LDAPServer"]
- l = ldap.open(LDAPServer)
- l.simple_bind_s("","")
- Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
- "(&(keyfingerprint=*)(gidnumber=%s))" % (Cnf["Import-Users-From-Passwd::ValidGID"]),
- ["uid", "keyfingerprint", "cn", "mn", "sn"])
-
- ldap_fin_uid_id = {}
-
- byuid = {}
- byname = {}
- keys = self.keys
- fpr_lookup = self.fpr_lookup
-
- for i in Attrs:
- entry = i[1]
- uid = entry["uid"][0]
- name = get_ldap_name(entry)
- fingerprints = entry["keyFingerPrint"]
- id = None
- for f in fingerprints:
- key = fpr_lookup.get(f, None)
- if key not in keys: continue
- keys[key]["uid"] = uid
-
- if id != None: continue
- id = daklib.database.get_or_set_uid_id(uid)
- byuid[id] = (uid, name)
- byname[uid] = (id, name)
-
- return (byname, byuid)
-
- def generate_users_from_keyring(self, format):
- byuid = {}
- byname = {}
- keys = self.keys
- any_invalid = False
- for x in keys.keys():
- if keys[x]["email"] == "invalid-uid":
- any_invalid = True
- keys[x]["uid"] = format % "invalid-uid"
- else:
- uid = format % keys[x]["email"]
- id = daklib.database.get_or_set_uid_id(uid)
- byuid[id] = (uid, keys[x]["name"])
- byname[uid] = (id, keys[x]["name"])
- keys[x]["uid"] = uid
- if any_invalid:
- uid = format % "invalid-uid"
- id = daklib.database.get_or_set_uid_id(uid)
- byuid[id] = (uid, "ungeneratable user id")
- byname[uid] = (id, "ungeneratable user id")
- return (byname, byuid)
+ gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
+ " --with-colons --fingerprint --fingerprint"
+ keys = {}
+ fpr_lookup = {}
+
+ def de_escape_gpg_str(self, str):
+ esclist = re.split(r'(\\x..)', str)
+ for x in range(1,len(esclist),2):
+ esclist[x] = "%c" % (int(esclist[x][2:],16))
+ return "".join(esclist)
+
+ def __init__(self, keyring):
+ k = os.popen(self.gpg_invocation % keyring, "r")
+ keys = self.keys
+ key = None
+ fpr_lookup = self.fpr_lookup
+ signingkey = False
+ for line in k.xreadlines():
+ field = line.split(":")
+ if field[0] == "pub":
+ key = field[4]
+ (name, addr) = email.Utils.parseaddr(field[9])
+ name = re.sub(r"\s*[(].*[)]", "", name)
+ if name == "" or addr == "" or "@" not in addr:
+ name = field[9]
+ addr = "invalid-uid"
+ name = self.de_escape_gpg_str(name)
+ keys[key] = {"email": addr}
+ if name != "": keys[key]["name"] = name
+ keys[key]["aliases"] = [name]
+ keys[key]["fingerprints"] = []
+ signingkey = True
+ elif key and field[0] == "sub" and len(field) >= 12:
+ signingkey = ("s" in field[11])
+ elif key and field[0] == "uid":
+ (name, addr) = email.Utils.parseaddr(field[9])
+ if name and name not in keys[key]["aliases"]:
+ keys[key]["aliases"].append(name)
+ elif signingkey and field[0] == "fpr":
+ keys[key]["fingerprints"].append(field[9])
+ fpr_lookup[field[9]] = key
+
+ def generate_desired_users(self):
+ if Options["Generate-Users"]:
+ format = Options["Generate-Users"]
+ return self.generate_users_from_keyring(format)
+ if Options["Import-Ldap-Users"]:
+ return self.import_users_from_ldap()
+ return ({}, {})
+
+ def import_users_from_ldap(self):
+ LDAPDn = Cnf["Import-LDAP-Fingerprints::LDAPDn"]
+ LDAPServer = Cnf["Import-LDAP-Fingerprints::LDAPServer"]
+ l = ldap.open(LDAPServer)
+ l.simple_bind_s("","")
+ Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
+ "(&(keyfingerprint=*)(gidnumber=%s))" % (Cnf["Import-Users-From-Passwd::ValidGID"]),
+ ["uid", "keyfingerprint", "cn", "mn", "sn"])
+
+ ldap_fin_uid_id = {}
+
+ byuid = {}
+ byname = {}
+ keys = self.keys
+ fpr_lookup = self.fpr_lookup
+
+ for i in Attrs:
+ entry = i[1]
+ uid = entry["uid"][0]
+ name = get_ldap_name(entry)
+ fingerprints = entry["keyFingerPrint"]
+ id = None
+ for f in fingerprints:
+ key = fpr_lookup.get(f, None)
+ if key not in keys: continue
+ keys[key]["uid"] = uid
+
+ if id != None: continue
+ id = daklib.database.get_or_set_uid_id(uid)
+ byuid[id] = (uid, name)
+ byname[uid] = (id, name)
+
+ return (byname, byuid)
+
+ def generate_users_from_keyring(self, format):
+ byuid = {}
+ byname = {}
+ keys = self.keys
+ any_invalid = False
+ for x in keys.keys():
+ if keys[x]["email"] == "invalid-uid":
+ any_invalid = True
+ keys[x]["uid"] = format % "invalid-uid"
+ else:
+ uid = format % keys[x]["email"]
+ id = daklib.database.get_or_set_uid_id(uid)
+ byuid[id] = (uid, keys[x]["name"])
+ byname[uid] = (id, keys[x]["name"])
+ keys[x]["uid"] = uid
+ if any_invalid:
+ uid = format % "invalid-uid"
+ id = daklib.database.get_or_set_uid_id(uid)
+ byuid[id] = (uid, "ungeneratable user id")
+ byname[uid] = (id, "ungeneratable user id")
+ return (byname, byuid)
################################################################################
Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Import-Keyring::Options::Help"),
- ('L',"import-ldap-users","Import-Keyring::Options::Import-Ldap-Users"),
- ('U',"generate-users","Import-Keyring::Options::Generate-Users", "HasArg"),
- ]
+ ('L',"import-ldap-users","Import-Keyring::Options::Import-Ldap-Users"),
+ ('U',"generate-users","Import-Keyring::Options::Generate-Users", "HasArg"),
+ ]
for i in [ "help", "report-changes", "generate-users", "import-ldap-users" ]:
if not Cnf.has_key("Import-Keyring::Options::%s" % (i)):
usage()
if len(keyring_names) != 1:
- usage(1)
+ usage(1)
### Keep track of changes made
keyring = Keyring(keyringname)
keyring_id = daklib.database.get_or_set_keyring_id(
- keyringname.split("/")[-1])
+ keyringname.split("/")[-1])
### Generate new uid entries if they're needed (from LDAP or the keyring)
(desuid_byname, desuid_byid) = keyring.generate_desired_users()
for id in desuid_byid.keys():
uid = (id, desuid_byid[id][0])
name = desuid_byid[id][1]
- oname = db_uid_byid[id][1]
- if name and oname != name:
- changes.append((uid[1], "Full name: %s" % (name)))
+ oname = db_uid_byid[id][1]
+ if name and oname != name:
+ changes.append((uid[1], "Full name: %s" % (name)))
projectB.query("UPDATE uid SET name = '%s' WHERE id = %s" %
- (pg.escape_string(name), id))
+ (pg.escape_string(name), id))
# The fingerprint table (fpr) points to a uid and a keyring.
# If the uid is being decided here (ldap/generate) we set it to it.
for z in keyring.keys.keys():
id = db_uid_byname.get(keyring.keys[z].get("uid", None), [None])[0]
if id == None:
- id = db_fin_info.get(keyring.keys[z]["fingerprints"][0], [None])[0]
- for y in keyring.keys[z]["fingerprints"]:
- fpr[y] = (id,keyring_id)
+ id = db_fin_info.get(keyring.keys[z]["fingerprints"][0], [None])[0]
+ for y in keyring.keys[z]["fingerprints"]:
+ fpr[y] = (id,keyring_id)
# For any keys that used to be in this keyring, disassociate them.
# We don't change the uid, leaving that for historical info; if
for f,(u,fid,kr) in db_fin_info.iteritems():
if kr != keyring_id: continue
- if f in fpr: continue
- changes.append((db_uid_byid.get(u, [None])[0], "Removed key: %s" % (f)))
- projectB.query("UPDATE fingerprint SET keyring = NULL WHERE id = %d" % (fid))
+ if f in fpr: continue
+ changes.append((db_uid_byid.get(u, [None])[0], "Removed key: %s" % (f)))
+ projectB.query("UPDATE fingerprint SET keyring = NULL WHERE id = %d" % (fid))
# For the keys in this keyring, add/update any fingerprints that've
# changed.
for f in fpr:
newuid = fpr[f][0]
- newuiduid = db_uid_byid.get(newuid, [None])[0]
- (olduid, oldfid, oldkid) = db_fin_info.get(f, [-1,-1,-1])
- if olduid == None: olduid = -1
- if oldkid == None: oldkid = -1
- if oldfid == -1:
- changes.append((newuiduid, "Added key: %s" % (f)))
+ newuiduid = db_uid_byid.get(newuid, [None])[0]
+ (olduid, oldfid, oldkid) = db_fin_info.get(f, [-1,-1,-1])
+ if olduid == None: olduid = -1
+ if oldkid == None: oldkid = -1
+ if oldfid == -1:
+ changes.append((newuiduid, "Added key: %s" % (f)))
if newuid:
- projectB.query("INSERT INTO fingerprint (fingerprint, uid, keyring) VALUES ('%s', %d, %d)" % (f, newuid, keyring_id))
- else:
- projectB.query("INSERT INTO fingerprint (fingerprint, keyring) VALUES ('%s', %d)" % (f, keyring_id))
- else:
- if newuid and olduid != newuid:
- if olduid != -1:
- changes.append((newuiduid, "Linked key: %s" % f))
- changes.append((newuiduid, " (formerly belonging to %s)" % (db_uid_byid[olduid][0])))
- else:
- changes.append((newuiduid, "Linked key: %s" % f))
- changes.append((newuiduid, " (formerly unowned)"))
- projectB.query("UPDATE fingerprint SET uid = %d WHERE id = %d" % (newuid, oldfid))
-
- if oldkid != keyring_id:
- projectB.query("UPDATE fingerprint SET keyring = %d WHERE id = %d" % (keyring_id, oldfid))
+ projectB.query("INSERT INTO fingerprint (fingerprint, uid, keyring) VALUES ('%s', %d, %d)" % (f, newuid, keyring_id))
+ else:
+ projectB.query("INSERT INTO fingerprint (fingerprint, keyring) VALUES ('%s', %d)" % (f, keyring_id))
+ else:
+ if newuid and olduid != newuid:
+ if olduid != -1:
+ changes.append((newuiduid, "Linked key: %s" % f))
+ changes.append((newuiduid, " (formerly belonging to %s)" % (db_uid_byid[olduid][0])))
+ else:
+ changes.append((newuiduid, "Linked key: %s" % f))
+ changes.append((newuiduid, " (formerly unowned)"))
+ projectB.query("UPDATE fingerprint SET uid = %d WHERE id = %d" % (newuid, oldfid))
+
+ if oldkid != keyring_id:
+ projectB.query("UPDATE fingerprint SET keyring = %d WHERE id = %d" % (keyring_id, oldfid))
# All done!
Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Import-LDAP-Fingerprints::Options::Help")]
for i in [ "help" ]:
- if not Cnf.has_key("Import-LDAP-Fingerprints::Options::%s" % (i)):
- Cnf["Import-LDAP-Fingerprints::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Import-LDAP-Fingerprints::Options::%s" % (i)):
+ Cnf["Import-LDAP-Fingerprints::Options::%s" % (i)] = ""
apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
Options = Cnf.SubTree("Import-LDAP-Fingerprints::Options")
if Options["Help"]:
- usage()
+ usage()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
daklib.database.init(Cnf, projectB)
uid_id = daklib.database.get_or_set_uid_id(uid)
if not db_uid_name.has_key(uid_id) or db_uid_name[uid_id] != name:
- q = projectB.query("UPDATE uid SET name = '%s' WHERE id = %d" % (escape_string(name), uid_id))
- print "Assigning name of %s as %s" % (uid, name)
+ q = projectB.query("UPDATE uid SET name = '%s' WHERE id = %d" % (escape_string(name), uid_id))
+ print "Assigning name of %s as %s" % (uid, name)
for fingerprint in fingerprints:
ldap_fin_uid_id[fingerprint] = (uid, uid_id)
if not existing_uid:
q = projectB.query("UPDATE fingerprint SET uid = %s WHERE id = %s" % (uid_id, fingerprint_id))
print "Assigning %s to 0x%s." % (uid, fingerprint)
- elif existing_uid == uid:
- pass
- elif existing_uid[:3] == "dm:":
+ elif existing_uid == uid:
+ pass
+ elif existing_uid[:3] == "dm:":
q = projectB.query("UPDATE fingerprint SET uid = %s WHERE id = %s" % (uid_id, fingerprint_id))
print "Promoting DM %s to DD %s with keyid 0x%s." % (existing_uid, uid, fingerprint)
else:
primary_key = primary_key.replace(" ","")
if not ldap_fin_uid_id.has_key(primary_key):
daklib.utils.warn("0x%s (from 0x%s): no UID found in LDAP" % (primary_key, fingerprint))
- else:
- (uid, uid_id) = ldap_fin_uid_id[primary_key]
- q = projectB.query("UPDATE fingerprint SET uid = %s WHERE id = %s" % (uid_id, fingerprint_id))
- print "Assigning %s to 0x%s." % (uid, fingerprint)
+ else:
+ (uid, uid_id) = ldap_fin_uid_id[primary_key]
+ q = projectB.query("UPDATE fingerprint SET uid = %s WHERE id = %s" % (uid_id, fingerprint_id))
+ print "Assigning %s to 0x%s." % (uid, fingerprint)
else:
extra_keyrings = ""
for keyring in Cnf.ValueList("Import-LDAP-Fingerprints::ExtraKeyrings"):
('v', "verbose", "Import-Users-From-Passwd::Options::Verbose"),
('h', "help", "Import-Users-From-Passwd::Options::Help")]
for i in [ "no-action", "quiet", "verbose", "help" ]:
- if not Cnf.has_key("Import-Users-From-Passwd::Options::%s" % (i)):
- Cnf["Import-Users-From-Passwd::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Import-Users-From-Passwd::Options::%s" % (i)):
+ Cnf["Import-Users-From-Passwd::Options::%s" % (i)] = ""
arguments = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
Options = Cnf.SubTree("Import-Users-From-Passwd::Options")
if __name__ == '__main__':
main()
-
projectB.query("COMMIT WORK")
################################################################################
-
+
def main ():
"""Sync dak.conf configuartion file and the SQL database"""
def process_file(config, config_name):
"""Create directories for a config entry that's a filename."""
-
+
if config.has_key(config_name):
target = os.path.dirname(config[config_name])
do_dir(target, config_name)
def process_tree(config, tree):
"""Create directories for a config tree."""
-
+
for entry in config.SubTree(tree).List():
entry = entry.lower()
if tree == "Dir":
def process_morguesubdir(subdir):
"""Create directories for morgue sub directories."""
-
+
config_name = "%s::MorgueSubDir" % (subdir)
if Cnf.has_key(config_name):
target = os.path.join(Cnf["Dir::Morgue"], Cnf[config_name])
if __name__ == '__main__':
main()
-
for i in [ "architecture", "binarytype", "component", "format",
"greaterorequal", "greaterthan", "regex", "suite",
"source-and-binary", "help" ]:
- if not Cnf.has_key("Ls::Options::%s" % (i)):
- Cnf["Ls::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Ls::Options::%s" % (i)):
+ Cnf["Ls::Options::%s" % (i)] = ""
packages = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
Options = Cnf.SubTree("Ls::Options")
if __name__ == '__main__':
main()
-
Arguments = [('h',"help","Make-Maintainers::Options::Help")]
if not Cnf.has_key("Make-Maintainers::Options::Help"):
- Cnf["Make-Maintainers::Options::Help"] = ""
+ Cnf["Make-Maintainers::Options::Help"] = ""
extra_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
Options = Cnf.SubTree("Make-Maintainers::Options")
if __name__ == '__main__':
main()
-
Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Make-Overrides::Options::Help")]
for i in [ "help" ]:
- if not Cnf.has_key("Make-Overrides::Options::%s" % (i)):
- Cnf["Make-Overrides::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Make-Overrides::Options::%s" % (i)):
+ Cnf["Make-Overrides::Options::%s" % (i)] = ""
apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
Options = Cnf.SubTree("Make-Overrides::Options")
if Options["Help"]:
- usage()
+ usage()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
daklib.database.init(Cnf, projectB)
delete_version = version[0]
delete_id = packages[delete_unique_id]["id"]
delete_arch = packages[delete_unique_id]["arch"]
- if not Cnf.Find("Suite::%s::Untouchable" % (suite)) or Options["Force"]:
+ if not Cnf.Find("Suite::%s::Untouchable" % (suite)) or Options["Force"]:
if Options["No-Delete"]:
print "Would delete %s_%s_%s in %s in favour of %s_%s" % (pkg, delete_arch, delete_version, suite, dominant_version, dominant_arch)
else:
daklib.utils.warn("Adding %s as %s maps Arch: all from it." % (archall_suite, suite))
suites.append(archall_suite)
Options["Suite"] = ",".join(suites)
-
+
(con_suites, con_architectures, con_components, check_source) = \
daklib.utils.parse_args(Options)
('c', "component", "Make-Suite-File-List::Options::Component", "HasArg"),
('h', "help", "Make-Suite-File-List::Options::Help"),
('n', "no-delete", "Make-Suite-File-List::Options::No-Delete"),
- ('f', "force", "Make-Suite-File-List::Options::Force"),
+ ('f', "force", "Make-Suite-File-List::Options::Force"),
('s', "suite", "Make-Suite-File-List::Options::Suite", "HasArg")]
for i in ["architecture", "component", "help", "no-delete", "suite", "force-touch" ]:
- if not Cnf.has_key("Make-Suite-File-List::Options::%s" % (i)):
- Cnf["Make-Suite-File-List::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Make-Suite-File-List::Options::%s" % (i)):
+ Cnf["Make-Suite-File-List::Options::%s" % (i)] = ""
apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
Options = Cnf.SubTree("Make-Suite-File-List::Options")
if Options["Help"]:
if path.find("/installer-") != -1:
return 0
return 1
-
+
##############################################################################
# The applicable function is basically a predicate. Given a path and a
# target object its job is to decide if the path conforms for the
cPickle.dump( self.root, f, 1 )
f.close()
-
+
##############################################################################
# Helper functions for the tree syncing...
##################
#print "-L-", _pth(path,k)
do_unlink(targ, _pth(path,k))
del targdir.links[k]
-
+
# Remove any files in targdir which aren't in srcdir
# Or which aren't applicable
rm = []
for k in srcdir.links.keys():
if applicable( _pth(path,k), targ ):
if not targdir.links.has_key(k):
- targdir.links[k] = srcdir.links[k];
+ targdir.links[k] = srcdir.links[k];
#print "+L+",_pth(path,k), "->", srcdir.links[k]
do_symlink( targ, _pth(path,k), targdir.links[k] )
else:
MASTER_PATH = Cnf["Mirror-Split::FTPPath"]
TREE_ROOT = Cnf["Mirror-Split::TreeRootPath"]
TREE_DB_ROOT = Cnf["Mirror-Split::TreeDatabasePath"]
-
+
for a in Cnf.ValueList("Mirror-Split::BasicTrees"):
trees.append( MirrorSplitTarget( a, "%s,all" % a, 1 ) )
print " [source]"
else:
print ""
-
+
def do_help ():
print """Usage: dak mirror-split [OPTIONS]
Generate hardlink trees of certain architectures
if Options.has_key("List"):
do_list()
return
-
+
src = MirrorSplitDB()
print "Scanning", MASTER_PATH
print "Saving updated DB...",
tree.save_db()
print "Done"
-
+
##############################################################################
if __name__ == '__main__':
continue
if c not in changes: changes.append(c)
- srcver = "%s %s" % (Upload.pkg.changes["source"],
+ srcver = "%s %s" % (Upload.pkg.changes["source"],
Upload.pkg.changes["version"])
srcverarches.setdefault(srcver, {})
for arch in Upload.pkg.changes["architecture"].keys():
while a not in v:
a = daklib.utils.our_raw_input(p) + default
a = a[:1].upper()
-
+
return v[a]
def add_changes(extras):
actually_upload(changes)
else:
child = os.fork()
- if child == 0:
- actually_upload(changes)
- os._exit(0)
- print "Uploading in the background"
+ if child == 0:
+ actually_upload(changes)
+ os._exit(0)
+ print "Uploading in the background"
def actually_upload(changes_files):
file_list = ""
if Options["Sudo"]:
if advisory == None:
daklib.utils.fubar("Must set advisory name")
- os.spawnl(os.P_WAIT, "/usr/bin/sudo", "/usr/bin/sudo", "-u", "dak", "-H",
+ os.spawnl(os.P_WAIT, "/usr/bin/sudo", "/usr/bin/sudo", "-u", "dak", "-H",
"/usr/local/bin/dak", "new-security-install", "-"+arg, "--", advisory)
else:
fn()
if os.getcwd() == Cnf["Dir::Queue::Embargoed"].rstrip("/"):
opts.append("Disembargo")
opts += ["Show advisory", "Reject", "Quit"]
-
+
advisory_info()
what = prompt(opts, default)
('s',"suite","Override::Options::Suite", "HasArg"),
]
for i in ["help", "no-action"]:
- if not Cnf.has_key("Override::Options::%s" % (i)):
- Cnf["Override::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Override::Options::%s" % (i)):
+ Cnf["Override::Options::%s" % (i)] = ""
if not Cnf.has_key("Override::Options::Suite"):
- Cnf["Override::Options::Suite"] = "unstable"
+ Cnf["Override::Options::Suite"] = "unstable"
arguments = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
Options = Cnf.SubTree("Override::Options")
if Options["Help"]:
- usage()
+ usage()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
daklib.database.init(Cnf, projectB)
Cnf = daklib.utils.get_conf()
for i in ["help", "limit", "no-action", "verbose" ]:
- if not Cnf.has_key("Poolize::Options::%s" % (i)):
- Cnf["Poolize::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Poolize::Options::%s" % (i)):
+ Cnf["Poolize::Options::%s" % (i)] = ""
Arguments = [('h',"help","Poolize::Options::Help"),
if __name__ == '__main__':
main()
-
import apt_pkg
import daklib.database
import daklib.logging
-import daklib.queue
+import daklib.queue
import daklib.utils
###############################################################################
# propogate in the case it is in the override tables:
if changes.has_key("propdistribution"):
for suite in changes["propdistribution"].keys():
- if Upload.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file):
- propogate[suite] = 1
- else:
- nopropogate[suite] = 1
+ if Upload.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file):
+ propogate[suite] = 1
+ else:
+ nopropogate[suite] = 1
for suite in propogate.keys():
- if suite in nopropogate:
- continue
- changes["distribution"][suite] = 1
+ if suite in nopropogate:
+ continue
+ changes["distribution"][suite] = 1
for file in files.keys():
# Check the package is still in the override tables
('s',"no-mail", "Dinstall::Options::No-Mail")]
for i in ["automatic", "help", "no-action", "no-lock", "no-mail", "version"]:
- if not Cnf.has_key("Dinstall::Options::%s" % (i)):
- Cnf["Dinstall::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Dinstall::Options::%s" % (i)):
+ Cnf["Dinstall::Options::%s" % (i)] = ""
changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
Options = Cnf.SubTree("Dinstall::Options")
if Options["Automatic"]:
answer = 'R'
else:
- print "INSTALL to " + ", ".join(changes["distribution"].keys())
- print reject_message + summary,
+ print "INSTALL to " + ", ".join(changes["distribution"].keys())
+ print reject_message + summary,
prompt = "[I]nstall, Skip, Quit ?"
if Options["Automatic"]:
answer = 'I'
source = files[file]["source package"]
source_version = files[file]["source version"]
filename = files[file]["pool name"] + file
- if not files[file].has_key("location id") or not files[file]["location id"]:
- files[file]["location id"] = daklib.database.get_location_id(Cnf["Dir::Pool"],files[file]["component"],daklib.utils.where_am_i())
+ if not files[file].has_key("location id") or not files[file]["location id"]:
+ files[file]["location id"] = daklib.database.get_location_id(Cnf["Dir::Pool"],files[file]["component"],daklib.utils.where_am_i())
if not files[file].has_key("files id") or not files[file]["files id"]:
files[file]["files id"] = daklib.database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["location id"])
source_id = daklib.database.get_source_id (source, source_version)
import examine_package
import daklib.database
import daklib.logging
-import daklib.queue
+import daklib.queue
import daklib.utils
# Globals
for suite in changes["suite"].keys():
override = Cnf.Find("Suite::%s::OverrideSuite" % (suite))
if override:
- (olderr, newerr) = (daklib.database.get_suite_id(suite) == -1,
- daklib.database.get_suite_id(override) == -1)
- if olderr or newerr:
- (oinv, newinv) = ("", "")
- if olderr: oinv = "invalid "
- if newerr: ninv = "invalid "
- print "warning: overriding %ssuite %s to %ssuite %s" % (
- oinv, suite, ninv, override)
+ (olderr, newerr) = (daklib.database.get_suite_id(suite) == -1,
+ daklib.database.get_suite_id(override) == -1)
+ if olderr or newerr:
+            (oinv, ninv) = ("", "")
+ if olderr: oinv = "invalid "
+ if newerr: ninv = "invalid "
+ print "warning: overriding %ssuite %s to %ssuite %s" % (
+ oinv, suite, ninv, override)
del changes["suite"][suite]
changes["suite"][override] = 1
# Validate suites
retry = 0
while retry < 10:
try:
- lock_fd = os.open(Cnf["Process-New::AcceptedLockFile"], os.O_RDONLY | os.O_CREAT | os.O_EXCL)
+ lock_fd = os.open(Cnf["Process-New::AcceptedLockFile"], os.O_RDONLY | os.O_CREAT | os.O_EXCL)
retry = 10
- except OSError, e:
- if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EEXIST':
- retry += 1
- if (retry >= 10):
- daklib.utils.fubar("Couldn't obtain lock; assuming 'dak process-unchecked' is already running.")
- else:
- print("Unable to get accepted lock (try %d of 10)" % retry)
- time.sleep(60)
- else:
- raise
+ except OSError, e:
+ if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EEXIST':
+ retry += 1
+ if (retry >= 10):
+ daklib.utils.fubar("Couldn't obtain lock; assuming 'dak process-unchecked' is already running.")
+ else:
+ print("Unable to get accepted lock (try %d of 10)" % retry)
+ time.sleep(60)
+ else:
+ raise
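
The accepted-lock loop above is the usual O_CREAT|O_EXCL lockfile with bounded
retries; the same idiom in isolation (the lock path is a placeholder):

    import errno, os, time

    def acquire_lock(path, attempts=10, wait=60):
        for attempt in range(1, attempts + 1):
            try:
                # O_CREAT|O_EXCL fails with EEXIST while another process holds the lock
                return os.open(path, os.O_RDONLY | os.O_CREAT | os.O_EXCL)
            except OSError, e:
                if e.errno not in (errno.EACCES, errno.EEXIST):
                    raise
                print "Unable to get lock (try %d of %d)" % (attempt, attempts)
                time.sleep(wait)
        raise RuntimeError("Couldn't obtain lock on %s" % (path))
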
def move_to_dir (dest, perms=0660, changesperms=0664):
daklib.utils.move (Upload.pkg.changes_file, dest, perms=changesperms)
if len(lines) == 0 or lines[0] != line + "\n": continue
changes_files = [ x for x in os.listdir(".") if x.startswith(comm[7:]+"_")
and x.endswith(".changes") ]
- changes_files = sort_changes(changes_files)
+ changes_files = sort_changes(changes_files)
for f in changes_files:
- f = daklib.utils.validate_changes_file_arg(f, 0)
- if not f: continue
- print "\n" + f
- fn(f, "".join(lines[1:]))
+ f = daklib.utils.validate_changes_file_arg(f, 0)
+ if not f: continue
+ print "\n" + f
+ fn(f, "".join(lines[1:]))
if opref != npref and not Options["No-Action"]:
- newcomm = npref + comm[len(opref):]
- os.rename("%s/%s" % (dir, comm), "%s/%s" % (dir, newcomm))
+ newcomm = npref + comm[len(opref):]
+ os.rename("%s/%s" % (dir, comm), "%s/%s" % (dir, newcomm))
################################################################################
import apt_inst, apt_pkg
import daklib.database
import daklib.logging
-import daklib.queue
+import daklib.queue
import daklib.utils
from types import *
# Strip a source version in brackets from the source field
if re_strip_srcver.search(changes["source"]):
- changes["source"] = re_strip_srcver.sub('', changes["source"])
+ changes["source"] = re_strip_srcver.sub('', changes["source"])
# Ensure the source field is a valid package name.
if not re_valid_pkg_name.match(changes["source"]):
except daklib.utils.ParseMaintError, msg:
(changes["changedby822"], changes["changedby2047"],
changes["changedbyname"], changes["changedbyemail"]) = \
- ("", "", "", "")
+ ("", "", "", "")
reject("%s: Changed-By field ('%s') failed to parse: %s" \
% (filename, changes["changed-by"], msg))
for file in file_keys:
# Ensure the file does not already exist in one of the accepted directories
for dir in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
- if not Cnf.has_key("Dir::Queue::%s" % (dir)): continue
+ if not Cnf.has_key("Dir::Queue::%s" % (dir)): continue
if os.path.exists(Cnf["Dir::Queue::%s" % (dir) ]+'/'+file):
reject("%s file already exists in the %s directory." % (file, dir))
if not daklib.utils.re_taint_free.match(file):
elif os.path.exists(Cnf["Dir::Queue::New"] + '/' + dsc_filename):
files[file]["new"] = 1
else:
- dsc_file_exists = 0
+ dsc_file_exists = 0
for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
- if Cnf.has_key("Dir::Queue::%s" % (myq)):
- if os.path.exists(Cnf["Dir::Queue::"+myq] + '/' + dsc_filename):
- dsc_file_exists = 1
- break
- if not dsc_file_exists:
+ if Cnf.has_key("Dir::Queue::%s" % (myq)):
+ if os.path.exists(Cnf["Dir::Queue::"+myq] + '/' + dsc_filename):
+ dsc_file_exists = 1
+ break
+ if not dsc_file_exists:
reject("no source found for %s %s (%s)." % (source_package, source_version, file))
# Check the version and for file overwrites
reject(Upload.check_binary_against_db(file),"")
m = daklib.utils.re_issource.match(f)
if not m:
reject("%s: %s in Files field not recognised as source." % (dsc_filename, f))
- continue
+ continue
type = m.group(3)
if type == "orig.tar.gz" or type == "tar.gz":
has_tar = 1
def check_source():
# Bail out if:
- # a) there's no source
+ # a) there's no source
# or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
# or c) the orig.tar.gz is MIA
if not changes["architecture"].has_key("source") or reprocess == 2 \
for x in changes:
if x.startswith("checksum-"):
- h = x.split("-",1)[1]
- if h not in dict(hashes):
- reject("Unsupported checksum field in .changes" % (h))
+ h = x.split("-",1)[1]
+ if h not in dict(hashes):
+                reject("Unsupported checksum field %s in .changes" % (h))
for x in dsc:
if x.startswith("checksum-"):
- h = x.split("-",1)[1]
- if h not in dict(hashes):
- reject("Unsupported checksum field in .dsc" % (h))
+ h = x.split("-",1)[1]
+ if h not in dict(hashes):
+                reject("Unsupported checksum field %s in .dsc" % (h))
for h,f in hashes:
try:
fs = daklib.utils.build_file_list(changes, 0, "checksums-%s" % h, h)
check_hash(".changes %s" % (h), fs, h, f, files)
- except daklib.utils.no_files_exc:
- reject("No Checksums-%s: field in .changes file" % (h))
+ except daklib.utils.no_files_exc:
+ reject("No Checksums-%s: field in .changes file" % (h))
if "source" not in changes["architecture"]: continue
try:
fs = daklib.utils.build_file_list(dsc, 1, "checksums-%s" % h, h)
check_hash(".dsc %s" % (h), fs, h, f, dsc_files)
- except daklib.utils.no_files_exc:
- reject("No Checksums-%s: field in .changes file" % (h))
+ except daklib.utils.no_files_exc:
+            reject("No Checksums-%s: field in .dsc file" % (h))
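
Both loops above only verify that every Checksums-<hash> field present in the
.changes/.dsc names a hash dak knows how to check; the same test in isolation
(the hashes list here is a stand-in for the real one):

    known_hashes = [("sha1", None), ("sha256", None)]   # stand-in for the real hashes list

    def unsupported_checksum_fields(control, what):
        problems = []
        for field in control:
            if not field.startswith("checksum-"):
                continue
            h = field.split("-", 1)[1]
            if h not in dict(known_hashes):
                problems.append("Unsupported checksum field %s in %s" % (h, what))
        return problems

    # unsupported_checksum_fields({"checksum-md4": "..."}, ".changes")
    # -> ['Unsupported checksum field md4 in .changes']
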
################################################################################
if uid == None:
uid, uid_email = changes["fingerprint"], uid
may_nmu, may_sponsor = 1, 1
- # XXX by default new dds don't have a fingerprint/uid in the db atm,
- # and can't get one in there if we don't allow nmu/sponsorship
+ # XXX by default new dds don't have a fingerprint/uid in the db atm,
+ # and can't get one in there if we don't allow nmu/sponsorship
elif uid[:3] == "dm:":
uid_email = uid[3:]
may_nmu, may_sponsor = 0, 0
changes["changedbyemail"] not in sponsor_addresses):
changes["sponsoremail"] = uid_email
- if sponsored and not may_sponsor:
+ if sponsored and not may_sponsor:
reject("%s is not authorised to sponsor uploads" % (uid))
if not sponsored and not may_nmu:
source_ids = []
- check_suites = changes["distribution"].keys()
- if "unstable" not in check_suites: check_suites.append("unstable")
+ check_suites = changes["distribution"].keys()
+ if "unstable" not in check_suites: check_suites.append("unstable")
for suite in check_suites:
suite_id = daklib.database.get_suite_id(suite)
q = Upload.projectB.query("SELECT s.id FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND sa.suite = %d" % (changes["source"], suite_id))
for b in changes["binary"].keys():
for suite in changes["distribution"].keys():
suite_id = daklib.database.get_suite_id(suite)
- q = Upload.projectB.query("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba On (b.id = ba.bin) WHERE b.package = '%s' AND ba.suite = %s" % (b, suite_id))
- for s in q.getresult():
+ q = Upload.projectB.query("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba On (b.id = ba.bin) WHERE b.package = '%s' AND ba.suite = %s" % (b, suite_id))
+ for s in q.getresult():
if s[0] != changes["source"]:
reject("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
for file in files.keys():
- if files[file].has_key("byhand"):
+ if files[file].has_key("byhand"):
reject("%s may not upload BYHAND file %s" % (uid, file))
if files[file].has_key("new"):
reject("%s may not upload NEW file %s" % (uid, file))
# q-unapproved hax0ring
queue_info = {
"New": { "is": is_new, "process": acknowledge_new },
- "Autobyhand" : { "is" : is_autobyhand, "process": do_autobyhand },
+ "Autobyhand" : { "is" : is_autobyhand, "process": do_autobyhand },
"Byhand" : { "is": is_byhand, "process": do_byhand },
- "OldStableUpdate" : { "is": is_oldstableupdate,
- "process": do_oldstableupdate },
+ "OldStableUpdate" : { "is": is_oldstableupdate,
+ "process": do_oldstableupdate },
"StableUpdate" : { "is": is_stableupdate, "process": do_stableupdate },
"Unembargo" : { "is": is_unembargo, "process": queue_unembargo },
"Embargo" : { "is": is_embargo, "process": queue_embargo },
break
if queue:
print "%s for %s\n%s%s" % (
- queue.upper(), ", ".join(changes["distribution"].keys()),
+ queue.upper(), ", ".join(changes["distribution"].keys()),
reject_message, summary),
queuekey = queue[0].upper()
if queuekey in "RQSA":
def is_unembargo ():
q = Upload.projectB.query(
- "SELECT package FROM disembargo WHERE package = '%s' AND version = '%s'" %
+ "SELECT package FROM disembargo WHERE package = '%s' AND version = '%s'" %
(changes["source"], changes["version"]))
ql = q.getresult()
if ql:
if Options["No-Action"]: return 1
Upload.projectB.query(
- "INSERT INTO disembargo (package, version) VALUES ('%s', '%s')" %
+ "INSERT INTO disembargo (package, version) VALUES ('%s', '%s')" %
(changes["source"], changes["version"]))
return 1
def is_stableupdate ():
if not changes["distribution"].has_key("proposed-updates"):
- return 0
+ return 0
if not changes["architecture"].has_key("source"):
pusuite = daklib.database.get_suite_id("proposed-updates")
q = Upload.projectB.query(
- "SELECT S.source FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND s.version = '%s' AND sa.suite = %d" %
+ "SELECT S.source FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND s.version = '%s' AND sa.suite = %d" %
(changes["source"], changes["version"], pusuite))
ql = q.getresult()
if ql:
def is_oldstableupdate ():
if not changes["distribution"].has_key("oldstable-proposed-updates"):
- return 0
+ return 0
if not changes["architecture"].has_key("source"):
pusuite = daklib.database.get_suite_id("oldstable-proposed-updates")
q = Upload.projectB.query(
- "SELECT S.source FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND s.version = '%s' AND sa.suite = %d" %
+ "SELECT S.source FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND s.version = '%s' AND sa.suite = %d" %
(changes["source"], changes["version"], pusuite))
ql = q.getresult()
if ql:
any_auto = 0
for file in files.keys():
if files[file].has_key("byhand"):
- any_auto = 1
-
- # filename is of form "PKG_VER_ARCH.EXT" where PKG, VER and ARCH
- # don't contain underscores, and ARCH doesn't contain dots.
- # further VER matches the .changes Version:, and ARCH should be in
- # the .changes Architecture: list.
- if file.count("_") < 2:
- all_auto = 0
- continue
-
- (pkg, ver, archext) = file.split("_", 2)
- if archext.count(".") < 1 or changes["version"] != ver:
- all_auto = 0
- continue
-
- ABH = Cnf.SubTree("AutomaticByHandPackages")
- if not ABH.has_key(pkg) or \
- ABH["%s::Source" % (pkg)] != changes["source"]:
- print "not match %s %s" % (pkg, changes["source"])
- all_auto = 0
- continue
-
- (arch, ext) = archext.split(".", 1)
- if arch not in changes["architecture"]:
- all_auto = 0
- continue
-
- files[file]["byhand-arch"] = arch
- files[file]["byhand-script"] = ABH["%s::Script" % (pkg)]
+ any_auto = 1
+
+ # filename is of form "PKG_VER_ARCH.EXT" where PKG, VER and ARCH
+ # don't contain underscores, and ARCH doesn't contain dots.
+ # further VER matches the .changes Version:, and ARCH should be in
+ # the .changes Architecture: list.
+ if file.count("_") < 2:
+ all_auto = 0
+ continue
+
+ (pkg, ver, archext) = file.split("_", 2)
+ if archext.count(".") < 1 or changes["version"] != ver:
+ all_auto = 0
+ continue
+
+ ABH = Cnf.SubTree("AutomaticByHandPackages")
+ if not ABH.has_key(pkg) or \
+ ABH["%s::Source" % (pkg)] != changes["source"]:
+ print "not match %s %s" % (pkg, changes["source"])
+ all_auto = 0
+ continue
+
+ (arch, ext) = archext.split(".", 1)
+ if arch not in changes["architecture"]:
+ all_auto = 0
+ continue
+
+ files[file]["byhand-arch"] = arch
+ files[file]["byhand-script"] = ABH["%s::Script" % (pkg)]
return any_auto and all_auto
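
As the comment in the hunk above notes, automatic-byhand candidates are named
PKG_VER_ARCH.EXT; the filename split on its own (independent of the
AutomaticByHandPackages checks, example filename invented):

    def parse_byhand_name(filename):
        # Split PKG_VER_ARCH.EXT into its parts, or return None if it doesn't fit.
        if filename.count("_") < 2:
            return None
        pkg, ver, archext = filename.split("_", 2)
        if archext.count(".") < 1:
            return None
        arch, ext = archext.split(".", 1)
        return (pkg, ver, arch, ext)

    # parse_byhand_name("debian-installer-images_20080522_amd64.tar.gz")
    # -> ('debian-installer-images', '20080522', 'amd64', 'tar.gz')
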
os.system("ls -l %s" % byhandfile)
result = os.system("%s %s %s %s %s" % (
- files[file]["byhand-script"], byhandfile,
+ files[file]["byhand-script"], byhandfile,
changes["version"], files[file]["byhand-arch"],
os.path.abspath(pkg.changes_file)))
if result == 0:
raise
except:
print "ERROR"
- traceback.print_exc(file=sys.stderr)
+ traceback.print_exc(file=sys.stderr)
pass
# Restore previous WD
if __name__ == '__main__':
main()
-
import copy, glob, os, stat, sys, time
import apt_pkg
-import daklib.queue
+import daklib.queue
import daklib.utils
Cnf = None
nf=notes, first nl=notes, last
Age Keys: m=minutes, h=hours, d=days, w=weeks, o=months, y=years
-
+
"""
sys.exit(exit_code)
############################################################
def sortfunc(a,b):
- for sorting in direction:
- (sortkey, way, time) = sorting
- ret = 0
- if time == "m":
- x=int(a[sortkey]/60)
- y=int(b[sortkey]/60)
- elif time == "h":
- x=int(a[sortkey]/3600)
- y=int(b[sortkey]/3600)
- elif time == "d":
- x=int(a[sortkey]/86400)
- y=int(b[sortkey]/86400)
- elif time == "w":
- x=int(a[sortkey]/604800)
- y=int(b[sortkey]/604800)
- elif time == "o":
- x=int(a[sortkey]/2419200)
- y=int(b[sortkey]/2419200)
- elif time == "y":
- x=int(a[sortkey]/29030400)
- y=int(b[sortkey]/29030400)
- else:
- x=a[sortkey]
- y=b[sortkey]
- if x < y:
- ret = -1
- elif x > y:
- ret = 1
- if ret != 0:
- if way < 0:
- ret = ret*-1
- return ret
- return 0
+ for sorting in direction:
+ (sortkey, way, time) = sorting
+ ret = 0
+ if time == "m":
+ x=int(a[sortkey]/60)
+ y=int(b[sortkey]/60)
+ elif time == "h":
+ x=int(a[sortkey]/3600)
+ y=int(b[sortkey]/3600)
+ elif time == "d":
+ x=int(a[sortkey]/86400)
+ y=int(b[sortkey]/86400)
+ elif time == "w":
+ x=int(a[sortkey]/604800)
+ y=int(b[sortkey]/604800)
+ elif time == "o":
+ x=int(a[sortkey]/2419200)
+ y=int(b[sortkey]/2419200)
+ elif time == "y":
+ x=int(a[sortkey]/29030400)
+ y=int(b[sortkey]/29030400)
+ else:
+ x=a[sortkey]
+ y=b[sortkey]
+ if x < y:
+ ret = -1
+ elif x > y:
+ ret = 1
+ if ret != 0:
+ if way < 0:
+ ret = ret*-1
+ return ret
+ return 0
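
sortfunc() above compares two queue entries on the selected key after rounding
ages down to the requested unit; the rounding step alone, with the divisors
copied from the code:

    UNIT_SECONDS = {
        "m": 60,        # minutes
        "h": 3600,      # hours
        "d": 86400,     # days
        "w": 604800,    # weeks
        "o": 2419200,   # "months" of four weeks
        "y": 29030400,  # "years" of twelve such months
    }

    def bucket(age_seconds, unit):
        # Unknown units fall back to the raw value, exactly as sortfunc() does.
        if unit not in UNIT_SECONDS:
            return age_seconds
        return int(age_seconds / UNIT_SECONDS[unit])
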
############################################################
def header():
print """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
- <html><head><meta http-equiv="Content-Type" content="text/html; charset=utf-8">
- <title>Debian NEW and BYHAND Packages</title>
- <link type="text/css" rel="stylesheet" href="style.css">
- <link rel="shortcut icon" href="http://www.debian.org/favicon.ico">
- </head>
- <body>
- <div align="center">
- <a href="http://www.debian.org/">
+ <html><head><meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+ <title>Debian NEW and BYHAND Packages</title>
+ <link type="text/css" rel="stylesheet" href="style.css">
+ <link rel="shortcut icon" href="http://www.debian.org/favicon.ico">
+ </head>
+ <body>
+ <div align="center">
+ <a href="http://www.debian.org/">
<img src="http://www.debian.org/logos/openlogo-nd-50.png" border="0" hspace="0" vspace="0" alt=""></a>
- <a href="http://www.debian.org/">
+ <a href="http://www.debian.org/">
<img src="http://www.debian.org/Pics/debian.png" border="0" hspace="0" vspace="0" alt="Debian Project"></a>
- </div>
- <br />
- <table class="reddy" width="100%">
- <tr>
- <td class="reddy">
+ </div>
+ <br />
+ <table class="reddy" width="100%">
+ <tr>
+ <td class="reddy">
<img src="http://www.debian.org/Pics/red-upperleft.png" align="left" border="0" hspace="0" vspace="0"
alt="" width="15" height="16"></td>
- <td rowspan="2" class="reddy">Debian NEW and BYHAND Packages</td>
- <td class="reddy">
+ <td rowspan="2" class="reddy">Debian NEW and BYHAND Packages</td>
+ <td class="reddy">
<img src="http://www.debian.org/Pics/red-upperright.png" align="right" border="0" hspace="0" vspace="0"
alt="" width="16" height="16"></td>
- </tr>
- <tr>
- <td class="reddy">
+ </tr>
+ <tr>
+ <td class="reddy">
<img src="http://www.debian.org/Pics/red-lowerleft.png" align="left" border="0" hspace="0" vspace="0"
alt="" width="16" height="16"></td>
- <td class="reddy">
+ <td class="reddy">
<img src="http://www.debian.org/Pics/red-lowerright.png" align="right" border="0" hspace="0" vspace="0"
alt="" width="15" height="16"></td>
- </tr>
- </table>
- """
+ </tr>
+ </table>
+ """
def footer():
print "<p class=\"validate\">Timestamp: %s (UTC)</p>" % (time.strftime("%d.%m.%Y / %H:%M:%S", time.gmtime()))
print "<p>You may want to look at <a href=\"http://ftp-master.debian.org/REJECT-FAQ.html\">the REJECT-FAQ</a> for possible reasons why one of the above packages may get rejected.</p>"
print """<a href="http://validator.w3.org/check?uri=referer">
<img border="0" src="http://www.w3.org/Icons/valid-html401" alt="Valid HTML 4.01!" height="31" width="88"></a>
- <a href="http://jigsaw.w3.org/css-validator/check/referer">
+ <a href="http://jigsaw.w3.org/css-validator/check/referer">
<img border="0" src="http://jigsaw.w3.org/css-validator/images/vcss" alt="Valid CSS!"
height="31" width="88"></a>
"""
def table_header(type):
print "<h1>Summary for: %s</h1>" % (type)
print """<center><table border="0">
- <tr>
- <th align="center">Package</th>
- <th align="center">Version</th>
- <th align="center">Arch</th>
- <th align="center">Distribution</th>
- <th align="center">Age</th>
- <th align="center">Maintainer</th>
- <th align="center">Closes</th>
- </tr>
- """
+ <tr>
+ <th align="center">Package</th>
+ <th align="center">Version</th>
+ <th align="center">Arch</th>
+ <th align="center">Distribution</th>
+ <th align="center">Age</th>
+ <th align="center">Maintainer</th>
+ <th align="center">Closes</th>
+ </tr>
+ """
def table_footer(type, source_count, total_count):
print "</table></center><br>\n"
print "<a href=\"http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=%s\">#%s</a><br>" % (close, close)
print "</td></tr>"
row_number+=1
-
+
############################################################
def process_changes_files(changes_files, type):
# If we produce html we always have oldest first.
direction.append([4,-1,"ao"])
else:
- if Cnf.has_key("Queue-Report::Options::Sort"):
- for i in Cnf["Queue-Report::Options::Sort"].split(","):
- if i == "ao":
- # Age, oldest first.
- direction.append([4,-1,age])
- elif i == "an":
- # Age, newest first.
- direction.append([4,1,age])
- elif i == "na":
- # Name, Ascending.
- direction.append([0,1,0])
- elif i == "nd":
- # Name, Descending.
- direction.append([0,-1,0])
- elif i == "nl":
- # Notes last.
- direction.append([3,1,0])
- elif i == "nf":
- # Notes first.
- direction.append([3,-1,0])
+ if Cnf.has_key("Queue-Report::Options::Sort"):
+ for i in Cnf["Queue-Report::Options::Sort"].split(","):
+ if i == "ao":
+ # Age, oldest first.
+ direction.append([4,-1,age])
+ elif i == "an":
+ # Age, newest first.
+ direction.append([4,1,age])
+ elif i == "na":
+ # Name, Ascending.
+ direction.append([0,1,0])
+ elif i == "nd":
+ # Name, Descending.
+ direction.append([0,-1,0])
+ elif i == "nl":
+ # Notes last.
+ direction.append([3,1,0])
+ elif i == "nf":
+ # Notes first.
+ direction.append([3,-1,0])
entries.sort(lambda x, y: sortfunc(x, y))
    # Yes, in theory you can add several sort options at the commandline with. But my mind is too small
    # at the moment to come up with a really good sorting function that considers all the sidesteps you
('s',"sort","Queue-Report::Options::Sort", "HasArg"),
('a',"age","Queue-Report::Options::Age", "HasArg")]
for i in [ "help" ]:
- if not Cnf.has_key("Queue-Report::Options::%s" % (i)):
- Cnf["Queue-Report::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Queue-Report::Options::%s" % (i)):
+ Cnf["Queue-Report::Options::%s" % (i)] = ""
apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
Options = Cnf.SubTree("Queue-Report::Options")
if Options["Help"]:
- usage()
+ usage()
Upload = daklib.queue.Upload(Cnf)
import apt_pkg
import daklib.database
import daklib.logging
-import daklib.queue
+import daklib.queue
import daklib.utils
################################################################################
('m',"manual-reject","Reject-Proposed-Updates::Options::Manual-Reject", "HasArg"),
('s',"no-mail", "Reject-Proposed-Updates::Options::No-Mail")]
for i in [ "help", "manual-reject", "no-mail" ]:
- if not Cnf.has_key("Reject-Proposed-Updates::Options::%s" % (i)):
- Cnf["Reject-Proposed-Updates::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Reject-Proposed-Updates::Options::%s" % (i)):
+ Cnf["Reject-Proposed-Updates::Options::%s" % (i)] = ""
arguments = apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
Options = Cnf.SubTree("Reject-Proposed-Updates::Options")
if Options["Help"]:
- usage()
+ usage()
if not arguments:
daklib.utils.fubar("need at least one .changes filename as an argument.")
for arg in arguments:
arg = daklib.utils.validate_changes_file_arg(arg)
Upload.pkg.changes_file = arg
- Upload.init_vars()
+ Upload.init_vars()
cwd = os.getcwd()
os.chdir(Cnf["Suite::Proposed-Updates::CopyDotDak"])
Upload.update_vars()
# If we fail here someone is probably trying to exploit the race
# so let's just raise an exception ...
if os.path.exists(reject_filename):
- os.unlink(reject_filename)
+ os.unlink(reject_filename)
reject_fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
# Build up the rejection email
unsat = 0
for dep_package, _, _ in dep:
if dep_package in removals:
- unsat += 1
+ unsat += 1
if unsat == len(dep):
component = p2c[package]
if component != "main":
unsat = 0
for dep_package, _, _ in dep:
if dep_package in removals:
- unsat += 1
+ unsat += 1
if unsat == len(dep):
if component != "main":
source = "%s/%s" % (source, component)
else:
print "No dependency problem found."
print
-
+
################################################################################
def main ():
for i in [ "architecture", "binary-only", "carbon-copy", "component",
"done", "help", "no-action", "partial", "rdep-check", "reason",
"source-only" ]:
- if not Cnf.has_key("Rm::Options::%s" % (i)):
- Cnf["Rm::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Rm::Options::%s" % (i)):
+ Cnf["Rm::Options::%s" % (i)] = ""
if not Cnf.has_key("Rm::Options::Suite"):
- Cnf["Rm::Options::Suite"] = "unstable"
+ Cnf["Rm::Options::Suite"] = "unstable"
arguments = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
Options = Cnf.SubTree("Rm::Options")
if Options["Help"]:
- usage()
+ usage()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
daklib.database.init(Cnf, projectB)
if __name__ == '__main__':
main()
-
import commands, os, pwd, re, sys, time
import apt_pkg
-import daklib.queue
+import daklib.queue
import daklib.utils
################################################################################
components = {}
upload_uris = {}
file_list = []
- Upload.init_vars()
+ Upload.init_vars()
# Parse the .dak file for the .changes file
Upload.pkg.changes_file = changes_file
Upload.update_vars()
for arg in changes_files:
arg = daklib.utils.validate_changes_file_arg(arg)
- Upload.pkg.changes_file = arg
- Upload.init_vars()
- Upload.update_vars()
+ Upload.pkg.changes_file = arg
+ Upload.init_vars()
+ Upload.update_vars()
- src = Upload.pkg.changes["source"]
- if src not in adv_packages:
- adv_packages += [src]
+ src = Upload.pkg.changes["source"]
+ if src not in adv_packages:
+ adv_packages += [src]
- suites = Upload.pkg.changes["distribution"].keys()
- for suite in suites:
- if not updated_pkgs.has_key(suite):
+ suites = Upload.pkg.changes["distribution"].keys()
+ for suite in suites:
+ if not updated_pkgs.has_key(suite):
updated_pkgs[suite] = {}
- files = Upload.pkg.files
- for file in files.keys():
- arch = files[file]["architecture"]
- md5 = files[file]["md5sum"]
- size = files[file]["size"]
- poolname = Cnf["Dir::PoolRoot"] + \
- daklib.utils.poolify(src, files[file]["component"])
- if arch == "source" and file.endswith(".dsc"):
- dscpoolname = poolname
- for suite in suites:
- if not updated_pkgs[suite].has_key(arch):
- updated_pkgs[suite][arch] = {}
- updated_pkgs[suite][arch][file] = {
+ files = Upload.pkg.files
+ for file in files.keys():
+ arch = files[file]["architecture"]
+ md5 = files[file]["md5sum"]
+ size = files[file]["size"]
+ poolname = Cnf["Dir::PoolRoot"] + \
+ daklib.utils.poolify(src, files[file]["component"])
+ if arch == "source" and file.endswith(".dsc"):
+ dscpoolname = poolname
+ for suite in suites:
+ if not updated_pkgs[suite].has_key(arch):
+ updated_pkgs[suite][arch] = {}
+ updated_pkgs[suite][arch][file] = {
"md5": md5, "size": size,
"poolname": poolname }
- dsc_files = Upload.pkg.dsc_files
- for file in dsc_files.keys():
- arch = "source"
- if not dsc_files[file].has_key("files id"):
+ dsc_files = Upload.pkg.dsc_files
+ for file in dsc_files.keys():
+ arch = "source"
+ if not dsc_files[file].has_key("files id"):
continue
- # otherwise, it's already in the pool and needs to be
- # listed specially
- md5 = dsc_files[file]["md5sum"]
- size = dsc_files[file]["size"]
- for suite in suites:
- if not updated_pkgs[suite].has_key(arch):
- updated_pkgs[suite][arch] = {}
- updated_pkgs[suite][arch][file] = {
+ # otherwise, it's already in the pool and needs to be
+ # listed specially
+ md5 = dsc_files[file]["md5sum"]
+ size = dsc_files[file]["size"]
+ for suite in suites:
+ if not updated_pkgs[suite].has_key(arch):
+ updated_pkgs[suite][arch] = {}
+ updated_pkgs[suite][arch][file] = {
"md5": md5, "size": size,
"poolname": dscpoolname }
username = whoamifull[4].split(",")[0]
Subst = {
- "__ADVISORY__": advisory_nr,
- "__WHOAMI__": username,
- "__DATE__": time.strftime("%B %d, %Y", time.gmtime(time.time())),
- "__PACKAGE__": ", ".join(adv_packages),
+ "__ADVISORY__": advisory_nr,
+ "__WHOAMI__": username,
+ "__DATE__": time.strftime("%B %d, %Y", time.gmtime(time.time())),
+ "__PACKAGE__": ", ".join(adv_packages),
"__DAK_ADDRESS__": Cnf["Dinstall::MyEmailAddress"]
}
Cnf["Suite::%s::Version" % suite], suite)
adv += "%s\n%s\n\n" % (suite_header, "-"*len(suite_header))
- arches = Cnf.ValueList("Suite::%s::Architectures" % suite)
- if "source" in arches:
+ arches = Cnf.ValueList("Suite::%s::Architectures" % suite)
+ if "source" in arches:
arches.remove("source")
- if "all" in arches:
+ if "all" in arches:
arches.remove("all")
- arches.sort()
+ arches.sort()
- adv += " %s was released for %s.\n\n" % (
- suite.capitalize(), daklib.utils.join_with_commas_and(arches))
+ adv += " %s was released for %s.\n\n" % (
+ suite.capitalize(), daklib.utils.join_with_commas_and(arches))
- for a in ["source", "all"] + arches:
- if not updated_pkgs[suite].has_key(a):
+ for a in ["source", "all"] + arches:
+ if not updated_pkgs[suite].has_key(a):
continue
- if a == "source":
- adv += " Source archives:\n\n"
- elif a == "all":
- adv += " Architecture independent packages:\n\n"
- else:
- adv += " %s architecture (%s)\n\n" % (a,
- Cnf["Architectures::%s" % a])
-
- for file in updated_pkgs[suite][a].keys():
- adv += " http://%s/%s%s\n" % (
- archive, updated_pkgs[suite][a][file]["poolname"], file)
- adv += " Size/MD5 checksum: %8s %s\n" % (
- updated_pkgs[suite][a][file]["size"],
- updated_pkgs[suite][a][file]["md5"])
- adv += "\n"
+ if a == "source":
+ adv += " Source archives:\n\n"
+ elif a == "all":
+ adv += " Architecture independent packages:\n\n"
+ else:
+ adv += " %s architecture (%s)\n\n" % (a,
+ Cnf["Architectures::%s" % a])
+
+ for file in updated_pkgs[suite][a].keys():
+ adv += " http://%s/%s%s\n" % (
+ archive, updated_pkgs[suite][a][file]["poolname"], file)
+ adv += " Size/MD5 checksum: %8s %s\n" % (
+ updated_pkgs[suite][a][file]["size"],
+ updated_pkgs[suite][a][file]["md5"])
+ adv += "\n"
adv = adv.rstrip()
Subst["__ADVISORY_TEXT__"] = adv
import apt_pkg
import examine_package
import daklib.database
-import daklib.queue
+import daklib.queue
import daklib.utils
# Globals
<html><head><meta http-equiv="Content-Type" content="text/html; charset=utf-8">"""
print "<title>%s - Debian NEW package overview</title>" % (name)
print """<link type="text/css" rel="stylesheet" href="/style.css">
- <link rel="shortcut icon" href="http://www.debian.org/favicon.ico">
- </head>
- <body>
- <div align="center">
- <a href="http://www.debian.org/">
+ <link rel="shortcut icon" href="http://www.debian.org/favicon.ico">
+ </head>
+ <body>
+ <div align="center">
+ <a href="http://www.debian.org/">
<img src="http://www.debian.org/logos/openlogo-nd-50.png" border="0" hspace="0" vspace="0" alt=""></a>
- <a href="http://www.debian.org/">
+ <a href="http://www.debian.org/">
<img src="http://www.debian.org/Pics/debian.png" border="0" hspace="0" vspace="0" alt="Debian Project"></a>
- </div>
- <br />
- <table class="reddy" width="100%">
- <tr>
- <td class="reddy">
+ </div>
+ <br />
+ <table class="reddy" width="100%">
+ <tr>
+ <td class="reddy">
<img src="http://www.debian.org/Pics/red-upperleft.png" align="left" border="0" hspace="0" vspace="0"
alt="" width="15" height="16"></td>"""
print """<td rowspan="2" class="reddy">Debian NEW package overview for %s</td>""" % (name)
print """<td class="reddy">
<img src="http://www.debian.org/Pics/red-upperright.png" align="right" border="0" hspace="0" vspace="0"
alt="" width="16" height="16"></td>
- </tr>
- <tr>
- <td class="reddy">
+ </tr>
+ <tr>
+ <td class="reddy">
<img src="http://www.debian.org/Pics/red-lowerleft.png" align="left" border="0" hspace="0" vspace="0"
alt="" width="16" height="16"></td>
- <td class="reddy">
+ <td class="reddy">
<img src="http://www.debian.org/Pics/red-lowerright.png" align="right" border="0" hspace="0" vspace="0"
alt="" width="15" height="16"></td>
- </tr>
- </table>
- """
+ </tr>
+ </table>
+ """
def html_footer():
print "<p class=\"validate\">Timestamp: %s (UTC)</p>" % (time.strftime("%d.%m.%Y / %H:%M:%S", time.gmtime()))
print """<a href="http://validator.w3.org/check?uri=referer">
<img border="0" src="http://www.w3.org/Icons/valid-html401" alt="Valid HTML 4.01!" height="31" width="88"></a>
- <a href="http://jigsaw.w3.org/css-validator/check/referer">
+ <a href="http://jigsaw.w3.org/css-validator/check/referer">
<img border="0" src="http://jigsaw.w3.org/css-validator/images/vcss" alt="Valid CSS!"
height="31" width="88"></a>
"""
dest = dirname + '/' + os.path.basename(filename)
if os.path.exists(dest):
daklib.utils.fubar("%s already exists." % (dest))
- print "Move: %s -> %s" % (filename, dest)
+ print "Move: %s -> %s" % (filename, dest)
os.rename(filename, dest)
count = count + 1
print "Moved %d files." % (count)
Cnf = daklib.utils.get_conf()
Arguments = [('h',"help","Stats::Options::Help")]
for i in [ "help" ]:
- if not Cnf.has_key("Stats::Options::%s" % (i)):
- Cnf["Stats::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Stats::Options::%s" % (i)):
+ Cnf["Stats::Options::%s" % (i)] = ""
args = apt_pkg.ParseCommandLine(Cnf, Arguments, sys.argv)
Options = Cnf.SubTree("Stats::Options")
if Options["Help"]:
- usage()
+ usage()
if len(args) < 1:
daklib.utils.warn("dak stats requires a MODE argument")
if __name__ == '__main__':
main()
-
section=""
dest = "%sdists/%s/%s/source/%s%s" % (Cnf["Dir::Root"], codename, component, section, os.path.basename(i[3]))
if not os.path.exists(dest):
- src = i[2]+i[3]
- src = daklib.utils.clean_symlink(src, dest, Cnf["Dir::Root"])
+ src = i[2]+i[3]
+ src = daklib.utils.clean_symlink(src, dest, Cnf["Dir::Root"])
if Cnf.Find("Symlink-Dists::Options::Verbose"):
print src+' -> '+dest
os.symlink(src, dest)
Arguments = [('h',"help","Symlink-Dists::Options::Help"),
('v',"verbose","Symlink-Dists::Options::Verbose")]
for i in ["help", "verbose" ]:
- if not Cnf.has_key("Symlink-Dists::Options::%s" % (i)):
- Cnf["Symlink-Dists::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Symlink-Dists::Options::%s" % (i)):
+ Cnf["Symlink-Dists::Options::%s" % (i)] = ""
apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
Options = Cnf.SubTree("Symlink-Dists::Options")
if Options["Help"]:
- usage()
+ usage()
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
if __name__ == '__main__':
main()
-
def fail(message):
sys.stderr.write("%s\n" % (message))
sys.exit(1)
-
+
################################################################################
def main ():
def fail(message):
sys.stderr.write("%s\n" % (message))
sys.exit(1)
-
+
################################################################################
def main ():
projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
daklib.database.init(Cnf, projectB)
-
+
################################################################################
def usage (exit_code=0):
try:
for test in trans:
t = trans[test]
-
+
# First check if we know all the keys for the transition and if they have
# the right type (and for the packages also if the list has the right types
    # included, i.e. not a list inside the list, but only str in the list)
if key not in checkkeys:
print "ERROR: Unknown key %s in transition %s" % (key, test)
failure = True
-
+
if key == "packages":
if type(t[key]) != list:
print "ERROR: Unknown type %s for packages in transition %s." % (type(t[key]), test)
print "ERROR: No packages defined in transition %s" % (test)
failure = True
continue
-
+
elif type(t[key]) != str:
if key == "new" and type(t[key]) == int:
# Ok, debian native version
else:
print "ERROR: Unknown type %s for key %s in transition %s" % (type(t[key]), key, test)
failure = True
-
+
# And now the other way round - are all our keys defined?
for key in checkkeys:
if key not in t:
trans_file = Cnf["Dinstall::Reject::ReleaseTransitions"]
trans_temp = trans_file + ".tmp"
-
+
trans_lock = lock_file(trans_file)
temp_lock = lock_file(trans_temp)
sys.exit(3)
if Options["sudo"]:
- os.spawnl(os.P_WAIT, "/usr/bin/sudo", "/usr/bin/sudo", "-u", "dak", "-H",
+ os.spawnl(os.P_WAIT, "/usr/bin/sudo", "/usr/bin/sudo", "-u", "dak", "-H",
"/usr/local/bin/dak", "transitions", "--import", from_file)
else:
trans = load_transitions(from_file)
# NB: file is unlinked by caller, but fd is never actually closed.
# We need the chmod, as the file is (most probably) copied from a
# sudo-ed script and would be unreadable if it had the default mkstemp mode
-
+
(fd, path) = tempfile.mkstemp("", "transitions", Cnf["Transitions::TempPath"])
os.chmod(path, 0644)
f = open(path, "w")
if result != 0:
os.unlink(edit_file)
daklib.utils.fubar("%s invocation failed for %s, not removing tempfile." % (editor, edit_file))
-
+
# Now try to load the new file
test = load_transitions(edit_file)
print "------------------------------------------------------------------------"
transition_info(test)
- prompt = "[S]ave, Edit again, Drop changes?"
- default = "S"
+ prompt = "[S]ave, Edit again, Drop changes?"
+ default = "S"
answer = "XXX"
while prompt.find(answer) == -1:
print "Committing"
for remove in to_remove:
del transitions[remove]
-
+
edit_file = temp_transitions_file(transitions)
write_transitions_from_file(edit_file)
################################################################################
def print_info(trans, source, expected, rm, reason, packages):
- print """Looking at transition: %s
- Source: %s
- New Version: %s
- Responsible: %s
- Description: %s
- Blocked Packages (total: %d): %s
+ print """Looking at transition: %s
+Source: %s
+New Version: %s
+Responsible: %s
+Description: %s
+Blocked Packages (total: %d): %s
""" % (trans, source, expected, rm, reason, len(packages), ", ".join(packages))
- return
+ return
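
Going by the checks and print_info() above, a parsed transition entry is a plain
dict; something along these lines (the key names and values are an illustration
inferred from this file, not a confirmed schema):

    transitions = {
        "example_transition": {
            "source": "examplepkg",                     # source package being tracked
            "new": "2.0-1",                             # version expected to end the transition
            "rm": "Some Release Manager",               # who is responsible
            "reason": "library transition",             # free-form description
            "packages": ["examplepkg", "libexample1"],  # uploads blocked meanwhile
        },
    }
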
################################################################################
#### This can run within sudo !! ####
#####################################
init()
-
+
# Check if there is a file defined (and existant)
transpath = Cnf.get("Dinstall::Reject::ReleaseTransitions", "")
if transpath == "":
daklib.utils.warn("Temporary path %s not found." %
(Cnf["Transitions::TempPath"]))
sys.exit(1)
-
+
if Options["import"]:
try:
write_transitions_from_file(Options["import"])
transition_info(transitions)
sys.exit(0)
-
+
################################################################################
if __name__ == '__main__':
myfunc.__doc__ = f.__doc__
myfunc.__dict__.update(f.__dict__)
- fnname = "%s:%s" % (module, name)
- if fnname in dak_functions_to_replace:
- raise Exception, \
- "%s in %s already marked to be replaced" % (name, module)
+ fnname = "%s:%s" % (module, name)
+ if fnname in dak_functions_to_replace:
+ raise Exception, \
+ "%s in %s already marked to be replaced" % (name, module)
dak_functions_to_replace["%s:%s" % (module,name)] = myfunc
return f
return x
if len(f) > 0 and m == name:
dak_replaced_functions[f] = module.__dict__[f]
module.__dict__[f] = newfunc
-
-
if not os.path.exists(logdir):
umask = os.umask(00000)
os.makedirs(logdir, 02775)
- os.umask(umask)
+ os.umask(umask)
# Open the logfile
logfilename = "%s/%s" % (logdir, time.strftime("%Y-%m"))
- logfile = None
- if debug:
- logfile = sys.stderr
- else:
- umask = os.umask(00002)
- logfile = utils.open_file(logfilename, 'a')
- os.umask(umask)
+ logfile = None
+ if debug:
+ logfile = sys.stderr
+ else:
+ umask = os.umask(00002)
+ logfile = utils.open_file(logfilename, 'a')
+ os.umask(umask)
self.logfile = logfile
# Log the start of the program
user = pwd.getpwuid(os.getuid())[0]
files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
summary += file + "\n to " + destination + "\n"
- if not files[file].has_key("type"):
- files[file]["type"] = "unknown"
+ if not files[file].has_key("type"):
+ files[file]["type"] = "unknown"
if files[file]["type"] in ["deb", "udeb", "dsc"]:
# (queue/unchecked), there we have override entries already, use them
    # (process-new), there we don't have override entries, use the newly generated ones.
# (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
def source_exists (self, package, source_version, suites = ["any"]):
- okay = 1
- for suite in suites:
- if suite == "any":
- que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
- (package)
- else:
- # source must exist in suite X, or in some other suite that's
- # mapped to X, recursively... silent-maps are counted too,
- # unreleased-maps aren't.
- maps = self.Cnf.ValueList("SuiteMappings")[:]
- maps.reverse()
- maps = [ m.split() for m in maps ]
- maps = [ (x[1], x[2]) for x in maps
- if x[0] == "map" or x[0] == "silent-map" ]
- s = [suite]
- for x in maps:
- if x[1] in s and x[0] not in s:
- s.append(x[0])
-
- que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
+ okay = 1
+ for suite in suites:
+ if suite == "any":
+ que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
+ (package)
+ else:
+ # source must exist in suite X, or in some other suite that's
+ # mapped to X, recursively... silent-maps are counted too,
+ # unreleased-maps aren't.
+ maps = self.Cnf.ValueList("SuiteMappings")[:]
+ maps.reverse()
+ maps = [ m.split() for m in maps ]
+ maps = [ (x[1], x[2]) for x in maps
+ if x[0] == "map" or x[0] == "silent-map" ]
+ s = [suite]
+ for x in maps:
+ if x[1] in s and x[0] not in s:
+ s.append(x[0])
+
+ que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
q = self.projectB.query(que)
# Reduce the query results to a list of version numbers
# No source found...
okay = 0
- break
- return okay
+ break
+ return okay
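
source_exists() above widens the suite being checked into every suite that maps
onto it via SuiteMappings; the expansion step by itself, with invented mapping
entries in the "map OLD NEW" syntax:

    def expand_suite(suite, suite_mappings):
        maps = [m.split() for m in reversed(suite_mappings)]
        maps = [(x[1], x[2]) for x in maps if x[0] in ("map", "silent-map")]
        suites = [suite]
        for old, new in maps:
            if new in suites and old not in suites:
                suites.append(old)
        return suites

    # expand_suite("testing", ["silent-map testing-proposed-updates testing"])
    # -> ['testing', 'testing-proposed-updates']
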
################################################################################
-
+
def in_override_p (self, package, component, suite, binary_type, file):
files = self.pkg.files
ch = self.pkg.changes
cansave = 0
if ch.get('distribution-version', {}).has_key(suite):
- # we really use the other suite, ignoring the conflicting one ...
+ # we really use the other suite, ignoring the conflicting one ...
addsuite = ch["distribution-version"][suite]
-
+
add_version = self.get_anyversion(query_result, addsuite)
target_version = self.get_anyversion(query_result, target_suite)
-
+
if not add_version:
# not add_version can only happen if we map to a suite
# that doesn't enhance the suite we're propup'ing from.
self.pkg.changes.setdefault("propdistribution", {})
self.pkg.changes["propdistribution"][addsuite] = 1
cansave = 1
-
+
if not cansave:
self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
# See process_it() in 'dak process-unchecked' for explanation of this
- # in_unchecked check dropped by ajt 2007-08-28, how did that
- # ever make sense?
+ # in_unchecked check dropped by ajt 2007-08-28, how did that
+ # ever make sense?
if os.path.exists(in_unchecked) and False:
return (self.reject_message, in_unchecked)
else:
def open_file(filename, mode='r'):
try:
- f = open(filename, mode)
+ f = open(filename, mode)
except IOError:
raise cant_open_exc, filename
return f
lines = changes_in.readlines()
if not lines:
- raise changes_parse_error_exc, "[Empty changes file]"
+ raise changes_parse_error_exc, "[Empty changes file]"
# Reindex by line number so we can easily verify the format of
# .dsc files...
if slf:
field = slf.groups()[0].lower()
changes[field] = slf.groups()[1]
- first = 1
+ first = 1
continue
if line == " .":
changes[field] += '\n'
if first == 1 and changes[field] != "":
changes[field] += '\n'
first = 0
- changes[field] += mlf.groups()[0] + '\n'
+ changes[field] += mlf.groups()[0] + '\n'
continue
- error += line
+ error += line
if signing_rules == 1 and inside_signature:
raise invalid_dsc_format_exc, index
if changes.has_key("source"):
# Strip the source version in brackets from the source field,
- # put it in the "source-version" field instead.
+ # put it in the "source-version" field instead.
srcver = re_srchasver.search(changes["source"])
- if srcver:
+ if srcver:
changes["source"] = srcver.group(1)
- changes["source-version"] = srcver.group(2)
+ changes["source-version"] = srcver.group(2)
if error:
- raise changes_parse_error_exc, error
+ raise changes_parse_error_exc, error
return changes
else:
if (format < (1,5) or format > (1,8)):
raise nk_format_exc, "%s" % (changes.get("format","0.0"))
- if field != "files" and format < (1,8):
+ if field != "files" and format < (1,8):
raise nk_format_exc, "%s" % (changes.get("format","0.0"))
includes_section = (not is_a_dsc) and field == "files"
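
The field-parsing fragments above implement the usual deb822 conventions: a "Field: value" line starts a field, a leading space continues it, and a lone " ." stands for an empty line inside a multi-line value. A simplified, self-contained sketch of that loop (the regexes here are assumptions, not daklib's exact ones):

    import re

    re_single = re.compile(r"^(\S+?):\s*(.*)$")   # "Field: value"
    re_multi = re.compile(r"^ (.*)$")             # continuation line

    def parse_block(lines):
        fields = {}
        field = None
        for line in lines:
            line = line.rstrip("\n")
            m = re_single.match(line)
            if m:
                field = m.group(1).lower()
                fields[field] = m.group(2)
                continue
            if field is None:
                continue
            if line == " .":
                fields[field] += "\n"        # literal blank line in the value
                continue
            m = re_multi.match(line)
            if m:
                fields[field] += "\n" + m.group(1)
        return fields
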
# sendmail wrapper, takes _either_ a message string or a file as arguments
def send_mail (message, filename=""):
- # If we've been passed a string dump it into a temporary file
- if message:
- filename = tempfile.mktemp()
- fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
- os.write (fd, message)
- os.close (fd)
-
- # Invoke sendmail
- (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
- if (result != 0):
- raise sendmail_failed_exc, output
-
- # Clean up any temporary files
- if message:
- os.unlink (filename)
+ # If we've been passed a string dump it into a temporary file
+ if message:
+ filename = tempfile.mktemp()
+ fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
+ os.write (fd, message)
+ os.close (fd)
+
+ # Invoke sendmail
+ (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
+ if (result != 0):
+ raise sendmail_failed_exc, output
+
+ # Clean up any temporary files
+ if message:
+ os.unlink (filename)
################################################################################
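
send_mail() above dumps a message string into a temporary file and pipes it to the configured sendmail command. Roughly the same wrapper in modern Python, feeding the message on stdin instead of going through a file (the sendmail command line is an assumed stand-in for Dinstall::SendmailCommand):

    import subprocess

    SENDMAIL = "/usr/sbin/sendmail -oi -t"   # assumed default command

    def send_mail(message="", filename=""):
        # Accept either a message string or a filename, exactly one of them.
        if message:
            data = message.encode("utf-8")
        else:
            with open(filename, "rb") as f:
                data = f.read()
        proc = subprocess.run(SENDMAIL.split(), input=data)
        if proc.returncode != 0:
            raise RuntimeError("sendmail failed with status %d" % proc.returncode)
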
def poolify (source, component):
if component:
- component += '/'
+ component += '/'
if source[:3] == "lib":
- return component + source[:4] + '/' + source + '/'
+ return component + source[:4] + '/' + source + '/'
else:
- return component + source[:1] + '/' + source + '/'
+ return component + source[:1] + '/' + source + '/'
################################################################################
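
For reference, poolify() above yields the standard pool layout; with a "main" component:

    poolify("dpkg", "main")    # -> "main/d/dpkg/"
    poolify("libfoo", "main")  # -> "main/libf/libfoo/"
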
def move (src, dest, overwrite = 0, perms = 0664):
if os.path.exists(dest) and os.path.isdir(dest):
- dest_dir = dest
+ dest_dir = dest
else:
- dest_dir = os.path.dirname(dest)
+ dest_dir = os.path.dirname(dest)
if not os.path.exists(dest_dir):
- umask = os.umask(00000)
- os.makedirs(dest_dir, 02775)
- os.umask(umask)
+ umask = os.umask(00000)
+ os.makedirs(dest_dir, 02775)
+ os.umask(umask)
#print "Moving %s to %s..." % (src, dest)
if os.path.exists(dest) and os.path.isdir(dest):
- dest += '/' + os.path.basename(src)
+ dest += '/' + os.path.basename(src)
# Don't overwrite unless forced to
if os.path.exists(dest):
if not overwrite:
def copy (src, dest, overwrite = 0, perms = 0664):
if os.path.exists(dest) and os.path.isdir(dest):
- dest_dir = dest
+ dest_dir = dest
else:
- dest_dir = os.path.dirname(dest)
+ dest_dir = os.path.dirname(dest)
if not os.path.exists(dest_dir):
- umask = os.umask(00000)
- os.makedirs(dest_dir, 02775)
- os.umask(umask)
+ umask = os.umask(00000)
+ os.makedirs(dest_dir, 02775)
+ os.umask(umask)
#print "Copying %s to %s..." % (src, dest)
if os.path.exists(dest) and os.path.isdir(dest):
- dest += '/' + os.path.basename(src)
+ dest += '/' + os.path.basename(src)
# Don't overwrite unless forced to
if os.path.exists(dest):
if not overwrite:
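
move() and copy() share the preamble shown above: make sure the destination directory exists (group-writable and setgid, hence the temporary umask of 0), turn a directory destination into a full file path, and refuse to overwrite unless asked. A condensed sketch of that shared part:

    import os

    def prepare_dest(src, dest, overwrite=False):
        dest_dir = dest if os.path.isdir(dest) else os.path.dirname(dest)
        if not os.path.exists(dest_dir):
            old_umask = os.umask(0)
            os.makedirs(dest_dir, 0o2775)   # setgid + group-writable
            os.umask(old_umask)
        if os.path.isdir(dest):
            dest = os.path.join(dest, os.path.basename(src))
        if os.path.exists(dest) and not overwrite:
            raise IOError("%s already exists, not overwriting" % dest)
        return dest
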
res = socket.gethostbyaddr(socket.gethostname())
database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
if database_hostname:
- return database_hostname
+ return database_hostname
else:
return res[0]
def which_conf_file ():
res = socket.gethostbyaddr(socket.gethostname())
if Cnf.get("Config::" + res[0] + "::DakConfig"):
- return Cnf["Config::" + res[0] + "::DakConfig"]
+ return Cnf["Config::" + res[0] + "::DakConfig"]
else:
- return default_config
+ return default_config
def which_apt_conf_file ():
res = socket.gethostbyaddr(socket.gethostname())
if Cnf.get("Config::" + res[0] + "::AptConfig"):
- return Cnf["Config::" + res[0] + "::AptConfig"]
+ return Cnf["Config::" + res[0] + "::AptConfig"]
else:
- return default_apt_config
+ return default_apt_config
def which_alias_file():
hostname = socket.gethostbyaddr(socket.gethostname())[0]
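
The which_*_file() helpers above all follow the same per-host lookup: take the machine's canonical name and check for a "Config::<fqdn>::..." override before falling back to a default. A sketch with a plain dict standing in for the apt_pkg configuration (the default path is an assumption):

    import socket

    DEFAULT_CONFIG = "/etc/dak/dak.conf"   # assumed default

    def which_conf_file(cnf):
        fqdn = socket.gethostbyaddr(socket.gethostname())[0]
        return cnf.get("Config::%s::DakConfig" % fqdn) or DEFAULT_CONFIG
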
################################################################################
def join_with_commas_and(list):
- if len(list) == 0: return "nothing"
- if len(list) == 1: return list[0]
- return ", ".join(list[:-1]) + " and " + list[-1]
+ if len(list) == 0: return "nothing"
+ if len(list) == 1: return list[0]
+ return ", ".join(list[:-1]) + " and " + list[-1]
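
join_with_commas_and() above behaves as its name suggests:

    join_with_commas_and([])                      # -> "nothing"
    join_with_commas_and(["foo"])                 # -> "foo"
    join_with_commas_and(["foo", "bar", "baz"])   # -> "foo, bar and baz"
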
################################################################################
################################################################################
def get_conf():
- return Cnf
+ return Cnf
################################################################################
return "%s: tainted filename" % (filename)
# Invoke gpgv on the file
- status_read, status_write = os.pipe();
+ status_read, status_write = os.pipe();
cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
(_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
return None
# Build the command line
- status_read, status_write = os.pipe();
+ status_read, status_write = os.pipe();
cmd = "gpgv --status-fd %s %s %s %s" % (
status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
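
Both call sites above use the same --status-fd idiom: hand gpgv the write end of a pipe for its machine-readable status lines and read them back once it has finished. A hedged sketch of that idiom (it assumes the status output fits in the pipe buffer; keyring and file names are placeholders):

    import os
    import subprocess

    def gpgv_status(sig_path, keyring="/dev/null"):
        status_read, status_write = os.pipe()
        proc = subprocess.run(
            ["gpgv", "--status-fd", str(status_write),
             "--keyring", keyring, sig_path],
            pass_fds=(status_write,), capture_output=True)
        os.close(status_write)          # child's copy is gone; EOF after drain
        chunks = []
        while True:
            chunk = os.read(status_read, 4096)
            if not chunk:
                break
            chunks.append(chunk)
        os.close(status_read)
        return proc.returncode, b"".join(chunks).decode("utf-8", "replace")
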
################################################################################
def gpg_get_key_addresses(fingerprint):
- """retreive email addresses from gpg key uids for a given fingerprint"""
- addresses = key_uid_email_cache.get(fingerprint)
- if addresses != None:
- return addresses
- addresses = set()
- cmd = "gpg --no-default-keyring %s --fingerprint %s" \
- % (gpg_keyring_args(), fingerprint)
- (result, output) = commands.getstatusoutput(cmd)
- if result == 0:
- for l in output.split('\n'):
- m = re_gpg_uid.match(l)
- if m:
- addresses.add(m.group(1))
- key_uid_email_cache[fingerprint] = addresses
- return addresses
+    """retrieve email addresses from gpg key uids for a given fingerprint"""
+ addresses = key_uid_email_cache.get(fingerprint)
+ if addresses != None:
+ return addresses
+ addresses = set()
+ cmd = "gpg --no-default-keyring %s --fingerprint %s" \
+ % (gpg_keyring_args(), fingerprint)
+ (result, output) = commands.getstatusoutput(cmd)
+ if result == 0:
+ for l in output.split('\n'):
+ m = re_gpg_uid.match(l)
+ if m:
+ addresses.add(m.group(1))
+ key_uid_email_cache[fingerprint] = addresses
+ return addresses
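
gpg_get_key_addresses() above is a memoised scrape of gpg's listing output. A simplified sketch of the same idea (the regex and the keyring argument are assumptions, not daklib's exact ones):

    import re
    import subprocess

    _uid_cache = {}
    _re_uid_mail = re.compile(r"^uid.*<([^>]+)>")

    def key_addresses(fingerprint, keyring):
        if fingerprint in _uid_cache:
            return _uid_cache[fingerprint]
        addresses = set()
        proc = subprocess.run(
            ["gpg", "--no-default-keyring", "--keyring", keyring,
             "--fingerprint", fingerprint],
            capture_output=True, text=True)
        if proc.returncode == 0:
            for line in proc.stdout.splitlines():
                m = _re_uid_mail.match(line)
                if m:
                    addresses.add(m.group(1))
        _uid_cache[fingerprint] = addresses
        return addresses
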
################################################################################
apt_pkg.ReadConfigFileISC(Cnf,default_config)
if which_conf_file() != default_config:
- apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
+ apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
################################################################################
if isinstance(maintainer, basestring):
if not isinstance(maintainer, unicode):
- try:
+ try:
maintainer = unicode(maintainer, 'utf-8')
except:
maintainer = unicode(maintainer, 'iso8859-15')
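
The decode-with-fallback above, as a small standalone helper: try UTF-8 first and fall back to ISO-8859-15 for legacy Maintainer strings.

    def to_unicode(value):
        if isinstance(value, bytes):
            try:
                return value.decode("utf-8")
            except UnicodeDecodeError:
                return value.decode("iso8859-15")
        return value
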
sqliteConn.text_factory = unicode
if res:
changedby_id = get_or_set_maintainer_id(res[0])
-
+
cur = projectBdb.cursor()
cur.execute("UPDATE source SET changedby=%s WHERE id=%s" % (changedby_id, row[0]))
cur.close()
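
The fragment above belongs to the Changed-By backfill in insert_missing_changedby.py: look up the recorded Changed-By in a side sqlite database and write the corresponding maintainer id back into projectb. A hedged sketch of that loop (the sqlite table and column names, the %s-style projectb cursor and get_or_set_maintainer_id() are assumptions for illustration):

    import sqlite3

    def backfill_changedby(sqlite_path, pb_cursor, get_or_set_maintainer_id):
        side = sqlite3.connect(sqlite_path)
        pb_cursor.execute(
            "SELECT id, source, version FROM source WHERE changedby IS NULL")
        for source_id, source, version in pb_cursor.fetchall():
            row = side.execute(
                "SELECT changedby FROM changes WHERE source = ? AND version = ?",
                (source, version)).fetchone()
            if row:
                changedby_id = get_or_set_maintainer_id(row[0])
                pb_cursor.execute(
                    "UPDATE source SET changedby = %s WHERE id = %s",
                    (changedby_id, source_id))
        side.close()
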