import errno, fcntl, os, sys, time, re
import apt_pkg
-import dak.lib.database as database
-import dak.lib.logging as logging
-import dak.lib.queue as queue
-import dak.lib.utils as utils
+from daklib import database
+from daklib import logging
+from daklib import queue
+from daklib import utils
+from daklib.dak_exceptions import *
###############################################################################
self.timestamp = time.strftime("%Y%m%d%H%M%S")
# Create the log directory if it doesn't exist
self.log_dir = Cnf["Dir::UrgencyLog"]
- if not os.path.exists(self.log_dir):
- umask = os.umask(00000)
- os.makedirs(self.log_dir, 02775)
+ if not os.path.exists(self.log_dir) or not os.access(self.log_dir, os.W_OK):
+ utils.warn("UrgencyLog directory %s does not exist or is not writeable, using /srv/ftp.debian.org/tmp/ instead" % (self.log_dir))
+ self.log_dir = '/srv/ftp.debian.org/tmp/'
# Open the logfile
self.log_filename = "%s/.install-urgencies-%s.new" % (self.log_dir, self.timestamp)
self.log_file = utils.open_file(self.log_filename, 'w')
# propogate in the case it is in the override tables:
if changes.has_key("propdistribution"):
for suite in changes["propdistribution"].keys():
- if Upload.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file):
- propogate[suite] = 1
- else:
- nopropogate[suite] = 1
+ if Upload.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file):
+ propogate[suite] = 1
+ else:
+ nopropogate[suite] = 1
for suite in propogate.keys():
- if suite in nopropogate:
- continue
- changes["distribution"][suite] = 1
+ if suite in nopropogate:
+ continue
+ changes["distribution"][suite] = 1
for file in files.keys():
# Check the package is still in the override tables
('s',"no-mail", "Dinstall::Options::No-Mail")]
for i in ["automatic", "help", "no-action", "no-lock", "no-mail", "version"]:
- if not Cnf.has_key("Dinstall::Options::%s" % (i)):
- Cnf["Dinstall::Options::%s" % (i)] = ""
+ if not Cnf.has_key("Dinstall::Options::%s" % (i)):
+ Cnf["Dinstall::Options::%s" % (i)] = ""
changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
Options = Cnf.SubTree("Dinstall::Options")
if Options["Automatic"]:
answer = 'R'
else:
- print "INSTALL to " + ", ".join(changes["distribution"].keys())
- print reject_message + summary,
+ print "INSTALL to " + ", ".join(changes["distribution"].keys())
+ print reject_message + summary,
prompt = "[I]nstall, Skip, Quit ?"
if Options["Automatic"]:
answer = 'I'
# Begin a transaction; if we bomb out anywhere between here and the COMMIT WORK below, the DB will not be changed.
projectB.query("BEGIN WORK")
+ # Ensure that we have all the hashes we need below.
+ rejmsg = utils.ensure_hashes(changes, dsc, files, dsc_files)
+ if len(rejmsg) > 0:
+ # There were errors. Print them and SKIP the changes.
+ for msg in rejmsg:
+ utils.warn(msg)
+ return
+
# Add the .dsc file to the DB
for file in files.keys():
if files[file]["type"] == "dsc":
maintainer = dsc["maintainer"]
maintainer = maintainer.replace("'", "\\'")
maintainer_id = database.get_or_set_maintainer_id(maintainer)
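+ # Record who actually made the upload (Changed-By) separately from the maintainer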
+ changedby = changes["changed-by"]
+ changedby = changedby.replace("'", "\\'")
+ changedby_id = database.get_or_set_maintainer_id(changedby)
fingerprint_id = database.get_or_set_fingerprint_id(dsc["fingerprint"])
install_date = time.strftime("%Y-%m-%d")
filename = files[file]["pool name"] + file
dsc_component = files[file]["component"]
dsc_location_id = files[file]["location id"]
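+ # Translate the .dsc's DM-Upload-Allowed field into a boolean for the source table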
+ if dsc.has_key("dm-upload-allowed") and dsc["dm-upload-allowed"] == "yes":
+ dm_upload_allowed = "true"
+ else:
+ dm_upload_allowed = "false"
if not files[file].has_key("files id") or not files[file]["files id"]:
- files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], dsc_location_id)
- projectB.query("INSERT INTO source (source, version, maintainer, file, install_date, sig_fpr) VALUES ('%s', '%s', %d, %d, '%s', %s)"
- % (package, version, maintainer_id, files[file]["files id"], install_date, fingerprint_id))
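+ # Register the .dsc in the files table along with its SHA1 and SHA256 checksums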
+ files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], dsc_location_id)
+ projectB.query("INSERT INTO source (source, version, maintainer, changedby, file, install_date, sig_fpr, dm_upload_allowed) VALUES ('%s', '%s', %d, %d, %d, '%s', %s, %s)"
+ % (package, version, maintainer_id, changedby_id, files[file]["files id"], install_date, fingerprint_id, dm_upload_allowed))
for suite in changes["distribution"].keys():
suite_id = database.get_suite_id(suite)
files_id = database.get_files_id(filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], dsc_location_id)
# FIXME: needs to check for -1/-2 and or handle exception
if files_id == None:
- files_id = database.set_files_id (filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], dsc_location_id)
+ files_id = database.set_files_id (filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], files[dsc_file]["sha1sum"], files[dsc_file]["sha256sum"], dsc_location_id)
projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files_id))
+ # Add the src_uploaders to the DB
+ uploader_ids = [maintainer_id]
+ if dsc.has_key("uploaders"):
+ for u in dsc["uploaders"].split(","):
+ u = u.replace("'", "\\'")
+ u = u.strip()
+ uploader_ids.append(
+ database.get_or_set_maintainer_id(u))
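+ # Insert each uploader only once, warning about duplicates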
+ added_ids = {}
+ for u in uploader_ids:
+ if added_ids.has_key(u):
+ utils.warn("Already saw uploader %s for source %s" % (u, package))
+ continue
+ added_ids[u]=1
+ projectB.query("INSERT INTO src_uploaders (source, maintainer) VALUES (currval('source_id_seq'), %d)" % (u))
+
+
# Add the .deb files to the DB
for file in files.keys():
if files[file]["type"] == "deb":
source = files[file]["source package"]
source_version = files[file]["source version"]
filename = files[file]["pool name"] + file
- if not files[file].has_key("location id") or not files[file]["location id"]:
- files[file]["location id"] = database.get_location_id(Cnf["Dir::Pool"],files[file]["component"],utils.where_am_i())
+ if not files[file].has_key("location id") or not files[file]["location id"]:
+ files[file]["location id"] = database.get_location_id(Cnf["Dir::Pool"],files[file]["component"],utils.where_am_i())
if not files[file].has_key("files id") or not files[file]["files id"]:
- files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["location id"])
+ files[file]["files id"] = database.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["sha1sum"], files[file]["sha256sum"], files[file]["location id"])
source_id = database.get_source_id (source, source_version)
if source_id:
projectB.query("INSERT INTO binaries (package, version, maintainer, source, architecture, file, type, sig_fpr) VALUES ('%s', '%s', %d, %d, %d, %d, '%s', %d)"
% (package, version, maintainer_id, source_id, architecture_id, files[file]["files id"], type, fingerprint_id))
else:
- projectB.query("INSERT INTO binaries (package, version, maintainer, architecture, file, type, sig_fpr) VALUES ('%s', '%s', %d, %d, %d, '%s', %d)"
- % (package, version, maintainer_id, architecture_id, files[file]["files id"], type, fingerprint_id))
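+ # A binary must refer to a source package we know about; give up if we cannot find one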
+ raise NoSourceFieldError, "Unable to find a source id for %s (%s), %s, file %s, type %s, signed by %s" % (package, version, architecture, file, type, fingerprint_id)
for suite in changes["distribution"].keys():
suite_id = database.get_suite_id(suite)
projectB.query("INSERT INTO bin_associations (suite, bin) VALUES (%d, currval('binaries_id_seq'))" % (suite_id))
#
if changes["architecture"].has_key("source") and orig_tar_id and \
orig_tar_location != "legacy" and orig_tar_location != dsc_location_id:
- q = projectB.query("SELECT l.path, f.filename, f.size, f.md5sum FROM files f, location l WHERE f.id = %s AND f.location = l.id" % (orig_tar_id))
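+ # Fetch the SHA1/SHA256 checksums too, so the copied orig tarball keeps them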
+ q = projectB.query("SELECT l.path, f.filename, f.size, f.md5sum, f.sha1sum, f.sha256sum FROM files f, location l WHERE f.id = %s AND f.location = l.id" % (orig_tar_id))
ql = q.getresult()[0]
old_filename = ql[0] + ql[1]
file_size = ql[2]
file_md5sum = ql[3]
+ file_sha1sum = ql[4]
+ file_sha256sum = ql[5]
new_filename = utils.poolify(changes["source"], dsc_component) + os.path.basename(old_filename)
new_files_id = database.get_files_id(new_filename, file_size, file_md5sum, dsc_location_id)
if new_files_id == None:
utils.copy(old_filename, Cnf["Dir::Pool"] + new_filename)
- new_files_id = database.set_files_id(new_filename, file_size, file_md5sum, dsc_location_id)
- projectB.query("UPDATE dsc_files SET file = %s WHERE source = %s AND file = %s" % (new_files_id, source_id, orig_tar_id))
+ new_files_id = database.set_files_id(new_filename, file_size, file_md5sum, file_sha1sum, file_sha256sum, dsc_location_id)
+ projectB.query("UPDATE dsc_files SET file = %s WHERE source = %s AND file = %s" % (new_files_id, database.get_source_id(changes["source"], changes["version"]), orig_tar_id))
# Install the files into the pool
for file in files.keys():
# Add the binaries to stable (and remove it/them from proposed-updates)
for file in files.keys():
if files[file]["type"] == "deb":
- binNMU = 0
package = files[file]["package"]
version = files[file]["version"]
architecture = files[file]["architecture"]
q = projectB.query("SELECT b.id FROM binaries b, architecture a WHERE b.package = '%s' AND b.version = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all') AND b.architecture = a.id" % (package, version, architecture))
ql = q.getresult()
if not ql:
- suite_id = database.get_suite_id('proposed-updates')
- que = "SELECT b.version FROM binaries b JOIN bin_associations ba ON (b.id = ba.bin) JOIN suite su ON (ba.suite = su.id) WHERE b.package = '%s' AND (ba.suite = '%s')" % (package, suite_id)
- q = projectB.query(que)
-
- # Reduce the query results to a list of version numbers
- ql = map(lambda x: x[0], q.getresult())
- if not ql:
- utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s for %s architecture) in binaries table." % (package, version, architecture))
- else:
- for x in ql:
- if re.match(re.compile(r"%s((\.0)?\.)|(\+b)\d+$" % re.escape(version)),x):
- binNMU = 1
- break
- if not binNMU:
- binary_id = ql[0][0]
- suite_id = database.get_suite_id('proposed-updates')
- projectB.query("DELETE FROM bin_associations WHERE suite = '%s' AND bin = '%s'" % (suite_id, binary_id))
- suite_id = database.get_suite_id('stable')
- projectB.query("INSERT INTO bin_associations (suite, bin) VALUES ('%s', '%s')" % (suite_id, binary_id))
- else:
- del files[file]
+ utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s for %s architecture) in binaries table." % (package, version, architecture))
+
+ binary_id = ql[0][0]
+ suite_id = database.get_suite_id('proposed-updates')
+ projectB.query("DELETE FROM bin_associations WHERE suite = '%s' AND bin = '%s'" % (suite_id, binary_id))
+ suite_id = database.get_suite_id('stable')
+ projectB.query("INSERT INTO bin_associations (suite, bin) VALUES ('%s', '%s')" % (suite_id, binary_id))
projectB.query("COMMIT WORK")
if not Options["No-Mail"] and changes["architecture"].has_key("source"):
Subst["__SUITE__"] = " into stable"
Subst["__SUMMARY__"] = summary
- mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-accepted.installed")
+ mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-accepted.install")
utils.send_mail(mail_message)
Upload.announce(short_summary, 1)
Urgency_Logger = Urgency_Log(Cnf)
# Initialize the substitution template mapping global
- bcc = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
+ bcc = "X-DAK: dak process-accepted\nX-Katie: this header is obsolete"
if Cnf.has_key("Dinstall::Bcc"):
Subst["__BCC__"] = bcc + "\nBcc: %s" % (Cnf["Dinstall::Bcc"])
else: