import fcntl
import os
import sys
-import time
+from datetime import datetime
import re
import apt_pkg, commands
-from daklib import database
+
from daklib import daklog
from daklib import queue
from daklib import utils
+from daklib.dbconn import *
from daklib.binary import copy_temporary_contents
from daklib.dak_exceptions import *
from daklib.regexes import re_default_answer, re_issource, re_fdnic
+from daklib.urgencylog import UrgencyLog
+from daklib.summarystats import SummaryStats
###############################################################################
-Cnf = None
Options = None
Logger = None
-Urgency_Logger = None
-projectB = None
-Upload = None
-pkg = None
-
-reject_message = ""
-changes = None
-dsc = None
-dsc_files = None
-files = None
-Subst = None
-
-install_count = 0
-install_bytes = 0.0
-
-installing_to_stable = 0
-
-###############################################################################
-
-# FIXME: this should go away to some Debian specific file
-# FIXME: should die if file already exists
-
-class Urgency_Log:
- "Urgency Logger object"
- def __init__ (self, Cnf):
- "Initialize a new Urgency Logger object"
- self.Cnf = Cnf
- self.timestamp = time.strftime("%Y%m%d%H%M%S")
- # Create the log directory if it doesn't exist
- self.log_dir = Cnf["Dir::UrgencyLog"]
- if not os.path.exists(self.log_dir) or not os.access(self.log_dir, os.W_OK):
- utils.warn("UrgencyLog directory %s does not exist or is not writeable, using /srv/ftp.debian.org/tmp/ instead" % (self.log_dir))
- self.log_dir = '/srv/ftp.debian.org/tmp/'
- # Open the logfile
- self.log_filename = "%s/.install-urgencies-%s.new" % (self.log_dir, self.timestamp)
- self.log_file = utils.open_file(self.log_filename, 'w')
- self.writes = 0
-
- def log (self, source, version, urgency):
- "Log an event"
- self.log_file.write(" ".join([source, version, urgency])+'\n')
- self.log_file.flush()
- self.writes += 1
-
- def close (self):
- "Close a Logger object"
- self.log_file.flush()
- self.log_file.close()
- if self.writes:
- new_filename = "%s/install-urgencies-%s" % (self.log_dir, self.timestamp)
- utils.move(self.log_filename, new_filename)
- else:
- os.unlink(self.log_filename)
-
-
-###############################################################################
-
-
-def reject (str, prefix="Rejected: "):
- global reject_message
- if str:
- reject_message += prefix + str + "\n"
-
-# Recheck anything that relies on the database; since that's not
-# frozen between accept and our run time.
-
-def check():
- propogate={}
- nopropogate={}
- for checkfile in files.keys():
- # The .orig.tar.gz can disappear out from under us is it's a
- # duplicate of one in the archive.
- if not files.has_key(checkfile):
- continue
- # Check that the source still exists
- if files[checkfile]["type"] == "deb":
- source_version = files[checkfile]["source version"]
- source_package = files[checkfile]["source package"]
- if not changes["architecture"].has_key("source") \
- and not Upload.source_exists(source_package, source_version, changes["distribution"].keys()):
- reject("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
-
- # Version and file overwrite checks
- if not installing_to_stable:
- if files[checkfile]["type"] == "deb":
- reject(Upload.check_binary_against_db(checkfile), "")
- elif files[checkfile]["type"] == "dsc":
- reject(Upload.check_source_against_db(checkfile), "")
- (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(checkfile)
- reject(reject_msg, "")
-
- # propogate in the case it is in the override tables:
- if changes.has_key("propdistribution"):
- for suite in changes["propdistribution"].keys():
- if Upload.in_override_p(files[checkfile]["package"], files[checkfile]["component"], suite, files[checkfile].get("dbtype",""), checkfile):
- propogate[suite] = 1
- else:
- nopropogate[suite] = 1
-
- for suite in propogate.keys():
- if suite in nopropogate:
- continue
- changes["distribution"][suite] = 1
-
- for checkfile in files.keys():
- # Check the package is still in the override tables
- for suite in changes["distribution"].keys():
- if not Upload.in_override_p(files[checkfile]["package"], files[checkfile]["component"], suite, files[checkfile].get("dbtype",""), checkfile):
- reject("%s is NEW for %s." % (checkfile, suite))
###############################################################################
def init():
- global Cnf, Options, Upload, projectB, changes, dsc, dsc_files, files, pkg, Subst
+ global Options
- Cnf = utils.get_conf()
+ # Initialize config and connection to db
+ cnf = Config()
+ DBConn()
Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
             ('h',"help","Dinstall::Options::Help"),
             ('n',"no-action","Dinstall::Options::No-Action"),
             ('p',"no-lock","Dinstall::Options::No-Lock"),
             ('s',"no-mail","Dinstall::Options::No-Mail"),
             ('d',"directory","Dinstall::Options::Directory")]
for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
"version", "directory"]:
- if not Cnf.has_key("Dinstall::Options::%s" % (i)):
- Cnf["Dinstall::Options::%s" % (i)] = ""
+ if not cnf.has_key("Dinstall::Options::%s" % (i)):
+ cnf["Dinstall::Options::%s" % (i)] = ""
- changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
- Options = Cnf.SubTree("Dinstall::Options")
+ changes_files = apt_pkg.ParseCommandLine(cnf, Arguments, sys.argv)
+ Options = cnf.SubTree("Dinstall::Options")
if Options["Help"]:
usage()
# If we have a directory flag, use it to find our files
- if Cnf["Dinstall::Options::Directory"] != "":
+ if cnf["Dinstall::Options::Directory"] != "":
# Note that we clobber the list of files we were given in this case
# so warn if the user has done both
if len(changes_files) > 0:
utils.warn("Directory provided so ignoring files given on command line")
- changes_files = utils.get_changes_files(Cnf["Dinstall::Options::Directory"])
-
- Upload = queue.Upload(Cnf)
- projectB = Upload.projectB
-
- changes = Upload.pkg.changes
- dsc = Upload.pkg.dsc
- dsc_files = Upload.pkg.dsc_files
- files = Upload.pkg.files
- pkg = Upload.pkg
- Subst = Upload.Subst
+ changes_files = utils.get_changes_files(cnf["Dinstall::Options::Directory"])
return changes_files
###############################################################################
-def action (queue=""):
- (summary, short_summary) = Upload.build_summaries()
+def action(u, stable_queue=None, log_urgency=True):
+ (summary, short_summary) = u.build_summaries()
+ pi = u.package_info()
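+ # Textual summary of any problems found while checking the upload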
(prompt, answer) = ("", "XXX")
if Options["No-Action"] or Options["Automatic"]:
answer = 'S'
- if reject_message.find("Rejected") != -1:
- print "REJECT\n" + reject_message,
+ if len(u.rejects) > 0:
+ print "REJECT\n" + pi
prompt = "[R]eject, Skip, Quit ?"
if Options["Automatic"]:
answer = 'R'
else:
- print "INSTALL to " + ", ".join(changes["distribution"].keys())
- print reject_message + summary,
+ print "INSTALL to " + ", ".join(u.pkg.changes["distribution"].keys())
+ print pi + summary,
prompt = "[I]nstall, Skip, Quit ?"
if Options["Automatic"]:
answer = 'I'
answer = answer[:1].upper()
if answer == 'R':
- do_reject ()
+ u.do_unaccept()
+ Logger.log(["unaccepted", u.pkg.changes_file])
elif answer == 'I':
- if not installing_to_stable:
- install()
+ if stable_queue:
+ stable_install(u, summary, short_summary, stable_queue)
else:
- stable_install(summary, short_summary, queue)
+ install(u, log_urgency)
elif answer == 'Q':
sys.exit(0)
+
###############################################################################
+def add_poolfile(filename, datadict, location_id, session):
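+ """
+ Add a new file entry to the pool. Flushes the session so the caller
+ can read the generated file id back, but does NOT commit.
+ """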
+ poolfile = PoolFile()
+ poolfile.filename = filename
+ poolfile.filesize = datadict["size"]
+ poolfile.md5sum = datadict["md5sum"]
+ poolfile.sha1sum = datadict["sha1sum"]
+ poolfile.sha256sum = datadict["sha256sum"]
+ poolfile.location_id = location_id
+
+ session.add(poolfile)
+ # Flush to get a file id (NB: This is not a commit)
+ session.flush()
+
+ return poolfile
+
+def add_dsc_to_db(u, filename, session):
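+ """
+ Add the source package described by a .dsc to the DB: the source row
+ itself, its suite associations, dsc_files entries and src_uploaders.
+ Returns (dsc_component, dsc_location_id) for later use by install().
+ """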
+ entry = u.pkg.files[filename]
+ source = DBSource()
+
+ source.source = u.pkg.dsc["source"]
+ source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
+ source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
+ source.changedby_id = get_or_set_maintainer(u.pkg.dsc["changed-by"], session).maintainer_id
+ source.fingerprint_id = get_or_set_fingerprint(u.pkg.dsc["fingerprint"], session).fingerprint_id
+ source.install_date = datetime.now().date()
+
+ dsc_component = entry["component"]
+ dsc_location_id = entry["location id"]
+
+ source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
+
+ # Set up a new poolfile if necessary
+ if not entry.has_key("files id") or not entry["files id"]:
+ filename = entry["pool name"] + filename
+ poolfile = add_poolfile(filename, entry, dsc_location_id, session)
+ entry["files id"] = poolfile.file_id
+
+ source.poolfile_id = entry["files id"]
+ session.add(source)
+ session.flush()
+
+ for suite_name in u.pkg.changes["distribution"].keys():
+ sa = SrcAssociation()
+ sa.source_id = source.source_id
+ sa.suite_id = get_suite(suite_name).suite_id
+ session.add(sa)
+
+ session.flush()
+
+ # Add the source files to the DB (files and dsc_files)
+ dscfile = DSCFile()
+ dscfile.source_id = source.source_id
+ dscfile.poolfile_id = entry["files id"]
+ session.add(dscfile)
+
+ for dsc_file, dentry in u.pkg.dsc_files.items():
+ df = DSCFile()
+ df.source_id = source.source_id
+
+ # If the .orig.tar.gz is already in the pool, its
+ # files id is stored in dsc_files by check_dsc().
+ files_id = dentry.get("files id", None)
+
+ if files_id is None:
+ filename = dentry["pool name"] + dsc_file
+
+ (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
+ # FIXME: needs to check for -1/-2 and or handle exception
+ if found and obj is not None:
+ files_id = obj.file_id
+
+ # If still not found, add it
+ if files_id is None:
+ poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
+ files_id = poolfile.file_id
+
+ df.poolfile_id = files_id
+ session.add(df)
+
+ session.flush()
+
+ # Add the src_uploaders to the DB
+ uploader_ids = [source.maintainer_id]
+ if u.pkg.dsc.has_key("uploaders"):
+ for up in u.pkg.dsc["uploaders"].split(","):
+ up = up.strip()
+ uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
+
+ added_ids = {}
+ for up in uploader_ids:
+ if added_ids.has_key(up):
+ utils.warn("Already saw uploader %s for source %s" % (up, source.source))
+ continue
+ added_ids[up] = 1
+
+ su = SrcUploader()
+ su.maintainer_id = up
+ su.source_id = source.source_id
+ session.add(su)
+
+ session.flush()
+
+ return dsc_component, dsc_location_id
-# Our reject is not really a reject, but an unaccept, but since a) the
-# code for that is non-trivial (reopen bugs, unannounce etc.), b) this
-# should be extremely rare, for now we'll go with whining at our admin
-# folks...
-
-def do_reject ():
- Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
- Subst["__REJECT_MESSAGE__"] = reject_message
- Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
- reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-accepted.unaccept")
-
- # Write the rejection email out as the <foo>.reason file
- reason_filename = os.path.basename(pkg.changes_file[:-8]) + ".reason"
- reject_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
- # If we fail here someone is probably trying to exploit the race
- # so let's just raise an exception ...
- if os.path.exists(reject_filename):
- os.unlink(reject_filename)
- fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
- os.write(fd, reject_mail_message)
- os.close(fd)
-
- utils.send_mail(reject_mail_message)
- Logger.log(["unaccepted", pkg.changes_file])
+
+def add_deb_to_db(u, filename, session):
+ """
+ Contrary to what you might expect, this routine deals with both
+ debs and udebs. That info is in 'dbtype', whilst 'type' is
+ 'deb' for both of them
+ """
+ cnf = Config()
+ entry = u.pkg.files[filename]
+
+ bin = DBBinary()
+ bin.package = entry["package"]
+ bin.version = entry["version"]
+ bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
+ bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
+ bin.arch_id = get_architecture(entry["architecture"], session).arch_id
+ bin.binarytype = entry["dbtype"]
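+ # e.g. a .udeb arrives with entry["type"] == "deb" but entry["dbtype"] == "udeb"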
+
+ # Find poolfile id
+ filename = entry["pool name"] + filename
+ if not entry.get("location id", None):
+ entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], utils.where_am_i(), session).location_id
+
+ if not entry.get("files id", None):
+ poolfile = add_poolfile(filename, entry, entry["location id"], session)
+ entry["files id"] = poolfile.file_id
+
+ bin.poolfile_id = entry["files id"]
+
+ # Find source id
+ bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
+ if len(bin_sources) != 1:
+ raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
+ (bin.package, bin.version, entry["architecture"],
+ filename, bin.binarytype, u.pkg.changes["fingerprint"])
+
+ bin.source_id = bin_sources[0].source_id
+
+ # Add and flush object so it has an ID
+ session.add(bin)
+ session.flush()
+
+ # Add BinAssociations
+ for suite_name in u.pkg.changes["distribution"].keys():
+ ba = BinAssociation()
+ ba.binary_id = bin.binary_id
+ ba.suite_id = get_suite(suite_name).suite_id
+ session.add(ba)
+
+ session.flush()
+
+ # Deal with contents
+ contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, filename, reject=None)
+ if not contents:
+ # contents is falsy here, so don't try to read rejects off it
+ print "REJECT\nCould not determine contents of package %s" % (bin.package)
+ session.rollback()
+ raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
-###############################################################################
-def install ():
- global install_count, install_bytes
+def install(u, log_urgency=True):
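+ """
+ Install an accepted upload: register it in the database and move its
+ files into the pool, then update the queue-build area accordingly.
+ """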
+ cnf = Config()
+ summarystats = SummaryStats()
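+ # SummaryStats is shared state; accept_count/accept_bytes updated here are reported by main()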
print "Installing."
Logger.log(["installing changes",pkg.changes_file])
# Begin a transaction; if we bomb out anywhere between here and the COMMIT WORK below, the DB will not be changed.
- projectB.query("BEGIN WORK")
+ session = DBConn().session()
# Ensure that we have all the hashes we need below.
- rejmsg = utils.ensure_hashes(changes, dsc, files, dsc_files)
- if len(rejmsg) > 0:
+ u.ensure_hashes()
+ if len(u.rejects) > 0:
# There were errors. Print them and SKIP the changes.
- for msg in rejmsg:
+ for msg in u.rejects:
utils.warn(msg)
return
- # Add the .dsc file to the DB
- for newfile in files.keys():
- if files[newfile]["type"] == "dsc":
- package = dsc["source"]
- version = dsc["version"] # NB: not files[file]["version"], that has no epoch
- maintainer = dsc["maintainer"]
- maintainer = maintainer.replace("'", "\\'")
- maintainer_id = database.get_or_set_maintainer_id(maintainer)
- changedby = changes["changed-by"]
- changedby = changedby.replace("'", "\\'")
- changedby_id = database.get_or_set_maintainer_id(changedby)
- fingerprint_id = database.get_or_set_fingerprint_id(dsc["fingerprint"])
- install_date = time.strftime("%Y-%m-%d")
- filename = files[newfile]["pool name"] + newfile
- dsc_component = files[newfile]["component"]
- dsc_location_id = files[newfile]["location id"]
- if dsc.has_key("dm-upload-allowed") and dsc["dm-upload-allowed"] == "yes":
- dm_upload_allowed = "true"
- else:
- dm_upload_allowed = "false"
- if not files[newfile].has_key("files id") or not files[newfile]["files id"]:
- files[newfile]["files id"] = database.set_files_id (filename, files[newfile]["size"], files[newfile]["md5sum"], files[newfile]["sha1sum"], files[newfile]["sha256sum"], dsc_location_id)
- projectB.query("INSERT INTO source (source, version, maintainer, changedby, file, install_date, sig_fpr, dm_upload_allowed) VALUES ('%s', '%s', %d, %d, %d, '%s', %s, %s)"
- % (package, version, maintainer_id, changedby_id, files[newfile]["files id"], install_date, fingerprint_id, dm_upload_allowed))
-
- for suite in changes["distribution"].keys():
- suite_id = database.get_suite_id(suite)
- projectB.query("INSERT INTO src_associations (suite, source) VALUES (%d, currval('source_id_seq'))" % (suite_id))
-
- # Add the source files to the DB (files and dsc_files)
- projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files[newfile]["files id"]))
- for dsc_file in dsc_files.keys():
- filename = files[newfile]["pool name"] + dsc_file
- # If the .orig.tar.gz is already in the pool, it's
- # files id is stored in dsc_files by check_dsc().
- files_id = dsc_files[dsc_file].get("files id", None)
- if files_id == None:
- files_id = database.get_files_id(filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], dsc_location_id)
- # FIXME: needs to check for -1/-2 and or handle exception
- if files_id == None:
- files_id = database.set_files_id (filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], files[dsc_file]["sha1sum"], files[dsc_file]["sha256sum"], dsc_location_id)
- projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files_id))
-
- # Add the src_uploaders to the DB
- uploader_ids = [maintainer_id]
- if dsc.has_key("uploaders"):
- for u in dsc["uploaders"].split(","):
- u = u.replace("'", "\\'")
- u = u.strip()
- uploader_ids.append(
- database.get_or_set_maintainer_id(u))
- added_ids = {}
- for u in uploader_ids:
- if added_ids.has_key(u):
- utils.warn("Already saw uploader %s for source %s" % (u, package))
- continue
- added_ids[u]=1
- projectB.query("INSERT INTO src_uploaders (source, maintainer) VALUES (currval('source_id_seq'), %d)" % (u))
-
-
- # Add the .deb files to the DB
- for newfile in files.keys():
- if files[newfile]["type"] == "deb":
- package = files[newfile]["package"]
- version = files[newfile]["version"]
- maintainer = files[newfile]["maintainer"]
- maintainer = maintainer.replace("'", "\\'")
- maintainer_id = database.get_or_set_maintainer_id(maintainer)
- fingerprint_id = database.get_or_set_fingerprint_id(changes["fingerprint"])
- architecture = files[newfile]["architecture"]
- architecture_id = database.get_architecture_id (architecture)
- filetype = files[newfile]["dbtype"]
- source = files[newfile]["source package"]
- source_version = files[newfile]["source version"]
- filename = files[newfile]["pool name"] + newfile
- if not files[newfile].has_key("location id") or not files[newfile]["location id"]:
- files[newfile]["location id"] = database.get_location_id(Cnf["Dir::Pool"],files[newfile]["component"],utils.where_am_i())
- if not files[newfile].has_key("files id") or not files[newfile]["files id"]:
- files[newfile]["files id"] = database.set_files_id (filename, files[newfile]["size"], files[newfile]["md5sum"], files[newfile]["sha1sum"], files[newfile]["sha256sum"], files[newfile]["location id"])
- source_id = database.get_source_id (source, source_version)
- if source_id:
- projectB.query("INSERT INTO binaries (package, version, maintainer, source, architecture, file, type, sig_fpr) VALUES ('%s', '%s', %d, %d, %d, %d, '%s', %d)"
- % (package, version, maintainer_id, source_id, architecture_id, files[newfile]["files id"], filetype, fingerprint_id))
- else:
- raise NoSourceFieldError, "Unable to find a source id for %s (%s), %s, file %s, type %s, signed by %s" % (package, version, architecture, newfile, filetype, changes["fingerprint"])
- for suite in changes["distribution"].keys():
- suite_id = database.get_suite_id(suite)
- projectB.query("INSERT INTO bin_associations (suite, bin) VALUES (%d, currval('binaries_id_seq'))" % (suite_id))
-
- if not copy_temporary_contents(package, version, architecture, newfile, reject):
- print "REJECT\n" + reject_message,
- projectB.query("ROLLBACK")
- raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (package, newfile )
+ # Add the .dsc file to the DB first
+ for newfile, entry in u.pkg.files.items():
+ if entry["type"] == "dsc":
+ dsc_component, dsc_location_id = add_dsc_to_db(u, newfile, session)
-
- orig_tar_id = Upload.pkg.orig_tar_id
- orig_tar_location = Upload.pkg.orig_tar_location
+ # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
+ for newfile, entry in u.pkg.files.items():
+ if entry["type"] == "deb":
+ add_deb_to_db(u, newfile, session)
# If this is a sourceful diff only upload that is moving
# cross-component we need to copy the .orig.tar.gz into the new
# component too for the same reasons as above.
#
- if changes["architecture"].has_key("source") and orig_tar_id and \
- orig_tar_location != dsc_location_id:
- q = projectB.query("SELECT l.path, f.filename, f.size, f.md5sum, f.sha1sum, f.sha256sum FROM files f, location l WHERE f.id = %s AND f.location = l.id" % (orig_tar_id))
- ql = q.getresult()[0]
- old_filename = ql[0] + ql[1]
- file_size = ql[2]
- file_md5sum = ql[3]
- file_sha1sum = ql[4]
- file_sha256sum = ql[5]
- new_filename = utils.poolify(changes["source"], dsc_component) + os.path.basename(old_filename)
- new_files_id = database.get_files_id(new_filename, file_size, file_md5sum, dsc_location_id)
- if new_files_id == None:
- utils.copy(old_filename, Cnf["Dir::Pool"] + new_filename)
- new_files_id = database.set_files_id(new_filename, file_size, file_md5sum, file_sha1sum, file_sha256sum, dsc_location_id)
- projectB.query("UPDATE dsc_files SET file = %s WHERE source = %s AND file = %s" % (new_files_id, database.get_source_id(changes["source"], changes["version"]), orig_tar_id))
+ if u.pkg.changes["architecture"].has_key("source") and u.pkg.orig_tar_id and \
+ u.pkg.orig_tar_location != dsc_location_id:
+
+ oldf = get_poolfile_by_id(u.pkg.orig_tar_id, session)
+ old_filename = os.path.join(oldf.location.path, oldf.filename)
+ old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
+ 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
+
+ new_filename = os.path.join(utils.poolify(u.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
+
+ # TODO: Care about size/md5sum collisions etc
+ (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
+
+ if newf is None:
+ utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
+ newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
+
+ # TODO: Check that there's only 1 here
+ source = get_sources_from_name(u.pkg.changes["source"], u.pkg.changes["version"], session=session)[0]
+ dscf = get_dscfiles(source_id = source.source_id, poolfile_id=u.pkg.orig_tar_id, session=session)[0]
+ dscf.poolfile_id = newf.file_id
+ session.add(dscf)
+ session.flush()
# Install the files into the pool
- for newfile in files.keys():
- destination = Cnf["Dir::Pool"] + files[newfile]["pool name"] + newfile
+ for newfile, entry in u.pkg.files.items():
+ destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
utils.move(newfile, destination)
- Logger.log(["installed", newfile, files[newfile]["type"], files[newfile]["size"], files[newfile]["architecture"]])
- install_bytes += float(files[newfile]["size"])
+ Logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
+ summarystats.accept_bytes += float(entry["size"])
# Copy the .changes file across for suite which need it.
copy_changes = {}
copy_dot_dak = {}
- for suite in changes["distribution"].keys():
- if Cnf.has_key("Suite::%s::CopyChanges" % (suite)):
- copy_changes[Cnf["Suite::%s::CopyChanges" % (suite)]] = ""
+ for suite_name in u.pkg.changes["distribution"].keys():
+ if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
+ copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
# and the .dak file...
- if Cnf.has_key("Suite::%s::CopyDotDak" % (suite)):
- copy_dot_dak[Cnf["Suite::%s::CopyDotDak" % (suite)]] = ""
+ if cnf.has_key("Suite::%s::CopyDotDak" % (suite_name)):
+ copy_dot_dak[cnf["Suite::%s::CopyDotDak" % (suite_name)]] = ""
+
for dest in copy_changes.keys():
- utils.copy(pkg.changes_file, Cnf["Dir::Root"] + dest)
+ utils.copy(u.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
+
for dest in copy_dot_dak.keys():
- utils.copy(Upload.pkg.changes_file[:-8]+".dak", dest)
- projectB.query("COMMIT WORK")
+ utils.copy(u.pkg.changes_file[:-8]+".dak", dest)
+
+ # We're done - commit the database changes
+ session.commit()
# Move the .changes into the 'done' directory
- utils.move (pkg.changes_file,
- os.path.join(Cnf["Dir::Queue::Done"], os.path.basename(pkg.changes_file)))
+ utils.move(u.pkg.changes_file,
+ os.path.join(cnf["Dir::Queue::Done"], os.path.basename(u.pkg.changes_file)))
# Remove the .dak file
- os.unlink(Upload.pkg.changes_file[:-8]+".dak")
+ os.unlink(u.pkg.changes_file[:-8] + ".dak")
+
+ if u.pkg.changes["architecture"].has_key("source") and log_urgency:
+ UrgencyLog().log(u.pkg.dsc["source"], u.pkg.dsc["version"], u.pkg.changes["urgency"])
- if changes["architecture"].has_key("source") and Urgency_Logger:
- Urgency_Logger.log(dsc["source"], dsc["version"], changes["urgency"])
+ # Our SQL session will automatically start a new transaction after
+ # the last commit
# Undo the work done in queue.py(accept) to help auto-building
# from accepted.
- projectB.query("BEGIN WORK")
- for suite in changes["distribution"].keys():
- if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
+ now_date = datetime.now()
+
+ for suite_name in u.pkg.changes["distribution"].keys():
+ if suite_name not in cnf.ValueList("Dinstall::QueueBuildSuites"):
continue
- now_date = time.strftime("%Y-%m-%d %H:%M")
- suite_id = database.get_suite_id(suite)
- dest_dir = Cnf["Dir::QueueBuild"]
- if Cnf.FindB("Dinstall::SecurityQueueBuild"):
- dest_dir = os.path.join(dest_dir, suite)
- for newfile in files.keys():
+
+ suite = get_suite(suite_name, session)
+ dest_dir = cnf["Dir::QueueBuild"]
+
+ if cnf.FindB("Dinstall::SecurityQueueBuild"):
+ dest_dir = os.path.join(dest_dir, suite_name)
+
+ for newfile, entry in u.pkg.files.items():
dest = os.path.join(dest_dir, newfile)
+
+ qb = get_queue_build(dest, suite.suite_id, session)
+
# Remove it from the list of packages for later processing by apt-ftparchive
- projectB.query("UPDATE queue_build SET in_queue = 'f', last_used = '%s' WHERE filename = '%s' AND suite = %s" % (now_date, dest, suite_id))
- if not Cnf.FindB("Dinstall::SecurityQueueBuild"):
+ if qb:
+ qb.last_used = now_date
+ qb.in_queue = False
+ session.add(qb)
+
+ if not cnf.FindB("Dinstall::SecurityQueueBuild"):
# Update the symlink to point to the new location in the pool
- pool_location = utils.poolify (changes["source"], files[newfile]["component"])
- src = os.path.join(Cnf["Dir::Pool"], pool_location, os.path.basename(newfile))
+ pool_location = utils.poolify(u.pkg.changes["source"], entry["component"])
+ src = os.path.join(cnf["Dir::Pool"], pool_location, os.path.basename(newfile))
if os.path.islink(dest):
os.unlink(dest)
os.symlink(src, dest)
+
# Update last_used on any non-upload .orig.tar.gz symlink
- if orig_tar_id:
+ if u.pkg.orig_tar_id:
# Determine the .orig.tar.gz file name
- for dsc_file in dsc_files.keys():
+ for dsc_file in u.pkg.dsc_files.keys():
if dsc_file.endswith(".orig.tar.gz"):
- orig_tar_gz = os.path.join(dest_dir, dsc_file)
+ u.pkg.orig_tar_gz = os.path.join(dest_dir, dsc_file)
+
# Remove it from the list of packages for later processing by apt-ftparchive
- projectB.query("UPDATE queue_build SET in_queue = 'f', last_used = '%s' WHERE filename = '%s' AND suite = %s" % (now_date, orig_tar_gz, suite_id))
- projectB.query("COMMIT WORK")
+ qb = get_queue_build(u.pkg.orig_tar_gz, suite.suite_id, session)
+ if qb:
+ qb.in_queue = False
+ qb.last_used = now_date
+ session.add(qb)
+
+ session.commit()
# Finally...
- install_count += 1
+ summarystats.accept_count += 1
################################################################################
+### XXX: UP TO HERE
+
+def stable_install(u, summary, short_summary, fromsuite_name="proposed-updates"):
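+ """
+ Install an upload from (old)stable-proposed-updates into (old)stable
+ by moving its suite associations across, then update the ChangeLog
+ and announce the installation.
+ """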
+ summarystats = SummaryStats()
+ cnf = Config()
-def stable_install (summary, short_summary, fromsuite="proposed-updates"):
- global install_count
+ fromsuite_name = fromsuite_name.lower()
+ tosuite_name = "Stable"
+ if fromsuite_name == "oldstable-proposed-updates":
+ tosuite_name = "OldStable"
- fromsuite = fromsuite.lower()
- tosuite = "Stable"
- if fromsuite == "oldstable-proposed-updates":
- tosuite = "OldStable"
+ print "Installing from %s to %s." % (fromsuite_name, tosuite_name)
- print "Installing from %s to %s." % (fromsuite, tosuite)
+ fromsuite = get_suite(fromsuite_name)
+ tosuite = get_suite(tosuite_name.lower())
# Begin a transaction; if we bomb out anywhere between here and
# the COMMIT WORK below, the DB won't be changed.
- projectB.query("BEGIN WORK")
+ session = DBConn().session()
# Add the source to stable (and remove it from proposed-updates)
- for newfile in files.keys():
- if files[newfile]["type"] == "dsc":
- package = dsc["source"]
- version = dsc["version"]; # NB: not files[file]["version"], that has no epoch
- q = projectB.query("SELECT id FROM source WHERE source = '%s' AND version = '%s'" % (package, version))
- ql = q.getresult()
- if not ql:
+ for newfile, entry in u.pkg.files.items():
+ if entry["type"] == "dsc":
+ package = u.pkg.dsc["source"]
+ # NB: not files[file]["version"], that has no epoch
+ version = u.pkg.dsc["version"]
+
+ source = get_sources_from_name(package, version, session=session)
+ if len(source) < 1:
utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s) in source table." % (package, version))
- source_id = ql[0][0]
- suite_id = database.get_suite_id(fromsuite)
- projectB.query("DELETE FROM src_associations WHERE suite = '%s' AND source = '%s'" % (suite_id, source_id))
- suite_id = database.get_suite_id(tosuite.lower())
- projectB.query("INSERT INTO src_associations (suite, source) VALUES ('%s', '%s')" % (suite_id, source_id))
+ source = source[0]
+
+ # Remove from old suite
+ old = session.query(SrcAssociation).filter_by(source_id = source.source_id)
+ old = old.filter_by(suite_id = fromsuite.suite_id)
+ old.delete()
+
+ # Add to new suite
+ new = SrcAssociation()
+ new.source_id = source.source_id
+ new.suite_id = tosuite.suite_id
+ session.add(new)
# Add the binaries to stable (and remove it/them from proposed-updates)
- for newfile in files.keys():
- if files[newfile]["type"] == "deb":
- package = files[newfile]["package"]
- version = files[newfile]["version"]
- architecture = files[newfile]["architecture"]
- q = projectB.query("SELECT b.id FROM binaries b, architecture a WHERE b.package = '%s' AND b.version = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all') AND b.architecture = a.id" % (package, version, architecture))
- ql = q.getresult()
- if not ql:
+ for newfile, entry in u.pkg.files.items():
+ if entry["type"] == "deb":
+ package = entry["package"]
+ version = entry["version"]
+ architecture = entry["architecture"]
+
+ binary = get_binaries_from_name(package, version, [architecture, 'all'])
+
+ if len(binary) < 1:
utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s for %s architecture) in binaries table." % (package, version, architecture))
+ binary = binary[0]
- binary_id = ql[0][0]
- suite_id = database.get_suite_id(fromsuite)
- projectB.query("DELETE FROM bin_associations WHERE suite = '%s' AND bin = '%s'" % (suite_id, binary_id))
- suite_id = database.get_suite_id(tosuite.lower())
- projectB.query("INSERT INTO bin_associations (suite, bin) VALUES ('%s', '%s')" % (suite_id, binary_id))
+ # Remove from old suite
+ old = session.query(BinAssociation).filter_by(binary_id = binary.binary_id)
+ old = old.filter_by(suite_id = fromsuite.suite_id)
+ old.delete()
- projectB.query("COMMIT WORK")
+ # Add to new suite
+ new = BinAssociation()
+ new.binary_id = binary.binary_id
+ new.suite_id = tosuite.suite_id
+ session.add(new)
- utils.move (pkg.changes_file, Cnf["Dir::Morgue"] + '/process-accepted/' + os.path.basename(pkg.changes_file))
+ session.commit()
+
+ utils.move(u.pkg.changes_file,
+ os.path.join(cnf["Dir::Morgue"], 'process-accepted', os.path.basename(u.pkg.changes_file)))
## Update the Stable ChangeLog file
- new_changelog_filename = Cnf["Dir::Root"] + Cnf["Suite::%s::ChangeLogBase" % (tosuite)] + ".ChangeLog"
- changelog_filename = Cnf["Dir::Root"] + Cnf["Suite::%s::ChangeLogBase" % (tosuite)] + "ChangeLog"
+ # TODO: URGH - Use a proper tmp file
+ new_changelog_filename = cnf["Dir::Root"] + cnf["Suite::%s::ChangeLogBase" % (tosuite.suite_name)] + ".ChangeLog"
+ changelog_filename = cnf["Dir::Root"] + cnf["Suite::%s::ChangeLogBase" % (tosuite.suite_name)] + "ChangeLog"
if os.path.exists(new_changelog_filename):
- os.unlink (new_changelog_filename)
+ os.unlink(new_changelog_filename)
new_changelog = utils.open_file(new_changelog_filename, 'w')
- for newfile in files.keys():
- if files[newfile]["type"] == "deb":
- new_changelog.write("%s/%s/binary-%s/%s\n" % (tosuite.lower(), files[newfile]["component"], files[newfile]["architecture"], newfile))
+ for newfile, entry in u.pkg.files.items():
+ if entry["type"] == "deb":
+ new_changelog.write("%s/%s/binary-%s/%s\n" % (tosuite.suite_name,
+ entry["component"],
+ entry["architecture"],
+ newfile))
elif re_issource.match(newfile):
- new_changelog.write("%s/%s/source/%s\n" % (tosuite.lower(), files[newfile]["component"], newfile))
+ new_changelog.write("%s/%s/source/%s\n" % (tosuite.suite_name,
+ entry["component"],
+ newfile))
else:
new_changelog.write("%s\n" % (newfile))
- chop_changes = re_fdnic.sub("\n", changes["changes"])
+
+ chop_changes = re_fdnic.sub("\n", u.pkg.changes["changes"])
new_changelog.write(chop_changes + '\n\n')
+
if os.access(changelog_filename, os.R_OK) != 0:
changelog = utils.open_file(changelog_filename)
new_changelog.write(changelog.read())
+
new_changelog.close()
+
if os.access(changelog_filename, os.R_OK) != 0:
os.unlink(changelog_filename)
utils.move(new_changelog_filename, changelog_filename)
- install_count += 1
+ summarystats.accept_count += 1
- if not Options["No-Mail"] and changes["architecture"].has_key("source"):
- Subst["__SUITE__"] = " into %s" % (tosuite)
- Subst["__SUMMARY__"] = summary
- mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-accepted.install")
+ if not Options["No-Mail"] and u.pkg.changes["architecture"].has_key("source"):
+ u.Subst["__SUITE__"] = " into %s" % (tosuite)
+ u.Subst["__SUMMARY__"] = summary
+ u.Subst["__BCC__"] = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
+
+ if cnf.has_key("Dinstall::Bcc"):
+ u.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
+
+ template = os.path.join(cnf["Dir::Templates"], 'process-accepted.install')
+
+ mail_message = utils.TemplateSubst(u.Subst, template)
utils.send_mail(mail_message)
- Upload.announce(short_summary, 1)
+ u.announce(short_summary, True)
# Finally remove the .dak file
- dot_dak_file = os.path.join(Cnf["Suite::%s::CopyDotDak" % (fromsuite)], os.path.basename(Upload.pkg.changes_file[:-8]+".dak"))
+ dot_dak_file = os.path.join(cnf["Suite::%s::CopyDotDak" % (fromsuite.suite_name)],
+ os.path.basename(u.pkg.changes_file[:-8]+".dak"))
os.unlink(dot_dak_file)
################################################################################
-def process_it (changes_file, queue=""):
- global reject_message
+def process_it(changes_file, stable_queue=None, log_urgency=True):
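+ """
+ Process a single .changes file: load its stored .dak state, re-run
+ the accepted checks and hand over to action().
+ """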
+ cnf = Config()
+ u = queue.Upload()
- reject_message = ""
+ overwrite_checks = True
# Absolutize the filename to avoid the requirement of being in the
# same directory as the .changes file.
- pkg.changes_file = os.path.abspath(changes_file)
+ cfile = os.path.abspath(changes_file)
# And since handling of installs to stable munges with the CWD
# save and restore it.
- pkg.directory = os.getcwd()
+ u.prevdir = os.getcwd()
- if installing_to_stable:
- old = Upload.pkg.changes_file
- Upload.pkg.changes_file = os.path.basename(old)
- os.chdir(Cnf["Suite::%s::CopyDotDak" % (queue)])
+ if stable_queue:
+ old = cfile
+ cfile = os.path.basename(old)
+ os.chdir(cnf["Suite::%s::CopyDotDak" % (stable_queue)])
+ # overwrite_checks should not be performed if installing to stable
+ overwrite_checks = False
- Upload.init_vars()
- Upload.update_vars()
- Upload.update_subst()
+ u.load_dot_dak(cfile)
+ u.update_subst()
- if installing_to_stable:
- Upload.pkg.changes_file = old
+ if stable_queue:
+ u.pkg.changes_file = old
- check()
- action(queue)
+ u.accepted_checks(overwrite_checks)
+ action(u, stable_queue, log_urgency)
# Restore CWD
- os.chdir(pkg.directory)
+ os.chdir(u.prevdir)
###############################################################################
def main():
- global projectB, Logger, Urgency_Logger, installing_to_stable
+ global Logger
+ cnf = Config()
+ summarystats = SummaryStats()
changes_files = init()
+ log_urgency = False
+ stable_queue = None
# -n/--dry-run invalidates some other options which would involve things happening
if Options["No-Action"]:
# Check that we aren't going to clash with the daily cron job
- if not Options["No-Action"] and os.path.exists("%s/Archive_Maintenance_In_Progress" % (Cnf["Dir::Root"])) and not Options["No-Lock"]:
+ if not Options["No-Action"] and os.path.exists("%s/Archive_Maintenance_In_Progress" % (cnf["Dir::Root"])) and not Options["No-Lock"]:
utils.fubar("Archive maintenance in progress. Try again later.")
# If running from within proposed-updates; assume an install to stable
queue = ""
if os.getenv('PWD').find('oldstable-proposed-updates') != -1:
- queue = "Oldstable-Proposed-Updates"
- installing_to_stable = 1
+ stable_queue = "Oldstable-Proposed-Updates"
elif os.getenv('PWD').find('proposed-updates') != -1:
- queue = "Proposed-Updates"
- installing_to_stable = 1
+ stable_queue = "Proposed-Updates"
# Obtain lock if not in no-action mode and initialize the log
if not Options["No-Action"]:
- lock_fd = os.open(Cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT)
+ lock_fd = os.open(cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT)
try:
fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError, e:
utils.fubar("Couldn't obtain lock; assuming another 'dak process-accepted' is already running.")
else:
raise
- Logger = Upload.Logger = daklog.Logger(Cnf, "process-accepted")
- if not installing_to_stable and Cnf.get("Dir::UrgencyLog"):
- Urgency_Logger = Urgency_Log(Cnf)
-
- # Initialize the substitution template mapping global
- bcc = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
- if Cnf.has_key("Dinstall::Bcc"):
- Subst["__BCC__"] = bcc + "\nBcc: %s" % (Cnf["Dinstall::Bcc"])
- else:
- Subst["__BCC__"] = bcc
+ Logger = daklog.Logger(cnf, "process-accepted")
+ if not stable_queue and cnf.get("Dir::UrgencyLog"):
+ # Initialise UrgencyLog()
+ log_urgency = True
+ UrgencyLog()
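+ # UrgencyLog() is shared state; creating it here opens the log that
+ # install() writes to and that gets closed at the end of main()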
# Sort the .changes files so that we process sourceful ones first
changes_files.sort(utils.changes_compare)
# Process the changes files
for changes_file in changes_files:
print "\n" + changes_file
- process_it (changes_file, queue)
+ process_it(changes_file, stable_queue, log_urgency)
- if install_count:
+ if summarystats.accept_count:
sets = "set"
- if install_count > 1:
+ if summarystats.accept_count > 1:
sets = "sets"
- sys.stderr.write("Installed %d package %s, %s.\n" % (install_count, sets, utils.size_type(int(install_bytes))))
- Logger.log(["total",install_count,install_bytes])
+ sys.stderr.write("Installed %d package %s, %s.\n" % (summarystats.accept_count, sets,
+ utils.size_type(int(summarystats.accept_bytes))))
+ Logger.log(["total", summarystats.accept_count, summarystats.accept_bytes])
if not Options["No-Action"]:
Logger.close()
- if Urgency_Logger:
- Urgency_Logger.close()
+ if log_urgency:
+ UrgencyLog().close()
###############################################################################