git.decadent.org.uk Git - dak.git/blobdiff - dak/process_accepted.py
before I rip out pending_*
[dak.git] / dak / process_accepted.py
index 317fd9fb08e9a54e03da7d92ca76c42c71769c93..7b78f08c73f26b2327181b258738e18beba8ad4a 100755 (executable)
@@ -40,18 +40,17 @@ import fcntl
 import os
 import sys
 from datetime import datetime
-import re
-import apt_pkg, commands
+import apt_pkg
 
 from daklib import daklog
-from daklib import queue
+from daklib.queue import *
 from daklib import utils
 from daklib.dbconn import *
-from daklib.binary import copy_temporary_contents
 from daklib.dak_exceptions import *
 from daklib.regexes import re_default_answer, re_issource, re_fdnic
 from daklib.urgencylog import UrgencyLog
 from daklib.summarystats import SummaryStats
+from daklib.config import Config
 
 ###############################################################################
 
@@ -79,7 +78,7 @@ def init():
         if not cnf.has_key("Dinstall::Options::%s" % (i)):
             cnf["Dinstall::Options::%s" % (i)] = ""
 
-    changes_files = apt_pkg.ParseCommandLine(cnf, Arguments, sys.argv)
+    changes_files = apt_pkg.ParseCommandLine(cnf.Cnf, Arguments, sys.argv)
     Options = cnf.SubTree("Dinstall::Options")
 
     if Options["Help"]:
@@ -110,7 +109,7 @@ def usage (exit_code=0):
 
 ###############################################################################
 
-def action (u, stable_queue=None, log_urgency=True):
+def action (u, stable_queue=None, log_urgency=True, session=None):
     (summary, short_summary) = u.build_summaries()
     pi = u.package_info()
 
@@ -144,7 +143,7 @@ def action (u, stable_queue=None, log_urgency=True):
         if stable_queue:
             stable_install(u, summary, short_summary, stable_queue, log_urgency)
         else:
-            install(u, log_urgency)
+            install(u, session, log_urgency)
     elif answer == 'Q':
         sys.exit(0)
 
@@ -172,8 +171,8 @@ def add_dsc_to_db(u, filename, session):
     source.source = u.pkg.dsc["source"]
     source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
     source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
-    source.changedby_id = get_or_set_maintainer(u.pkg.dsc["changed-by"], session).maintainer_id
-    source.fingerprint_id = get_or_set_fingerprint(u.pkg.dsc["fingerprint"], session).fingerprint_id
+    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
+    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
     source.install_date = datetime.now().date()
 
     dsc_component = entry["component"]
@@ -205,16 +204,24 @@ def add_dsc_to_db(u, filename, session):
     dscfile.poolfile_id = entry["files id"]
     session.add(dscfile)
 
-    for dsc_file, dentry in u.pkg.dsc_files.keys():
+    for dsc_file, dentry in u.pkg.dsc_files.items():
         df = DSCFile()
         df.source_id = source.source_id
 
-        # If the .orig.tar.gz is already in the pool, it's
+        # If the .orig tarball is already in the pool, it's
         # files id is stored in dsc_files by check_dsc().
         files_id = dentry.get("files id", None)
 
+        # Find the entry in the files hash
+        # TODO: Bail out here properly
+        dfentry = None
+        for f, e in u.pkg.files.items():
+            if f == dsc_file:
+                dfentry = e
+                break
+
         if files_id is None:
-            filename = dentry["pool name"] + dsc_file
+            filename = dfentry["pool name"] + dsc_file
 
             (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
             # FIXME: needs to check for -1/-2 and or handle exception
@@ -223,6 +230,9 @@ def add_dsc_to_db(u, filename, session):
 
             # If still not found, add it
             if files_id is None:
+                # HACK: Force sha1sum etc into dentry
+                dentry["sha1sum"] = dfentry["sha1sum"]
+                dentry["sha256sum"] = dfentry["sha256sum"]
                 poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                 files_id = poolfile.file_id
 
@@ -232,7 +242,7 @@ def add_dsc_to_db(u, filename, session):
     session.flush()
 
     # Add the src_uploaders to the DB
-    uploader_ids = [maintainer_id]
+    uploader_ids = [source.maintainer_id]
     if u.pkg.dsc.has_key("uploaders"):
         for up in u.pkg.dsc["uploaders"].split(","):
             up = up.strip()
@@ -248,7 +258,7 @@ def add_dsc_to_db(u, filename, session):
 
         su = SrcUploader()
         su.maintainer_id = up
-        su.source_id = source_id
+        su.source_id = source.source_id
         session.add(su)
 
     session.flush()
@@ -274,6 +284,7 @@ def add_deb_to_db(u, filename, session):
 
     # Find poolfile id
     filename = entry["pool name"] + filename
+    fullpath = os.path.join(cnf["Dir::Pool"], filename)
     if not entry.get("location id", None):
         entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], utils.where_am_i(), session).location_id
 
@@ -284,7 +295,7 @@ def add_deb_to_db(u, filename, session):
     bin.poolfile_id = entry["files id"]
 
     # Find source id
-    bin_sources = get_sources_from_name(entry["source package"], entry["source version"])
+    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
     if len(bin_sources) != 1:
         raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                   (bin.package, bin.version, bin.architecture.arch_string,
@@ -300,29 +311,33 @@ def add_deb_to_db(u, filename, session):
     for suite_name in u.pkg.changes["distribution"].keys():
         ba = BinAssociation()
         ba.binary_id = bin.binary_id
-        ba.suite_id = get_suite(suite_name).suite_id
-        session.add(sa)
+        suite = get_suite(suite_name)
+        ba.suite_id = suite.suite_id
+
+        component_id = bin.poolfile.location.component_id
+
+        contents = copy_temporary_contents(bin, os.path.basename(filename), None, session)
+        if not contents:
+            print "REJECT\nCould not determine contents of package %s" % bin.package
+            session.rollback()
+            raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
+
+
+        session.add(ba)
+
 
     session.flush()
 
-    # Deal with contents
-    contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, filename, reject=None)
-    if not contents:
-        print "REJECT\n" + "\n".join(contents.rejects)
-        session.rollback()
-        raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
 
 
-def install(u, log_urgency=True):
+def install(u, session, log_urgency=True):
     cnf = Config()
     summarystats = SummaryStats()
 
     print "Installing."
 
-    Logger.log(["installing changes",pkg.changes_file])
-
-    # Begin a transaction; if we bomb out anywhere between here and the COMMIT WORK below, the DB will not be changed.
-    session = DBConn().session()
+    Logger.log(["installing changes", u.pkg.changes_file])
 
     # Ensure that we have all the hashes we need below.
     u.ensure_hashes()
@@ -333,42 +348,47 @@ def install(u, log_urgency=True):
         return
 
     # Add the .dsc file to the DB first
-    for newfile in u.pkg.files.keys():
+    for newfile, entry in u.pkg.files.items():
         if entry["type"] == "dsc":
             dsc_component, dsc_location_id = add_dsc_to_db(u, newfile, session)
 
     # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
-    for newfile in u.pkg.files.keys():
+    for newfile, entry in u.pkg.files.items():
         if entry["type"] == "deb":
             add_deb_to_db(u, newfile, session)
 
     # If this is a sourceful diff only upload that is moving
-    # cross-component we need to copy the .orig.tar.gz into the new
+    # cross-component we need to copy the .orig files into the new
     # component too for the same reasons as above.
-    #
-    if u.pkg.changes["architecture"].has_key("source") and u.pkg.orig_tar_id and \
-       u.pkg.orig_tar_location != dsc_location_id:
-
-        oldf = get_poolfile_by_id(u.pkg.orig_tar_id, session)
-        old_filename = os.path.join(oldf.location.path, oldf.filename)
-        old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
-                   'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
-
-        new_filename = os.path.join(utils.poolify(u.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
-
-        # TODO: Care about size/md5sum collisions etc
-        (found, newf) = check_poolfile(new_filename, file_size, file_md5sum, dsc_location_id, session)
-
-        if newf is None:
-            utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
-            newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
-
-            # TODO: Check that there's only 1 here
-            source = get_sources_from_name(u.pkg.changes["source"], u.pkg.changes["version"])[0]
-            dscf = get_dscfiles(source_id = source.source_id, poolfile_id=u.pkg.orig_tar_id, session=session)[0]
-            dscf.poolfile_id = newf.file_id
-            session.add(dscf)
-            session.flush()
+    if u.pkg.changes["architecture"].has_key("source"):
+        for orig_file in u.pkg.orig_files.keys():
+            if not u.pkg.orig_files[orig_file].has_key("id"):
+                continue # Skip if it's not in the pool
+            orig_file_id = u.pkg.orig_files[orig_file]["id"]
+            if u.pkg.orig_files[orig_file]["location"] == dsc_location_id:
+                continue # Skip if the location didn't change
+
+            # Do the move
+            oldf = get_poolfile_by_id(orig_file_id, session)
+            old_filename = os.path.join(oldf.location.path, oldf.filename)
+            old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
+                       'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
+
+            new_filename = os.path.join(utils.poolify(u.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
+
+            # TODO: Care about size/md5sum collisions etc
+            (found, newf) = check_poolfile(new_filename, file_size, file_md5sum, dsc_location_id, session)
+
+            if newf is None:
+                utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
+                newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
+
+                # TODO: Check that there's only 1 here
+                source = get_sources_from_name(u.pkg.changes["source"], u.pkg.changes["version"])[0]
+                dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
+                dscf.poolfile_id = newf.file_id
+                session.add(dscf)
+                session.flush()
 
     # Install the files into the pool
     for newfile, entry in u.pkg.files.items():
@@ -380,7 +400,7 @@ def install(u, log_urgency=True):
     # Copy the .changes file across for suite which need it.
     copy_changes = {}
     copy_dot_dak = {}
-    for suite_name in changes["distribution"].keys():
+    for suite_name in u.pkg.changes["distribution"].keys():
         if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
             copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
         # and the .dak file...
@@ -442,15 +462,17 @@ def install(u, log_urgency=True):
                     os.unlink(dest)
                 os.symlink(src, dest)
 
-        # Update last_used on any non-upload .orig.tar.gz symlink
-        if u.pkg.orig_tar_id:
+        # Update last_used on any non-uploaded .orig symlink
+        for orig_file in u.pkg.orig_files.keys():
             # Determine the .orig.tar.gz file name
-            for dsc_file in u.pkg.dsc_files.keys():
-                if dsc_file.endswith(".orig.tar.gz"):
-                    u.pkg.orig_tar_gz = os.path.join(dest_dir, dsc_file)
+            if not u.pkg.orig_files[orig_file].has_key("id"):
+                continue # Skip files not in the pool
+            # XXX: do we really want to update the orig_files dict here
+            # instead of using a temporary variable?
+            u.pkg.orig_files[orig_file]["path"] = os.path.join(dest_dir, orig_file)
 
             # Remove it from the list of packages for later processing by apt-ftparchive
-            qb = get_queue_build(u.pkg.orig_tar_gz, suite.suite_id, session)
+            qb = get_queue_build(u.pkg.orig_files[orig_file]["path"], suite.suite_id, session)
             if qb:
                 qb.in_queue = False
                 qb.last_used = now_date
@@ -462,9 +484,8 @@ def install(u, log_urgency=True):
     summarystats.accept_count += 1
 
 ################################################################################
-### XXX: UP TO HERE
 
-def stable_install(u, summary, short_summary, fromsuite_name="proposed-updates"):
+def stable_install(u, session, summary, short_summary, fromsuite_name="proposed-updates"):
     summarystats = SummaryStats()
 
     fromsuite_name = fromsuite_name.lower()
@@ -477,10 +498,6 @@ def stable_install(u, summary, short_summary, fromsuite_name="proposed-updates")
     fromsuite = get_suite(fromsuite_name)
     tosuite = get_suite(tosuite_name)
 
-    # Begin a transaction; if we bomb out anywhere between here and
-    # the COMMIT WORK below, the DB won't be changed.
-    session = DBConn().session()
-
     # Add the source to stable (and remove it from proposed-updates)
     for newfile, entry in u.pkg.files.items():
         if entry["type"] == "dsc":
@@ -572,7 +589,7 @@ def stable_install(u, summary, short_summary, fromsuite_name="proposed-updates")
     if not Options["No-Mail"] and u.pkg.changes["architecture"].has_key("source"):
         u.Subst["__SUITE__"] = " into %s" % (tosuite)
         u.Subst["__SUMMARY__"] = summary
-        u.Subst["__BCC__"] = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
+        u.Subst["__BCC__"] = "X-DAK: dak process-accepted"
 
         if cnf.has_key("Dinstall::Bcc"):
             u.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
@@ -590,7 +607,7 @@ def stable_install(u, summary, short_summary, fromsuite_name="proposed-updates")
 
 ################################################################################
 
-def process_it(changes_file, stable_queue=None, log_urgency=True):
+def process_it(changes_file, stable_queue, log_urgency, session):
     cnf = Config()
     u = Upload()
 
@@ -611,14 +628,14 @@ def process_it(changes_file, stable_queue=None, log_urgency=True):
         # overwrite_checks should not be performed if installing to stable
         overwrite_checks = False
 
-    u.load_dot_dak(cfile)
+    u.pkg.load_dot_dak(cfile)
     u.update_subst()
 
     if stable_queue:
         u.pkg.changes_file = old
 
-    u.accepted_checks(overwrite_checks)
-    action(u, stable_queue, log_urgency)
+    u.accepted_checks(overwrite_checks, session)
+    action(u, stable_queue, log_urgency, session)
 
     # Restore CWD
     os.chdir(u.prevdir)
@@ -669,10 +686,13 @@ def main():
     # Sort the .changes files so that we process sourceful ones first
     changes_files.sort(utils.changes_compare)
 
+
     # Process the changes files
     for changes_file in changes_files:
         print "\n" + changes_file
-        process_it(changes_file, stable_queue, log_urgency)
+        session = DBConn().session()
+        process_it(changes_file, stable_queue, log_urgency, session)
+        session.close()
 
     if summarystats.accept_count:
         sets = "set"
@@ -684,7 +704,7 @@ def main():
 
     if not Options["No-Action"]:
         Logger.close()
-        if log_urg:
+        if log_urgency:
             UrgencyLog().close()
 
 ###############################################################################