diff --git a/daklib/changes.py b/daklib/changes.py
old mode 100755
new mode 100644
index 2ecd9af..d6ccaa0
--- a/daklib/changes.py
+++ b/daklib/changes.py
@@ -29,16 +29,15 @@ Changes class for dak
 
 import os
 import stat
-import time
 
 import datetime
 from cPickle import Unpickler, Pickler
 from errno import EPERM
 
-from apt_inst import debExtractControl
-from apt_pkg import ParseSection
+import apt_pkg
+from apt_pkg import TagSection
 
-from utils import open_file, fubar, poolify
+from utils import open_file, fubar, poolify, deb_extract_control
 from config import *
 from dbconn import *
 
@@ -128,7 +126,8 @@ class Changes(object):
 
                 if entry["type"] == "deb":
                     deb_fh = open_file(name)
-                    summary += ParseSection(debExtractControl(deb_fh))["Description"] + '\n'
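+                    # TagSection parses the raw control stanza returned by deb_extract_control.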
+                    summary += TagSection(deb_extract_control(deb_fh))["Description"] + '\n'
                     deb_fh.close()
 
             else:
@@ -178,189 +176,141 @@ class Changes(object):
 
         return summary
 
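+    # @session_wrapper opens a session when the caller does not supply one
+    # and commits and closes it afterwards, replacing the hand-rolled
+    # privatetrans handling these methods previously carried.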
+    @session_wrapper
     def remove_known_changes(self, session=None):
-        if session is None:
-            session = DBConn().session()
-            privatetrans = True
+        session.delete(get_dbchange(self.changes_file, session))
 
-        session.delete(get_knownchange(self.changes_file, session))
+    def mark_missing_fields(self):
+        """add "missing" in fields which we will require for the known_changes table"""
+        for key in ['urgency', 'maintainer', 'fingerprint', 'changed-by' ]:
+            if (not self.changes.has_key(key)) or (not self.changes[key]):
+                self.changes[key]='missing'
 
-        if privatetrans:
-            session.commit()
-            session.close()
-            
-    def add_known_changes(self, queue, session=None):
+    def __get_file_from_pool(self, filename, entry, session, logger):
         cnf = Config()
 
-        if session is None:
-            session = DBConn().session()
-            privatetrans = True
-
-        dirpath = cnf["Dir::Queue::%s" % (queue) ]
-        changesfile = os.path.join(dirpath, self.changes_file)
-        filetime = datetime.datetime.fromtimestamp(os.path.getctime(changesfile))
-
-        session.execute(
-            """INSERT INTO known_changes
-              (changesname, seen, source, binaries, architecture, version,
-              distribution, urgency, maintainer, fingerprint, changedby, date)
-              VALUES (:changesfile,:filetime,:source,:binary, :architecture,
-              :version,:distribution,:urgency,:maintainer,:fingerprint,:changedby,:date)""",
-              { 'changesfile':changesfile,
-                'filetime':filetime,
-                'source':self.changes["source"],
-                'binary':self.changes["binary"],
-                'architecture':self.changes["architecture"],
-                'version':self.changes["version"],
-                'distribution':self.changes["distribution"],
-                'urgency':self.changes["urgency"],
-                'maintainer':self.changes["maintainer"],
-                'fingerprint':self.changes["fingerprint"],
-                'changedby':self.changes["changed-by"],
-                'date':self.changes["date"]} )
-
-        if privatetrans:
-            session.commit()
-            session.close()
-
-    def load_dot_dak(self, changesfile):
-        """
-        Update ourself by reading a previously created cPickle .dak dumpfile.
-        """
-
-        self.changes_file = changesfile
-        dump_filename = self.changes_file[:-8]+".dak"
-        dump_file = open_file(dump_filename)
-
-        p = Unpickler(dump_file)
-
-        self.changes.update(p.load())
-        self.dsc.update(p.load())
-        self.files.update(p.load())
-        self.dsc_files.update(p.load())
-
-        next_obj = p.load()
-        if isinstance(next_obj, dict):
-            self.orig_files.update(next_obj)
+        if cnf.has_key("Dinstall::SuiteSuffix"):
+            component = cnf["Dinstall::SuiteSuffix"] + entry["component"]
         else:
-            # Auto-convert old dak files to new format supporting
-            # multiple tarballs
-            orig_tar_gz = None
-            for dsc_file in self.dsc_files.keys():
-                if dsc_file.endswith(".orig.tar.gz"):
-                    orig_tar_gz = dsc_file
-            self.orig_files[orig_tar_gz] = {}
-            if next_obj != None:
-                self.orig_files[orig_tar_gz]["id"] = next_obj
-            next_obj = p.load()
-            if next_obj != None and next_obj != "":
-                self.orig_files[orig_tar_gz]["location"] = next_obj
-            if len(self.orig_files[orig_tar_gz]) == 0:
-                del self.orig_files[orig_tar_gz]
-
-        dump_file.close()
-
-    def sanitised_files(self):
-        ret = {}
-        for name, entry in self.files.items():
-            ret[name] = {}
-            for i in CHANGESFIELDS_FILES:
-                if entry.has_key(i):
-                    ret[name][i] = entry[i]
-
-        return ret
-
-    def sanitised_changes(self):
-        ret = {}
-        # Mandatory changes fields
-        for i in CHANGESFIELDS_MANDATORY:
-            ret[i] = self.changes[i]
-
-        # Optional changes fields
-        for i in CHANGESFIELDS_OPTIONAL:
-            if self.changes.has_key(i):
-                ret[i] = self.changes[i]
-
-        return ret
-
-    def sanitised_dsc(self):
-        ret = {}
-        for i in CHANGESFIELDS_DSC:
-            if self.dsc.has_key(i):
-                ret[i] = self.dsc[i]
-
-        return ret
-
-    def sanitised_dsc_files(self):
-        ret = {}
-        for name, entry in self.dsc_files.items():
-            ret[name] = {}
-            # Mandatory dsc_files fields
-            for i in CHANGESFIELDS_DSCFILES_MANDATORY:
-                ret[name][i] = entry[i]
-
-            # Optional dsc_files fields
-            for i in CHANGESFIELDS_DSCFILES_OPTIONAL:
-                if entry.has_key(i):
-                    ret[name][i] = entry[i]
-
-        return ret
-
-    def sanitised_orig_files(self):
-        ret = {}
-        for name, entry in self.orig_files.items():
-            ret[name] = {}
-            # Optional orig_files fields
-            for i in CHANGESFIELDS_ORIGFILES:
-                if entry.has_key(i):
-                    ret[name][i] = entry[i]
-
-        return ret
-
-    def write_dot_dak(self, dest_dir):
-        """
-        Dump ourself into a cPickle file.
+            component = entry["component"]
+
+        poolname = poolify(entry["source"], component)
+        l = get_location(cnf["Dir::Pool"], component, session=session)
+
+        found, poolfile = check_poolfile(os.path.join(poolname, filename),
+                                         entry['size'],
+                                         entry["md5sum"],
+                                         l.location_id,
+                                         session=session)
+
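+        # check_poolfile returns (found, poolfile); the branches below treat
+        # a found of None as an ambiguous lookup with multiple matches, False
+        # as a size/md5sum mismatch, and a missing poolfile as not in the pool.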
+        if found is None:
+            if logger is not None:
+                logger.log(["E: Found multiple files for pool (%s) for %s" % (filename, component)])
+            return None
+        elif found is False and poolfile is not None:
+            if logger is not None:
+                logger.log(["E: md5sum/size mismatch for %s in pool" % (filename)])
+            return None
+        elif poolfile is None:
+            if logger is not None:
+                logger.log(["E: Could not find %s in pool" % (filename)])
+            return None
+        else:
+            return poolfile
 
-        @type dest_dir: string
-        @param dest_dir: Path where the dumpfile should be stored
+    @session_wrapper
+    def add_known_changes(self, dirpath, in_queue=None, session=None, logger=None):
+        """add "missing" in fields which we will require for the known_changes table"""
+        cnf = Config()
 
-        @note: This could just dump the dictionaries as is, but I'd like to avoid this so
-               there's some idea of what process-accepted & process-new use from
-               process-unchecked. (JT)
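+        # The .changes file's ctime approximates when the upload was first seen.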
+        changesfile = os.path.join(dirpath, self.changes_file)
+        filetime = datetime.datetime.fromtimestamp(os.path.getctime(changesfile))
 
-        """
+        self.mark_missing_fields()
 
-        dump_filename = os.path.join(dest_dir, self.changes_file[:-8] + ".dak")
-        dump_file = open_file(dump_filename, 'w')
-
-        try:
-            os.chmod(dump_filename, 0664)
-        except OSError, e:
-            # chmod may fail when the dumpfile is not owned by the user
-            # invoking dak (like e.g. when NEW is processed by a member
-            # of ftpteam)
-            if e.errno == EPERM:
-                perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
-                # security precaution, should never happen unless a weird
-                # umask is set anywhere
-                if perms & stat.S_IWOTH:
-                    fubar("%s is world writable and chmod failed." % \
-                        (dump_filename,))
-                # ignore the failed chmod otherwise as the file should
-                # already have the right privileges and is just, at worst,
-                # unreadable for world
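+        # distribution, architecture and binary may be dicts keyed by value;
+        # flatten them to the space-separated strings the table stores.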
+        multivalues = {}
+        for key in ("distribution", "architecture", "binary"):
+            if isinstance(self.changes[key], dict):
+                multivalues[key] = " ".join(self.changes[key].keys())
             else:
-                raise
+                multivalues[key] = self.changes[key]
+
+        chg = DBChange()
+        chg.changesname = self.changes_file
+        chg.seen = filetime
+        chg.in_queue_id = in_queue
+        chg.source = self.changes["source"]
+        chg.binaries = multivalues["binary"]
+        chg.architecture = multivalues["architecture"]
+        chg.version = self.changes["version"]
+        chg.distribution = multivalues["distribution"]
+        chg.urgency = self.changes["urgency"]
+        chg.maintainer = self.changes["maintainer"]
+        chg.fingerprint = self.changes["fingerprint"]
+        chg.changedby = self.changes["changed-by"]
+        chg.date = self.changes["date"]
+
+        session.add(chg)
+
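+        # Register each file listed in the .changes as a pending file,
+        # computing any checksums the upload did not supply.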
+        files = []
+        for chg_fn, entry in self.files.items():
+            try:
+                f = open(os.path.join(dirpath, chg_fn))
+                cpf = ChangePendingFile()
+                cpf.filename = chg_fn
+                cpf.size = entry['size']
+                cpf.md5sum = entry['md5sum']
+
+                if entry.has_key('sha1sum'):
+                    cpf.sha1sum = entry['sha1sum']
+                else:
+                    f.seek(0)
+                    cpf.sha1sum = apt_pkg.sha1sum(f)
+
+                if entry.has_key('sha256sum'):
+                    cpf.sha256sum = entry['sha256sum']
+                else:
+                    f.seek(0)
+                    cpf.sha256sum = apt_pkg.sha256sum(f)
+
+                session.add(cpf)
+                files.append(cpf)
+                f.close()
+
+            except IOError:
+                # Can't find the file, try to look it up in the pool
+                poolfile = self.__get_file_from_pool(chg_fn, entry, session, logger)
+                if poolfile:
+                    chg.poolfiles.append(poolfile)
+
+        chg.files = files
+
+        # Add files referenced in .dsc, but not included in .changes
+        for name, entry in self.dsc_files.items():
+            if self.files.has_key(name):
+                continue
 
-        p = Pickler(dump_file, 1)
+            entry['source'] = self.changes['source']
+            poolfile = self.__get_file_from_pool(name, entry, session, logger)
+            if poolfile:
+                chg.poolfiles.append(poolfile)
 
-        p.dump(self.sanitised_changes())
-        p.dump(self.sanitised_dsc())
-        p.dump(self.sanitised_files())
-        p.dump(self.sanitised_dsc_files())
-        p.dump(self.sanitised_orig_files())
+        session.commit()
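+        # Re-read the committed row so the caller gets the persisted DBChange.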
+        chg = session.query(DBChange).filter_by(changesname=self.changes_file).one()
 
-        dump_file.close()
+        return chg
 
     def unknown_files_fields(self, name):
         return sorted(list( set(self.files[name].keys()) -