Add by-hash support
diff --git a/daklib/changes.py b/daklib/changes.py
old mode 100755
new mode 100644
index 150e7c5..d6ccaa0
--- a/daklib/changes.py
+++ b/daklib/changes.py
@@ -29,13 +29,17 @@ Changes class for dak
 
 import os
 import stat
+
+import datetime
 from cPickle import Unpickler, Pickler
 from errno import EPERM
 
-from apt_inst import debExtractControl
-from apt_pkg import ParseSection
+import apt_pkg
+from apt_pkg import TagSection
 
-from utils import open_file, fubar, poolify
+from utils import open_file, fubar, poolify, deb_extract_control
+from config import *
+from dbconn import *
 
 ###############################################################################
 
@@ -76,6 +79,10 @@ CHANGESFIELDS_DSCFILES_OPTIONAL = [ "files id" ]
 
 __all__.append('CHANGESFIELDS_DSCFILES_OPTIONAL')
 
+CHANGESFIELDS_ORIGFILES = [ "id", "location" ]
+
+__all__.append('CHANGESFIELDS_ORIGFILES')
+
 ###############################################################################
 
 class Changes(object):
@@ -91,10 +98,7 @@ class Changes(object):
         self.dsc = {}
         self.files = {}
         self.dsc_files = {}
-
-        self.orig_tar_id = None
-        self.orig_tar_location = ""
-        self.orig_tar_gz = None
+        self.orig_files = {}
 
     def file_summary(self):
         # changes["distribution"] may not exist in corner cases
@@ -122,7 +126,7 @@ class Changes(object):
 
                 if entry["type"] == "deb":
                     deb_fh = open_file(name)
-                    summary += ParseSection(debExtractControl(deb_fh))["Description"] + '\n'
+                    summary += TagSection(deb_extract_control(deb_fh))["Description"] + '\n'
                     deb_fh.close()
 
             else:
@@ -150,7 +154,6 @@ class Changes(object):
         or the text of a warning if there are
         """
 
-        conf = Config()
         summary = ""
 
         # Abandon the check if it's a non-sourceful upload
@@ -166,125 +169,246 @@
                                                                                             entry["override section"])
 
                 if entry["priority"] != "-":
-                    if entry["priority"] != entry["override_priority"]:
+                    if entry["priority"] != entry["override priority"]:
                         summary += "%s: package says priority is %s, override says %s.\n" % (name,
                                                                                              entry["priority"],
                                                                                              entry["override priority"])
 
         return summary
 
+    @session_wrapper
+    def remove_known_changes(self, session=None):
+        session.delete(get_dbchange(self.changes_file, session))
+
+    def mark_missing_fields(self):
+        """add "missing" in fields which we will require for the known_changes table"""
+        for key in ['urgency', 'maintainer', 'fingerprint', 'changed-by' ]:
+            if (not self.changes.has_key(key)) or (not self.changes[key]):
+                self.changes[key]='missing'
+
+    def __get_file_from_pool(self, filename, entry, session, logger):
+        cnf = Config()
+
+        if cnf.has_key("Dinstall::SuiteSuffix"):
+            component = cnf["Dinstall::SuiteSuffix"] + entry["component"]
+        else:
+            component = entry["component"]
+
+        poolname = poolify(entry["source"], component)
+        location = get_location(cnf["Dir::Pool"], component, session=session)
+
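+        # check_poolfile() returns a (found, poolfile) pair: found is None
+        # when the lookup was ambiguous, False on a size/md5sum mismatch,
+        # and poolfile is None when no matching file exists at all.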
+        found, poolfile = check_poolfile(os.path.join(poolname, filename),
+                                         entry['size'],
+                                         entry["md5sum"],
+                                         location.location_id,
+                                         session=session)
+
+        if found is None:
+            if logger is not None:
+                logger.log(["E: Found multiple files in pool (%s) for component %s" % (filename, component)])
+            return None
+        elif found is False and poolfile is not None:
+            if logger is not None:
+                logger.log(["E: md5sum/size mismatch for %s in pool" % (filename)])
+            return None
+        elif poolfile is None:
+            if logger is not None:
+                logger.log(["E: Could not find %s in pool" % (filename)])
+            return None
+        else:
+            return poolfile
 
-    def load_dot_dak(self, changesfile):
-        """
-        Update ourself by reading a previously created cPickle .dak dumpfile.
-        """
+    @session_wrapper
+    def add_known_changes(self, dirpath, in_queue=None, session=None, logger=None):
+        """add "missing" in fields which we will require for the known_changes table"""
+        cnf = Config()
+
+        changesfile = os.path.join(dirpath, self.changes_file)
+        filetime = datetime.datetime.fromtimestamp(os.path.getctime(changesfile))
+
+        self.mark_missing_fields()
 
-        self.changes_file = changesfile
-        dump_filename = self.changes_file[:-8]+".dak"
-        dump_file = open_file(dump_filename)
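+        # These fields may have been parsed into dicts keyed by value;
+        # flatten them back to space-separated strings for the database.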
+        multivalues = {}
+        for key in ("distribution", "architecture", "binary"):
+            if isinstance(self.changes[key], dict):
+                multivalues[key] = " ".join(self.changes[key].keys())
+            else:
+                multivalues[key] = self.changes[key]
+
+        chg = DBChange()
+        chg.changesname = self.changes_file
+        chg.seen = filetime
+        chg.in_queue_id = in_queue
+        chg.source = self.changes["source"]
+        chg.binaries = multivalues["binary"]
+        chg.architecture = multivalues["architecture"]
+        chg.version = self.changes["version"]
+        chg.distribution = multivalues["distribution"]
+        chg.urgency = self.changes["urgency"]
+        chg.maintainer = self.changes["maintainer"]
+        chg.fingerprint = self.changes["fingerprint"]
+        chg.changedby = self.changes["changed-by"]
+        chg.date = self.changes["date"]
+
+        session.add(chg)
+
+        files = []
+        for chg_fn, entry in self.files.items():
+            try:
+                f = open(os.path.join(dirpath, chg_fn))
+                cpf = ChangePendingFile()
+                cpf.filename = chg_fn
+                cpf.size = entry['size']
+                cpf.md5sum = entry['md5sum']
+
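+                # Prefer the checksums recorded in the .changes entry; fall
+                # back to hashing the file ourselves with apt_pkg.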
+                if entry.has_key('sha1sum'):
+                    cpf.sha1sum = entry['sha1sum']
+                else:
+                    f.seek(0)
+                    cpf.sha1sum = apt_pkg.sha1sum(f)
+
+                if entry.has_key('sha256sum'):
+                    cpf.sha256sum = entry['sha256sum']
+                else:
+                    f.seek(0)
+                    cpf.sha256sum = apt_pkg.sha256sum(f)
+
+                session.add(cpf)
+                files.append(cpf)
+                f.close()
+
+            except IOError:
+                # Can't find the file, try to look it up in the pool
+                poolfile = self.__get_file_from_pool(chg_fn, entry, session, logger)
+                if poolfile:
+                    chg.poolfiles.append(poolfile)
+
+        chg.files = files
+
+        # Add files referenced in .dsc, but not included in .changes
+        for name, entry in self.dsc_files.items():
+            if self.files.has_key(name):
+                continue
 
-        p = Unpickler(dump_file)
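+            # __get_file_from_pool() builds the pool path from the source
+            # package name, which dsc_files entries do not carry themselves.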
+            entry['source'] = self.changes['source']
+            poolfile = self.__get_file_from_pool(name, entry, session, logger)
+            if poolfile:
+                chg.poolfiles.append(poolfile)
 
-        self.changes.update(p.load())
-        self.dsc.update(p.load())
-        self.files.update(p.load())
-        self.dsc_files.update(p.load())
+        session.commit()
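+        # Re-fetch the just-committed row so the caller gets a live DBChange.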
+        chg = session.query(DBChange).filter_by(changesname=self.changes_file).one()
 
-        self.orig_tar_id = p.load()
-        self.orig_tar_location = p.load()
+        return chg
 
-        dump_file.close()
+    def unknown_files_fields(self, name):
+        return sorted(list( set(self.files[name].keys()) -
+                            set(CHANGESFIELDS_FILES)))
 
-    def sanitised_files(self):
-        ret = {}
+    def unknown_changes_fields(self):
+        return sorted(list( set(self.changes.keys()) -
+                            set(CHANGESFIELDS_MANDATORY + CHANGESFIELDS_OPTIONAL)))
+
+    def unknown_dsc_fields(self):
+        return sorted(list( set(self.dsc.keys()) -
+                            set(CHANGESFIELDS_DSC)))
+
+    def unknown_dsc_files_fields(self, name):
+        return sorted(list( set(self.dsc_files[name].keys()) -
+                            set(CHANGESFIELDS_DSCFILES_MANDATORY + CHANGESFIELDS_DSCFILES_OPTIONAL)))
+
+    def str_files(self):
+        r = []
         for name, entry in self.files.items():
-            ret[name] = {}
+            r.append("  %s:" % (name))
             for i in CHANGESFIELDS_FILES:
                 if entry.has_key(i):
-                    ret[name][i] = entry[i]
+                    r.append("   %s: %s" % (i.capitalize(), entry[i]))
+            xfields = self.unknown_files_fields(name)
+            if len(xfields) > 0:
+                r.append("files[%s] still has following unrecognised keys: %s" % (name, ", ".join(xfields)))
 
-        return ret
+        return r
 
-    def sanitised_changes(self):
-        ret = {}
-        # Mandatory changes fields
+    def str_changes(self):
+        r = []
         for i in CHANGESFIELDS_MANDATORY:
-            ret[i] = self.changes[i]
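+            # Mandatory fields may have been parsed into lists or dicts;
+            # render them as space-separated strings for display.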
+            val = self.changes[i]
+            if isinstance(val, list):
+                val = " ".join(val)
+            elif isinstance(val, dict):
+                val = " ".join(val.keys())
+            r.append('  %s: %s' % (i.capitalize(), val))
 
-        # Optional changes fields
         for i in CHANGESFIELDS_OPTIONAL:
             if self.changes.has_key(i):
-                ret[i] = self.changes[i]
+                r.append('  %s: %s' % (i.capitalize(), self.changes[i]))
 
-        return ret
+        xfields = self.unknown_changes_fields()
+        if len(xfields) > 0:
+            r.append("Warning: changes still has the following unrecognised fields: %s" % ", ".join(xfields))
 
-    def sanitised_dsc(self):
-        ret = {}
+        return r
+
+    def str_dsc(self):
+        r = []
         for i in CHANGESFIELDS_DSC:
             if self.dsc.has_key(i):
-                ret[i] = self.dsc[i]
+                r.append('  %s: %s' % (i.capitalize(), self.dsc[i]))
+
+        xfields = self.unknown_dsc_fields()
+        if len(xfields) > 0:
+            r.append("Warning: dsc still has the following unrecognised fields: %s" % ", ".join(xfields))
 
-        return ret
+        return r
 
-    def sanitised_dsc_files(self):
-        ret = {}
+    def str_dsc_files(self):
+        r = []
         for name, entry in self.dsc_files.items():
-            ret[name] = {}
-            # Mandatory dsc_files fields
+            r.append("  %s:" % (name))
             for i in CHANGESFIELDS_DSCFILES_MANDATORY:
-                ret[name][i] = entry[i]
-
-            # Optional dsc_files fields
+                r.append("   %s: %s" % (i.capitalize(), entry[i]))
             for i in CHANGESFIELDS_DSCFILES_OPTIONAL:
                 if entry.has_key(i):
-                    ret[name][i] = entry[i]
+                    r.append("   %s: %s" % (i.capitalize(), entry[i]))
+            xfields = self.unknown_dsc_files_fields(name)
+            if len(xfields) > 0:
+                r.append("dsc_files[%s] still has following unrecognised keys: %s" % (name, ", ".join(xfields)))
 
-        return ret
+        return r
 
-    def write_dot_dak(self, dest_dir):
-        """
-        Dump ourself into a cPickle file.
+    def __str__(self):
+        r = []
 
-        @type dest_dir: string
-        @param dest_dir: Path where the dumpfile should be stored
+        r.append(" Changes:")
+        r += self.str_changes()
 
-        @note: This could just dump the dictionaries as is, but I'd like to avoid this so
-               there's some idea of what process-accepted & process-new use from
-               process-unchecked. (JT)
+        r.append("")
 
-        """
+        r.append(" Dsc:")
+        r += self.str_dsc()
 
-        dump_filename = os.path.join(dest_dir, self.changes_file[:-8] + ".dak")
-        dump_file = open_file(dump_filename, 'w')
-
-        try:
-            os.chmod(dump_filename, 0664)
-        except OSError, e:
-            # chmod may fail when the dumpfile is not owned by the user
-            # invoking dak (like e.g. when NEW is processed by a member
-            # of ftpteam)
-            if e.errno == EPERM:
-                perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
-                # security precaution, should never happen unless a weird
-                # umask is set anywhere
-                if perms & stat.S_IWOTH:
-                    fubar("%s is world writable and chmod failed." % \
-                        (dump_filename,))
-                # ignore the failed chmod otherwise as the file should
-                # already have the right privileges and is just, at worst,
-                # unreadable for world
-            else:
-                raise
+        r.append("")
+
+        r.append(" Files:")
+        r += self.str_files()
 
-        p = Pickler(dump_file, 1)
+        r.append("")
 
-        p.dump(self.sanitised_changes())
-        p.dump(self.sanitised_dsc())
-        p.dump(self.sanitised_files())
-        p.dump(self.sanitised_dsc_files())
-        p.dump(self.orig_tar_id)
-        p.dump(self.orig_tar_location)
+        r.append(" Dsc Files:")
+        r += self.str_dsc_files()
 
-        dump_file.close()
+        return "\n".join(r)
 
 __all__.append('Changes')