git.decadent.org.uk Git - dak.git/blobdiff - daklib/dbconn.py
Python modules should not be executable
old mode 100755 (executable)
new mode 100644 (file)
index 4a45ae2..5aa1cf5
@@ -42,6 +42,8 @@ import traceback
 import commands
 import signal
 
+from daklib.gpg import SignedFile
+
 try:
     # python >= 2.6
     import json
@@ -110,11 +112,11 @@ class DebVersion(UserDefinedType):
         return None
 
 sa_major_version = sqlalchemy.__version__[0:3]
-if sa_major_version in ["0.5", "0.6"]:
+if sa_major_version in ["0.5", "0.6", "0.7"]:
     from sqlalchemy.databases import postgres
     postgres.ischema_names['debversion'] = DebVersion
 else:
-    raise Exception("dak only ported to SQLA versions 0.5 and 0.6.  See daklib/dbconn.py")
+    raise Exception("dak only ported to SQLA versions 0.5 to 0.7.  See daklib/dbconn.py")
 
 ################################################################################
 
@@ -500,7 +502,7 @@ def subprocess_setup():
 class DBBinary(ORMObject):
     def __init__(self, package = None, source = None, version = None, \
         maintainer = None, architecture = None, poolfile = None, \
-        binarytype = 'deb'):
+        binarytype = 'deb', fingerprint=None):
         self.package = package
         self.source = source
         self.version = version
@@ -508,6 +510,7 @@ class DBBinary(ORMObject):
         self.architecture = architecture
         self.poolfile = poolfile
         self.binarytype = binarytype
+        self.fingerprint = fingerprint
 
     @property
     def pkid(self):
@@ -558,10 +561,10 @@ class DBBinary(ORMObject):
         @rtype: text
         @return: stanza text of the control section.
         '''
-        import apt_inst
+        import utils
         fullpath = self.poolfile.fullpath
         deb_file = open(fullpath, 'r')
-        stanza = apt_inst.debExtractControl(deb_file)
+        stanza = utils.deb_extract_control(deb_file)
         deb_file.close()
 
         return stanza
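
Note: the apt_inst call above is swapped for a helper in daklib's utils module, whose body is not part of this diff. A plausible sketch of such a deb_extract_control wrapper, assuming it still delegates to python-apt's apt_inst, could look like this (hypothetical only; the real daklib helper may differ):

import apt_inst

def deb_extract_control(deb_file):
    """Return the control file of an open .deb as a string.

    Hypothetical sketch; not the implementation shipped in daklib/utils.py.
    """
    return apt_inst.DebFile(deb_file).control.extractdata("control")
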
@@ -878,7 +881,6 @@ class BuildQueue(object):
         # Prepare BuildQueueFile object
         qf = BuildQueueFile()
         qf.build_queue_id = self.queue_id
-        qf.lastused = datetime.now()
         qf.filename = poolfile_basename
 
         targetpath = poolfile.fullpath
@@ -1423,6 +1425,10 @@ class PoolFile(ORMObject):
     def fullpath(self):
         return os.path.join(self.location.path, self.filename)
 
+    @property
+    def basename(self):
+        return os.path.basename(self.filename)
+
     def is_valid(self, filesize = -1, md5sum = None):
         return self.filesize == long(filesize) and self.md5sum == md5sum
 
@@ -2404,69 +2410,16 @@ __all__.append('SrcContents')
 
 ################################################################################
 
-from debian.debfile import Deb822
-
-# Temporary Deb822 subclass to fix bugs with : handling; see #597249
-class Dak822(Deb822):
-    def _internal_parser(self, sequence, fields=None):
-        # The key is non-whitespace, non-colon characters before any colon.
-        key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
-        single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
-        multi = re.compile(key_part + r"$")
-        multidata = re.compile(r"^\s(?P<data>.+?)\s*$")
-
-        wanted_field = lambda f: fields is None or f in fields
-
-        if isinstance(sequence, basestring):
-            sequence = sequence.splitlines()
-
-        curkey = None
-        content = ""
-        for line in self.gpg_stripped_paragraph(sequence):
-            m = single.match(line)
-            if m:
-                if curkey:
-                    self[curkey] = content
-
-                if not wanted_field(m.group('key')):
-                    curkey = None
-                    continue
-
-                curkey = m.group('key')
-                content = m.group('data')
-                continue
-
-            m = multi.match(line)
-            if m:
-                if curkey:
-                    self[curkey] = content
-
-                if not wanted_field(m.group('key')):
-                    curkey = None
-                    continue
-
-                curkey = m.group('key')
-                content = ""
-                continue
-
-            m = multidata.match(line)
-            if m:
-                content += '\n' + line # XXX not m.group('data')?
-                continue
-
-        if curkey:
-            self[curkey] = content
-
-
 class DBSource(ORMObject):
     def __init__(self, source = None, version = None, maintainer = None, \
-        changedby = None, poolfile = None, install_date = None):
+        changedby = None, poolfile = None, install_date = None, fingerprint = None):
         self.source = source
         self.version = version
         self.maintainer = maintainer
         self.changedby = changedby
         self.poolfile = poolfile
         self.install_date = install_date
+        self.fingerprint = fingerprint
 
     @property
     def pkid(self):
@@ -2479,7 +2432,7 @@ class DBSource(ORMObject):
 
     def not_null_constraints(self):
         return ['source', 'version', 'install_date', 'maintainer', \
-            'changedby', 'poolfile', 'install_date']
+            'changedby', 'poolfile']
 
     def read_control_fields(self):
         '''
@@ -2489,7 +2442,9 @@ class DBSource(ORMObject):
         @return: fields is the dsc information in a dictionary form
         '''
         fullpath = self.poolfile.fullpath
-        fields = Dak822(open(self.poolfile.fullpath, 'r'))
+        contents = open(fullpath, 'r').read()
+        signed_file = SignedFile(contents, keyrings=[], require_signature=False)
+        fields = apt_pkg.TagSection(signed_file.contents)
         return fields
 
     metadata = association_proxy('key', 'value')
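
Note: the Dak822 workaround removed above is replaced by daklib.gpg.SignedFile plus apt_pkg.TagSection, as read_control_fields now shows. A self-contained sketch of that parsing path, with the .dsc path and the empty keyring list as placeholders rather than values from this diff:

import apt_pkg
from daklib.gpg import SignedFile

def parse_dsc_fields(path, keyrings=None):
    # Strip an inline OpenPGP signature without verifying it, then parse
    # the remaining RFC 822-style stanza with apt_pkg.
    contents = open(path, 'r').read()
    signed = SignedFile(contents, keyrings=keyrings or [], require_signature=False)
    return apt_pkg.TagSection(signed.contents)

fields = parse_dsc_fields('/path/to/example.dsc')
source, version = fields['Source'], fields['Version']
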
@@ -2556,11 +2511,12 @@ def source_exists(source, source_version, suites = ["any"], session=None):
         if suite != "any":
             # source must exist in 'suite' or a suite that is enhanced by 'suite'
             s = get_suite(suite, session)
-            enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
-            considered_suites = [ vc.reference for vc in enhances_vcs ]
-            considered_suites.append(s)
+            if s:
+                enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
+                considered_suites = [ vc.reference for vc in enhances_vcs ]
+                considered_suites.append(s)
 
-            q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))
+                q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))
 
         if q.count() > 0:
             continue
@@ -2997,6 +2953,10 @@ class Suite(ORMObject):
         else:
             return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()
 
+    @property
+    def path(self):
+        return os.path.join(self.archive.path, 'dists', self.suite_name)
+
 __all__.append('Suite')
 
 @session_wrapper
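
Note: the new Suite.path property simply joins the archive root, the literal 'dists' component, and the suite name. For illustration (the archive path and suite name below are made-up values, not taken from this diff):

import os

archive_path = '/srv/ftp-master.debian.org/ftp'   # illustrative only
suite_name = 'unstable'                           # illustrative only
print os.path.join(archive_path, 'dists', suite_name)
# -> /srv/ftp-master.debian.org/ftp/dists/unstable
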
@@ -3357,8 +3317,8 @@ class DBConn(object):
         mapper(Architecture, self.tbl_architecture,
             properties = dict(arch_id = self.tbl_architecture.c.id,
                suites = relation(Suite, secondary=self.tbl_suite_architectures,
-                   order_by='suite_name',
-                   backref=backref('architectures', order_by='arch_string'))),
+                   order_by=self.tbl_suite.c.suite_name,
+                   backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
             extension = validator)
 
         mapper(Archive, self.tbl_archive,
@@ -3594,7 +3554,8 @@ class DBConn(object):
                                  copy_queues = relation(BuildQueue,
                                      secondary=self.tbl_suite_build_queue_copy),
                                  srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
-                                     backref=backref('suites', lazy='dynamic'))),
+                                     backref=backref('suites', lazy='dynamic')),
+                                 archive = relation(Archive, backref='suites')),
                 extension = validator)
 
         mapper(Uid, self.tbl_uid,