Fix problem with uploaders containing a "," in their name
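The Uploaders field of a .dsc can contain names with an embedded comma (for example "Doe, John <jd@example.org>"), so splitting the raw field on "," cuts such an entry in half. The add_dsc_to_db hunk below therefore splits on the ">, " that terminates each address instead. A minimal sketch of the two behaviours, using a made-up Uploaders value:

    # Illustration only; this Uploaders value is made up.
    uploaders = "Doe, John <jd@example.org>, Jane Roe <jr@example.org>"

    # Old behaviour: splitting on "," breaks the first entry apart.
    print([up.strip() for up in uploaders.split(",")])
    # ['Doe', 'John <jd@example.org>', 'Jane Roe <jr@example.org>']

    # New behaviour: split on the ">, " that ends each address.
    print([up.strip() for up in uploaders.replace(">, ", ">\t").split("\t")])
    # ['Doe, John <jd@example.org>', 'Jane Roe <jr@example.org>']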
diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index 890c7c303cc50aa2c69decb7719a6ab779573716..5f45cdfc9f8ee1763ef3ab2f3c40337c36d4a577 100755
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -530,6 +530,12 @@ class BuildQueue(object):
 
             os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
 
+            # Crude hack with open and append; this whole section should be redone.
+            if self.notautomatic:
+                release = open("Release", "a")
+                release.write("NotAutomatic: yes\n")
+                release.close()
+
             # Sign if necessary
             if self.signingkey:
                 cnf = Config()
@@ -967,17 +973,16 @@ def insert_content_paths(binary_id, fullpaths, session=None):
 
     try:
         # Insert paths
-        pathcache = {}
-
         def generate_path_dicts():
             for fullpath in fullpaths:
                 if fullpath.startswith( './' ):
                     fullpath = fullpath[2:]
 
-                yield {'fulename':fullpath, 'id': binary_id }
+                yield {'filename':fullpath, 'id': binary_id }
 
-        session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
-                         generate_path_dicts() )
+        for d in generate_path_dicts():
+            session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
+                         d )
 
         session.commit()
         if privatetrans:
@@ -2300,7 +2305,7 @@ def add_dsc_to_db(u, filename, session=None):
     # Add the src_uploaders to the DB
     uploader_ids = [source.maintainer_id]
     if u.pkg.dsc.has_key("uploaders"):
-        for up in u.pkg.dsc["uploaders"].split(","):
+        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
             up = up.strip()
             uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
 
@@ -2837,6 +2842,13 @@ class DBConn(object):
                                  section=self.tbl_udeb_contents.c.section,
                                  filename=self.tbl_udeb_contents.c.filename))
 
+        mapper(BuildQueue, self.tbl_build_queue,
+               properties = dict(queue_id = self.tbl_build_queue.c.id))
+
+        mapper(BuildQueueFile, self.tbl_build_queue_files,
+               properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
+                                 poolfile = relation(PoolFile, backref='buildqueueinstances')))
+
         mapper(DBBinary, self.tbl_binaries,
                properties = dict(binary_id = self.tbl_binaries.c.id,
                                  package = self.tbl_binaries.c.package,
@@ -2941,11 +2953,8 @@ class DBConn(object):
                                  fingerprint = relation(Fingerprint),
                                  source_files = relation(ChangePendingFile,
                                                          secondary=self.tbl_changes_pending_source_files,
-                                                         backref="pending_sources"),
-                                 files = relation(KnownChangePendingFile, backref="changesfile")))
+                                                         backref="pending_sources")))
 
-        mapper(KnownChangePendingFile, self.tbl_changes_pending_files,
-               properties = dict(known_change_pending_file_id = self.tbl_changes_pending_files.c.id))
 
         mapper(KeyringACLMap, self.tbl_keyring_acl_map,
                properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,