X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fdbconn.py;h=ea11a1454724cd0f77aef276e89cb01c8e1b3642;hb=2f2cc6977f137b0581c83aceb881dd667fee47f7;hp=de055bcd3a4205b661ab2bd440d141fc58476206;hpb=fffe7d0517f4d1ab8a15b9e444bcb2fb92b2bd46;p=dak.git

diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index de055bcd..ea11a145 100755
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -530,6 +530,12 @@ class BuildQueue(object):
 
             os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
 
+            # Crude hack with open and append, but this whole section is and should be redone.
+            if self.notautomatic:
+                release=open("Release", "a")
+                release.write("NotAutomatic: yes")
+                release.close()
+
             # Sign if necessary
             if self.signingkey:
                 cnf = Config()
@@ -967,17 +973,16 @@ def insert_content_paths(binary_id, fullpaths, session=None):
 
     try:
         # Insert paths
-        pathcache = {}
-
         def generate_path_dicts():
             for fullpath in fullpaths:
                 if fullpath.startswith( './' ):
                     fullpath = fullpath[2:]
 
-                yield {'fulename':fullpath, 'id': binary_id }
+                yield {'filename':fullpath, 'id': binary_id }
 
-        session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
-                         generate_path_dicts() )
+        for d in generate_path_dicts():
+            session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
+                             d )
 
         session.commit()
         if privatetrans:
@@ -2300,7 +2305,7 @@ def add_dsc_to_db(u, filename, session=None):
     # Add the src_uploaders to the DB
     uploader_ids = [source.maintainer_id]
     if u.pkg.dsc.has_key("uploaders"):
-        for up in u.pkg.dsc["uploaders"].split(","):
+        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
             up = up.strip()
             uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
 
@@ -2753,7 +2758,7 @@ class DBConn(object):
             'binaries',
             'binary_acl',
             'binary_acl_map',
-            'bin_contents'
+            'bin_contents',
             'build_queue',
             'build_queue_files',
             'component',
@@ -2824,7 +2829,7 @@ class DBConn(object):
         mapper(DebContents, self.tbl_deb_contents,
               properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
                                 package=self.tbl_deb_contents.c.package,
-                                component=self.tbl_deb_contents.c.component,
+                                suite=self.tbl_deb_contents.c.suite,
                                 arch=self.tbl_deb_contents.c.arch,
                                 section=self.tbl_deb_contents.c.section,
                                 filename=self.tbl_deb_contents.c.filename))
@@ -2832,11 +2837,18 @@ class DBConn(object):
         mapper(UdebContents, self.tbl_udeb_contents,
               properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
                                 package=self.tbl_udeb_contents.c.package,
-                                component=self.tbl_udeb_contents.c.component,
+                                suite=self.tbl_udeb_contents.c.suite,
                                 arch=self.tbl_udeb_contents.c.arch,
                                 section=self.tbl_udeb_contents.c.section,
                                 filename=self.tbl_udeb_contents.c.filename))
 
+        mapper(BuildQueue, self.tbl_build_queue,
+               properties = dict(queue_id = self.tbl_build_queue.c.id))
+
+        mapper(BuildQueueFile, self.tbl_build_queue_files,
+               properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
+                                 poolfile = relation(PoolFile, backref='buildqueueinstances')))
+
         mapper(DBBinary, self.tbl_binaries,
                properties = dict(binary_id = self.tbl_binaries.c.id,
                                  package = self.tbl_binaries.c.package,
@@ -2941,11 +2953,8 @@ class DBConn(object):
                                  fingerprint = relation(Fingerprint),
                                  source_files = relation(ChangePendingFile,
                                                          secondary=self.tbl_changes_pending_source_files,
-                                                         backref="pending_sources"),
-                                 files = relation(KnownChangePendingFile, backref="changesfile")))
+                                                         backref="pending_sources")))
 
-        mapper(KnownChangePendingFile, self.tbl_changes_pending_files,
-               properties = dict(known_change_pending_file_id = self.tbl_changes_pending_files.c.id))
 
         mapper(KeyringACLMap, self.tbl_keyring_acl_map,
                properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
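
The Uploaders hunk above (add_dsc_to_db) switches from a plain split(",") to replace(">, ", ">\t").split("\t"). The likely reason is that an uploader's display name may itself contain a comma (for example "Doe, John <jdoe@example.org>"), so splitting the field on every comma cuts such an entry in half, whereas a ">" followed by ", " marks a genuine boundary between entries. The sketch below is illustrative only and not part of the patch; the sample Uploaders value and the parse_uploaders() helper are invented here to show the difference.

# Illustrative sketch, not part of daklib/dbconn.py: demonstrates why the
# Uploaders field is split on ">, " boundaries rather than on every comma.

def parse_uploaders(uploaders):
    # Same approach as the patched code: mark the real entry boundaries
    # (">" followed by ", ") with a tab, then split on the tab.
    return [up.strip() for up in uploaders.replace(">, ", ">\t").split("\t")]

if __name__ == "__main__":
    sample = "Jane Example <jane@example.org>, Doe, John <jdoe@example.org>"

    # Naive comma split breaks the second entry into two bogus pieces.
    print([up.strip() for up in sample.split(",")])

    # Boundary-aware split keeps each uploader intact.
    print(parse_uploaders(sample))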