X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fdbconn.py;h=1418d081856192b9574fa6a63800138df9c36794;hb=4298f9fe5c993f4981f7d4f337e2dcccda4fc073;hp=dd5aa2decf3dc2e4ab10ca0f6c293326cfa41a21;hpb=b5c1801337d0115ceff491e0279f2cbd25c5d49b;p=dak.git

diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index dd5aa2de..1418d081 100644
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -503,18 +503,26 @@ class BuildQueue(object):
         (ac_fd, ac_name) = mkstemp()
         os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
                                             'filelist': fl_name})
-        os.close()
+        os.close(ac_fd)
 
         # Run apt-ftparchive generate
-        os.chdir(os.path.dirname(fl_name))
-        os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(fl_name))
+        os.chdir(os.path.dirname(ac_name))
+        os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
 
         # Run apt-ftparchive release
         # TODO: Eww - fix this
         bname = os.path.basename(self.path)
         os.chdir(self.path)
         os.chdir('..')
-        os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="${archs}" release %s > Release""", [self.origin, self.label, self.releasedescription, arches, bname])
+
+        # We have to remove the Release file otherwise it'll be included in the
+        # new one
+        try:
+            os.unlink(os.path.join(bname, 'Release'))
+        except OSError:
+            pass
+
+        os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
 
         # Sign if necessary
         if self.signingkey:
@@ -557,23 +565,23 @@ class BuildQueue(object):
             except OSError:
                 pass
 
-    def clean_and_update(self, starttime, dryrun=False):
+    def clean_and_update(self, starttime, Logger, dryrun=False):
         """WARNING: This routine commits for you"""
         session = DBConn().session().object_session(self)
 
-        if self.generate_metadata:
+        if self.generate_metadata and not dryrun:
             self.write_metadata(starttime)
 
         # Grab files older than our execution time
-        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
+        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
 
         for o in older:
             killdb = False
             try:
                 if dryrun:
-                    print "I: Would have removed %s from the queue"
+                    Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
                 else:
-                    print "I: Removing %s from the queue"
+                    Logger.log(["I: Removing %s from the queue" % o.fullpath])
                     os.unlink(o.fullpath)
                     killdb = True
             except OSError, e:
@@ -582,7 +590,7 @@ class BuildQueue(object):
                     killdb = True
                 else:
                     # TODO: Replace with proper logging call
-                    print "E: Could not remove %s" % o.fullpath
+                    Logger.log(["E: Could not remove %s" % o.fullpath])
 
             if killdb:
                 session.delete(o)
@@ -598,13 +606,13 @@ class BuildQueue(object):
             except NoResultFound:
                 fp = os.path.join(self.path, f)
                 if dryrun:
-                    print "I: Would remove unused link %s" % fp
+                    Logger.log(["I: Would remove unused link %s" % fp])
                 else:
-                    print "I: Removing unused link %s" % fp
+                    Logger.log(["I: Removing unused link %s" % fp])
                     try:
                         os.unlink(fp)
                     except OSError:
-                        print "E: Failed to unlink unreferenced file %s" % r.fullpath
+                        Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
 
     def add_file_from_pool(self, poolfile):
         """Copies a file into the pool.  Assumes that the PoolFile object is
@@ -1144,7 +1152,7 @@ def get_poolfile_like_name(filename, session=None):
     """
 
     # TODO: There must be a way of properly using bind parameters with %FOO%
-    q = session.query(PoolFile).filter(PoolFile.filename.like('%%%s%%' % filename))
+    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
 
     return q.all()
 
@@ -1429,6 +1437,19 @@ class DBChange(object):
     def __repr__(self):
         return '<DBChange %s>' % self.changesname
 
+    def clean_from_queue(self):
+        session = DBConn().session().object_session(self)
+
+        # Remove changes_pool_files entries
+        self.poolfiles = []
+
+        # Remove changes_pending_files references
+        self.files = []
+
+        # Clear out of queue
+        self.in_queue = None
+        self.approved_for_id = None
+
 __all__.append('DBChange')
 
 @session_wrapper
@@ -2835,6 +2856,16 @@ class DBConn(object):
                                  poolfiles = relation(PoolFile,
                                                       secondary=self.tbl_changes_pool_files,
                                                       backref="changeslinks"),
+                                 seen = self.tbl_changes.c.seen,
+                                 source = self.tbl_changes.c.source,
+                                 binaries = self.tbl_changes.c.binaries,
+                                 architecture = self.tbl_changes.c.architecture,
+                                 distribution = self.tbl_changes.c.distribution,
+                                 urgency = self.tbl_changes.c.urgency,
+                                 maintainer = self.tbl_changes.c.maintainer,
+                                 changedby = self.tbl_changes.c.changedby,
+                                 date = self.tbl_changes.c.date,
+                                 version = self.tbl_changes.c.version,
                                  files = relation(ChangePendingFile,
                                                   secondary=self.tbl_changes_pending_files_map,
                                                   backref="changesfile"),
@@ -2847,7 +2878,12 @@ class DBConn(object):
                properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
 
         mapper(ChangePendingFile, self.tbl_changes_pending_files,
-               properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id))
+               properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
+                                 filename = self.tbl_changes_pending_files.c.filename,
+                                 size = self.tbl_changes_pending_files.c.size,
+                                 md5sum = self.tbl_changes_pending_files.c.md5sum,
+                                 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
+                                 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
 
         mapper(ChangePendingSource, self.tbl_changes_pending_source,
                properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
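
Two of the changes above alter behaviour in ways worth illustrating. The LIKE-pattern fix in get_poolfile_like_name() narrows the match from "filename appears anywhere in the path" to "filename is exactly the last path component". A minimal sketch of the difference, assuming the PoolFile mapping from this module and an open SQLAlchemy session (the filename is hypothetical):

    # old: '%%%s%%' % 'foo_1.0.dsc'  ->  LIKE '%foo_1.0.dsc%'
    #      also matches e.g. 'pool/main/f/foo/foo_1.0.dsc.asc'
    # new: '%%/%s' % 'foo_1.0.dsc'   ->  LIKE '%/foo_1.0.dsc'
    #      matches only rows whose basename is exactly 'foo_1.0.dsc'
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % 'foo_1.0.dsc'))

The clean_and_update() signature change means every caller must now pass a Logger whose log() method accepts a list of strings, as the new body assumes; dak's daklib.daklog.Logger has that interface, but any object with a compatible log() works. A hypothetical call site (SimpleLogger and the queue name are illustrative, not part of the commit):

    from datetime import datetime

    class SimpleLogger(object):
        # stand-in for daklib.daklog.Logger: log() receives a list of fragments
        def log(self, details):
            print " ".join(details)

    bq = get_build_queue('buildd-unstable')   # helper defined in this module
    if bq:
        bq.clean_and_update(datetime.now(), SimpleLogger(), dryrun=True)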