X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fdbconn.py;h=84d5b1c2c059fd586793bdbe54d1b0f0468a95d6;hb=4a9a9aaed763d95ac9d2700b9ebd60539b47c75b;hp=8543ab101642dca14056a4d77fba72bc6091f24e;hpb=c55d086c0c8463fcd2d8dcee2dcc6c414ee36d91;p=dak.git

diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index 8543ab10..84d5b1c2 100644
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -477,7 +477,7 @@ class BuildQueue(object):
     def __repr__(self):
         return '<BuildQueue %s>' % self.queue_name
 
-    def write_metadata(self, ourtime, force=False):
+    def write_metadata(self, starttime, force=False):
         # Do we write out metafiles?
         if not (force or self.generate_metadata):
             return
@@ -491,8 +491,7 @@ class BuildQueue(object):
 
         try:
             # Grab files we want to include
-            newer = session.query(BuildQueueFile).filter_by(build_queue_id = 1).filter(BuildQueueFile.lastused > ourtime).all()
-
+            newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
             # Write file list with newer files
             (fl_fd, fl_name) = mkstemp()
             for n in newer:
@@ -504,18 +503,26 @@ class BuildQueue(object):
             (ac_fd, ac_name) = mkstemp()
             os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
                                                 'filelist': fl_name})
-            os.close()
+            os.close(ac_fd)
 
             # Run apt-ftparchive generate
-            os.chdir(os.path.dirname(fl_name))
-            os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(fl_name))
+            os.chdir(os.path.dirname(ac_name))
+            os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
 
             # Run apt-ftparchive release
             # TODO: Eww - fix this
             bname = os.path.basename(self.path)
             os.chdir(self.path)
             os.chdir('..')
-            os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="${archs}" release %s > Release""", [self.origin, self.label, self.releasedescription, arches, bname])
+
+            # We have to remove the Release file otherwise it'll be included in the
+            # new one
+            try:
+                os.unlink(os.path.join(bname, 'Release'))
+            except OSError:
+                pass
+
+            os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
 
             # Sign if necessary
             if self.signingkey:
@@ -558,24 +565,23 @@ class BuildQueue(object):
             except OSError:
                 pass
 
-    def clean_and_update(self, starttime, dryrun=False):
+    def clean_and_update(self, starttime, Logger, dryrun=False):
         """WARNING: This routine commits for you"""
         session = DBConn().session().object_session(self)
 
-        ourtime = starttime + timedelta(seconds=self.stay_of_execution)
-
-        if self.generate_metadata:
-            self.write_metadata(ourtime)
+        if self.generate_metadata and not dryrun:
+            self.write_metadata(starttime)
 
         # Grab files older than our execution time
-        older = session.query(BuildQueueFile).filter_by(build_queue_id = 1).filter(BuildQueueFile.lastused <= ourtime).all()
+        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
 
         for o in older:
             killdb = False
             try:
                 if dryrun:
-                    print "I: Would have removed %s from the queue" % o.fullpath
+                    Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
                 else:
+                    Logger.log(["I: Removing %s from the queue" % o.fullpath])
                     os.unlink(o.fullpath)
                     killdb = True
             except OSError, e:
@@ -584,13 +590,29 @@ class BuildQueue(object):
                     killdb = True
                 else:
                     # TODO: Replace with proper logging call
-                    print "E: Could not remove %s" % o.fullpath
+                    Logger.log(["E: Could not remove %s" % o.fullpath])
 
             if killdb:
                 session.delete(o)
 
         session.commit()
 
+        for f in os.listdir(self.path):
+            if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release'):
+                continue
+
+            try:
+                r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
+            except NoResultFound:
+                fp = os.path.join(self.path, f)
+                if dryrun:
+                    Logger.log(["I: Would remove unused link %s" % fp])
+                else:
+                    Logger.log(["I: Removing unused link %s" % fp])
+                    try:
+                        os.unlink(fp)
+                    except OSError:
+                        Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
+
     def add_file_from_pool(self, poolfile):
         """Copies a file into the pool.  Assumes that the PoolFile object is