X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fdbconn.py;h=8da26063607d7d501b224162fb1630118b48a5ff;hb=80cfee07dd22dc9190eb05237db26a8e1514e1c6;hp=bd2356176e49e3d75fce13f67e6bed005dff71ec;hpb=ca14da13a68f3c03000ec77cf3855980e80007c9;p=dak.git

diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index bd235617..8da26063 100755
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -703,6 +703,7 @@ class BuildQueue(object):
         try:
             # Grab files we want to include
             newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
+            newer += session.query(BuildQueuePolicyFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueuePolicyFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
             # Write file list with newer files
             (fl_fd, fl_name) = mkstemp()
             for n in newer:
@@ -795,6 +796,7 @@ class BuildQueue(object):
 
         # Grab files older than our execution time
         older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
+        older += session.query(BuildQueuePolicyFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueuePolicyFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
 
         for o in older:
             killdb = False
@@ -822,9 +824,7 @@ class BuildQueue(object):
             if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
                 continue
 
-            try:
-                r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
-            except NoResultFound:
+            if not self.contains_filename(f):
                 fp = os.path.join(self.path, f)
                 if dryrun:
                     Logger.log(["I: Would remove unused link %s" % fp])
@@ -835,6 +835,18 @@ class BuildQueue(object):
                     except OSError:
-                        Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
+                        Logger.log(["E: Failed to unlink unreferenced file %s" % fp])
 
+    def contains_filename(self, filename):
+        """
+        @rtype: Boolean
+        @return: True if filename is supposed to be in the queue; False otherwise
+        """
+        session = DBConn().session().object_session(self)
+        if session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id, filename = filename).count() > 0:
+            return True
+        elif session.query(BuildQueuePolicyFile).filter_by(build_queue = self, filename = filename).count() > 0:
+            return True
+        return False
+
     def add_file_from_pool(self, poolfile):
         """Copies a file into the pool.  Assumes that the PoolFile object is
         attached to the same SQLAlchemy session as the Queue object is.
@@ -844,8 +856,8 @@ class BuildQueue(object):
 
         # Check if we have a file of this name or this ID already
         for f in self.queuefiles:
-            if f.fileid is not None and f.fileid == poolfile.file_id or \
-               f.poolfile.filename == poolfile_basename:
+            if (f.fileid is not None and f.fileid == poolfile.file_id) or \
+               (f.poolfile is not None and f.poolfile.filename == poolfile_basename):
                 # In this case, update the BuildQueueFile entry so we
                 # don't remove it too early
                 f.lastused = datetime.now()
@@ -879,6 +891,61 @@ class BuildQueue(object):
 
         return qf
 
+    def add_changes_from_policy_queue(self, policyqueue, changes):
+        """
+        Copies a changes file from a policy queue together with its poolfiles.
+
+        @type policyqueue: PolicyQueue
+        @param policyqueue: policy queue to copy the changes from
+
+        @type changes: DBChange
+        @param changes: changes to copy to this build queue
+        """
+        for policyqueuefile in changes.files:
+            self.add_file_from_policy_queue(policyqueue, policyqueuefile)
+        for poolfile in changes.poolfiles:
+            self.add_file_from_pool(poolfile)
+
+    def add_file_from_policy_queue(self, policyqueue, policyqueuefile):
+        """
+        Copies a file from a policy queue.
+        Assumes that the policyqueuefile is attached to the same SQLAlchemy
+        session as the Queue object is.  The caller is responsible for
+        committing after calling this function.
+
+        @type policyqueue: PolicyQueue
+        @param policyqueue: policy queue to copy the file from
+
+        @type policyqueuefile: ChangePendingFile
+        @param policyqueuefile: file to be added to the build queue
+        """
+        session = DBConn().session().object_session(policyqueuefile)
+
+        # Is the file already there?
+        try:
+            f = session.query(BuildQueuePolicyFile).filter_by(build_queue=self, file=policyqueuefile).one()
+            f.lastused = datetime.now()
+            return f
+        except NoResultFound:
+            pass # continue below
+
+        # We have to add the file.
+        f = BuildQueuePolicyFile()
+        f.build_queue = self
+        f.file = policyqueuefile
+        f.filename = policyqueuefile.filename
+
+        source = os.path.join(policyqueue.path, policyqueuefile.filename)
+        target = f.fullpath
+        try:
+            # Always copy files from policy queues as they might move around.
+            import utils
+            utils.copy(source, target)
+        except OSError:
+            return None
+
+        session.add(f)
+        return f
 
 __all__.append('BuildQueue')
 
@@ -911,6 +978,10 @@ __all__.append('get_build_queue')
 ################################################################################
 
 class BuildQueueFile(object):
+    """
+    BuildQueueFile represents a file in a build queue coming from a pool.
+    """
+
     def __init__(self, *args, **kwargs):
         pass
 
@@ -926,6 +997,27 @@ __all__.append('BuildQueueFile')
 
 ################################################################################
 
+class BuildQueuePolicyFile(object):
+    """
+    BuildQueuePolicyFile represents a file in a build queue that comes from a
+    policy queue (and not a pool).
+    """
+
+    def __init__(self, *args, **kwargs):
+        pass
+
+    #@property
+    #def filename(self):
+    #    return self.file.filename
+
+    @property
+    def fullpath(self):
+        return os.path.join(self.build_queue.path, self.filename)
+
+__all__.append('BuildQueuePolicyFile')
+
+################################################################################
+
 class ChangePendingBinary(object):
     def __init__(self, *args, **kwargs):
         pass
@@ -2195,6 +2287,18 @@ __all__.append('get_sections')
 
 ################################################################################
 
+class SrcContents(ORMObject):
+    def __init__(self, file = None, source = None):
+        self.file = file
+        self.source = source
+
+    def properties(self):
+        return ['file', 'source']
+
+__all__.append('SrcContents')
+
+################################################################################
+
 from debian.debfile import Deb822
 
 # Temporary Deb822 subclass to fix bugs with : handling; see #597249
@@ -2266,7 +2370,7 @@ class DBSource(ORMObject):
     def properties(self):
         return ['source', 'source_id', 'maintainer', 'changedby', \
             'fingerprint', 'poolfile', 'version', 'suites_count', \
-            'install_date', 'binaries_count']
+            'install_date', 'binaries_count', 'uploaders_count']
 
     def not_null_constraints(self):
         return ['source', 'version', 'install_date', 'maintainer', \
@@ -2285,6 +2389,25 @@ class DBSource(ORMObject):
 
     metadata = association_proxy('key', 'value')
 
+    def scan_contents(self):
+        '''
+        Returns a set of names of non-directory files. The path names are
+        normalized after being converted from either utf-8 or iso8859-1
+        encoding.
+        '''
+        fullpath = self.poolfile.fullpath
+        from daklib.contents import UnpackedSource
+        unpacked = UnpackedSource(fullpath)
+        fileset = set()
+        for name in unpacked.get_all_filenames():
+            # enforce proper utf-8 encoding
+            try:
+                name.decode('utf-8')
+            except UnicodeDecodeError:
+                name = name.decode('iso8859-1').encode('utf-8')
+            fileset.add(name)
+        return fileset
+
 __all__.append('DBSource')
 
 @session_wrapper
@@ -2540,25 +2663,11 @@ def add_dsc_to_db(u, filename, session=None):
         session.add(df)
 
     # Add the src_uploaders to the DB
-    uploader_ids = [source.maintainer_id]
+    source.uploaders = [source.maintainer]
     if u.pkg.dsc.has_key("uploaders"):
         for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
             up = up.strip()
-            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
-
-    added_ids = {}
-    for up_id in uploader_ids:
-        if added_ids.has_key(up_id):
-            import utils
-            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
-            continue
-
-        added_ids[up_id]=1
-
-        su = SrcUploader()
-        su.maintainer_id = up_id
-        su.source_id = source.source_id
-        session.add(su)
+            source.uploaders.append(get_or_set_maintainer(up, session))
 
     session.flush()
 
@@ -2660,17 +2769,6 @@ __all__.append('SrcFormat')
 
 ################################################################################
 
-class SrcUploader(object):
-    def __init__(self, *args, **kwargs):
-        pass
-
-    def __repr__(self):
-        return '<SrcUploader %s>' % self.uploader_id
-
-__all__.append('SrcUploader')
-
-################################################################################
-
 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                  ('SuiteID', 'suite_id'),
                  ('Version', 'version'),
@@ -3074,6 +3172,7 @@ class DBConn(object):
         'binary_acl_map',
         'build_queue',
         'build_queue_files',
+        'build_queue_policy_files',
         'changelogs_text',
         'changes',
         'component',
@@ -3104,6 +3203,7 @@ class DBConn(object):
         'source_acl',
         'source_metadata',
         'src_associations',
+        'src_contents',
         'src_format',
         'src_uploaders',
         'suite',
@@ -3119,7 +3219,6 @@ class DBConn(object):
         'almost_obsolete_all_associations',
         'almost_obsolete_src_associations',
         'any_associations_source',
-        'bin_assoc_by_arch',
         'bin_associations_binaries',
         'binaries_suite_arch',
         'binfiles_suite_component_arch',
@@ -3167,6 +3266,11 @@ class DBConn(object):
                properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                                  poolfile = relation(PoolFile, backref='buildqueueinstances')))
 
+        mapper(BuildQueuePolicyFile, self.tbl_build_queue_policy_files,
+               properties = dict(
+                build_queue = relation(BuildQueue, backref='policy_queue_files'),
+                file = relation(ChangePendingFile, lazy='joined')))
+
         mapper(DBBinary, self.tbl_binaries,
                properties = dict(binary_id = self.tbl_binaries.c.id,
                                  package = self.tbl_binaries.c.package,
@@ -3358,7 +3462,8 @@ class DBConn(object):
                                      primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                  suites = relation(Suite, secondary=self.tbl_src_associations,
                                      backref=backref('sources', lazy='dynamic')),
-                                 srcuploaders = relation(SrcUploader),
+                                 uploaders = relation(Maintainer,
+                                      secondary=self.tbl_src_uploaders),
                                  key = relation(SourceMetadata, cascade='all',
                                      collection_class=attribute_mapped_collection('key'))),
                extension = validator)
@@ -3370,15 +3475,6 @@ class DBConn(object):
                properties = dict(src_format_id = self.tbl_src_format.c.id,
                                  format_name = self.tbl_src_format.c.format_name))
 
-        mapper(SrcUploader, self.tbl_src_uploaders,
-               properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
-                                 source_id = self.tbl_src_uploaders.c.source,
-                                 source = relation(DBSource,
-                                     primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
-                                 maintainer_id = self.tbl_src_uploaders.c.maintainer,
-                                 maintainer = relation(Maintainer,
-                                     primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
-
         mapper(Suite, self.tbl_suite,
                properties = dict(suite_id = self.tbl_suite.c.id,
                                  policy_queue = relation(PolicyQueue),
@@ -3408,6 +3504,12 @@ class DBConn(object):
                                      backref=backref('contents', lazy='dynamic', cascade='all')),
                file = self.tbl_bin_contents.c.file))
 
+        mapper(SrcContents, self.tbl_src_contents,
+               properties = dict(
+                   source = relation(DBSource,
+                       backref=backref('contents', lazy='dynamic', cascade='all')),
+                   file = self.tbl_src_contents.c.file))
+
         mapper(MetadataKey, self.tbl_metadata_keys,
                properties = dict(
                    key_id = self.tbl_metadata_keys.c.key_id,
@@ -3488,12 +3590,21 @@ class DBConn(object):
         self.__setupmappers()
         self.pid = os.getpid()
 
-    def session(self):
+    def session(self, work_mem = 0):
+        '''
+        Returns a new session object. If a work_mem parameter is provided, a
+        new transaction is started and work_mem is set for that transaction.
+        The work_mem parameter is measured in MB. The server's default value
+        is used if the parameter is not set.
+        '''
         # reinitialize DBConn in new processes
        if self.pid != os.getpid():
            clear_mappers()
            self.__createconn()
-        return self.db_smaker()
+        session = self.db_smaker()
+        if work_mem > 0:
+            session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
+        return session
 
 __all__.append('DBConn')
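
Taken together, the hunks above teach build queues to track files coming from policy queues (BuildQueuePolicyFile) alongside pool files, replace the SrcUploader mapper with a plain many-to-many uploaders relation on DBSource, add source-contents scanning, and allow a per-transaction PostgreSQL work_mem. A minimal usage sketch, not part of the patch, follows; it assumes the get_policy_queue() helper and the DBChange class with its changesname column that exist elsewhere in dbconn.py, and the queue and file names are invented for illustration.

    from daklib.dbconn import DBConn, DBChange, get_build_queue, get_policy_queue

    # Request 64 MB of work_mem; SET LOCAL limits it to this transaction.
    session = DBConn().session(work_mem = 64)

    # Invented names: both queues would have to exist in the database.
    policy_queue = get_policy_queue('embargoed', session)
    build_queue = get_build_queue('buildd-embargoed', session)
    changes = session.query(DBChange).filter_by(changesname = 'dak_1.0-1_amd64.changes').one()

    # Copy the upload into the build queue: pending files are recorded as
    # BuildQueuePolicyFile rows, pool files as BuildQueueFile rows.
    build_queue.add_changes_from_policy_queue(policy_queue, changes)

    # clean() now consults both tables via contains_filename() before it
    # unlinks anything it finds in the queue directory.
    for pending in changes.files:
        assert build_queue.contains_filename(pending.filename)

    session.commit()

Note that add_file_from_policy_queue() copies rather than links: as its comment says, files in a policy queue may move around, so the build queue keeps its own copy.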