X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fcontents.py;h=69000acf816c845305e333efb7ae711a97e331af;hb=bca08bd7434a318772a17ff4d4a9ecbbfee0fca8;hp=a158e8fcd4bb7d2ed27e8547b79b03d1cbd84b8a;hpb=93e9bcb9ac5817c7ed6ff5603e43aa75ff0e3578;p=dak.git

diff --git a/daklib/contents.py b/daklib/contents.py
index a158e8fc..69000acf 100755
--- a/daklib/contents.py
+++ b/daklib/contents.py
@@ -258,10 +258,10 @@ def generate_helper(suite_id, arch_id, overridetype_id, component_id = None):
     return log_message
 
 
-class ContentsScanner(object):
+class BinaryContentsScanner(object):
     '''
-    ContentsScanner provides a threadsafe method scan() to scan the contents of
-    a DBBinary object.
+    BinaryContentsScanner provides a threadsafe method scan() to scan the
+    contents of a DBBinary object.
     '''
     def __init__(self, binary_id):
         '''
@@ -302,18 +302,18 @@ class ContentsScanner(object):
         processed = query.count()
         pool = Pool()
         for binary in query.yield_per(100):
-            pool.apply_async(scan_helper, (binary.binary_id, ))
+            pool.apply_async(binary_scan_helper, (binary.binary_id, ))
         pool.close()
         pool.join()
         remaining = remaining()
         session.close()
         return { 'processed': processed, 'remaining': remaining }
 
-def scan_helper(binary_id):
+def binary_scan_helper(binary_id):
     '''
     This function runs in a subprocess.
     '''
-    scanner = ContentsScanner(binary_id)
+    scanner = BinaryContentsScanner(binary_id)
     scanner.scan()
 
 
@@ -376,3 +376,63 @@ class UnpackedSource(object):
         Enforce cleanup.
         '''
         self.cleanup()
+
+
+class SourceContentsScanner(object):
+    '''
+    SourceContentsScanner provides a method scan() to scan the contents of a
+    DBSource object.
+    '''
+    def __init__(self, source_id):
+        '''
+        The argument source_id is the id of the DBSource object that
+        should be scanned.
+        '''
+        self.source_id = source_id
+
+    def scan(self):
+        '''
+        This method does the actual scan and fills in the associated SrcContents
+        property. It commits any changes to the database.
+        '''
+        session = DBConn().session()
+        source = session.query(DBSource).get(self.source_id)
+        fileset = set(source.scan_contents())
+        for filename in fileset:
+            source.contents.append(SrcContents(file = filename))
+        session.commit()
+        session.close()
+
+    @classmethod
+    def scan_all(class_, limit = None):
+        '''
+        The class method scan_all() scans all source using multiple processes.
+        The number of sources to be scanned can be limited with the limit
+        argument. Returns the number of processed and remaining packages as a
+        dict.
+        '''
+        session = DBConn().session()
+        query = session.query(DBSource).filter(DBSource.contents == None)
+        remaining = query.count
+        if limit is not None:
+            query = query.limit(limit)
+        processed = query.count()
+        pool = Pool()
+        for source in query.yield_per(100):
+            pool.apply_async(source_scan_helper, (source.source_id, ))
+        pool.close()
+        pool.join()
+        remaining = remaining()
+        session.close()
+        return { 'processed': processed, 'remaining': remaining }
+
+def source_scan_helper(source_id):
+    '''
+    This function runs in a subprocess.
+    '''
+    try:
+        scanner = SourceContentsScanner(source_id)
+        scanner.scan()
+    except Exception, e:
+        print e
+
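
Usage note (not part of the patch above): a minimal sketch, assuming the patch is applied and daklib is importable, of how a caller could drive the new scanner through its scan_all() class method. The wrapper function name, the limit value, and the printed summary are illustrative assumptions; only daklib.contents.SourceContentsScanner.scan_all() itself comes from the diff.

# Sketch only; the wrapper below is hypothetical, not part of dak.
from daklib.contents import SourceContentsScanner

def scan_source_contents(limit = None):
    # scan_all() selects DBSource rows that have no contents yet, scans them
    # in worker subprocesses via multiprocessing.Pool, and returns a dict
    # with 'processed' and 'remaining' counts.
    result = SourceContentsScanner.scan_all(limit)
    print 'processed %(processed)s, remaining %(remaining)s source packages' % result

if __name__ == '__main__':
    scan_source_contents(limit = 100)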