+ # NOTE(review): diff hunk; the leading '+' is patch markup and the original
+ # indentation appears collapsed, so the flat layout below is the patch's,
+ # not the program's.  This is the body of a changelog-export routine whose
+ # 'def' line is outside this chunk; it reads session/archive/clpool/progress
+ # from the enclosing scope -- confirm against the applied file.
+ # Python 2 only: dict.has_key() and print statements below.
+ pool = os.path.join(archive.path, 'pool')
+
+ # sources maps source name -> {suite: (version without epoch, pool filename)}
+ sources = {}
+ # unpack maps a source package's pool path -> (export dir, set of suites)
+ unpack = {}
+ # debian/* files exported next to the changelog
+ files = ('changelog', 'copyright', 'NEWS.Debian', 'README.Debian')
+ stats = {'unpack': 0, 'created': 0, 'removed': 0, 'errors': 0, 'files': 0}
+ # Newest version of each source per suite of this archive, plus its
+ # component-qualified pool filename.
+ query = """SELECT DISTINCT s.source, su.suite_name AS suite, s.version, c.name || '/' || f.filename AS filename
+ FROM source s
+ JOIN newest_source n ON n.source = s.source AND n.version = s.version
+ JOIN src_associations sa ON sa.source = s.id
+ JOIN suite su ON su.id = sa.suite
+ JOIN files f ON f.id = s.file
+ JOIN files_archive_map fam ON f.id = fam.file_id AND fam.archive_id = su.archive_id
+ JOIN component c ON fam.component_id = c.id
+ WHERE su.archive_id = :archive_id
+ ORDER BY s.source, suite"""
+
+ # Row layout: p = (source, suite, version, component/filename).
+ for p in session.execute(query, {'archive_id': archive.archive_id}):
+ if not sources.has_key(p[0]):
+ sources[p[0]] = {}
+ sources[p[0]][p[1]] = (re_no_epoch.sub('', p[2]), p[3])
+
+ # First pass: ensure each source's export directory exists.  If the
+ # '<source>_<version>_changelog' file is missing, queue the package for
+ # unpacking; otherwise just refresh the per-suite hardlinks pointing at
+ # the already-exported versioned files.
+ for p in sources.keys():
+ for s in sources[p].keys():
+ path = os.path.join(clpool, '/'.join(sources[p][s][1].split('/')[:-1]))
+ if not os.path.exists(path):
+ os.makedirs(path)
+ if not os.path.exists(os.path.join(path, \
+ '%s_%s_changelog' % (p, sources[p][s][0]))):
+ if not unpack.has_key(os.path.join(pool, sources[p][s][1])):
+ unpack[os.path.join(pool, sources[p][s][1])] = (path, set())
+ unpack[os.path.join(pool, sources[p][s][1])][1].add(s)
+ else:
+ # NOTE(review): 'file' shadows the builtin.  The suite link
+ # name is '<suite><suffix>' taken from the versioned file.
+ for file in glob('%s/%s_%s*' % (path, p, sources[p][s][0])):
+ link = '%s%s' % (s, file.split('%s_%s' \
+ % (p, sources[p][s][0]))[1])
+ # Recreate the link unconditionally; unlink may
+ # legitimately fail when it does not exist yet.
+ try:
+ os.unlink(os.path.join(path, link))
+ except OSError:
+ pass
+ os.link(os.path.join(path, file), os.path.join(path, link))
+
+ # Second pass: unpack every queued source package and hardlink each
+ # matching debian/* file twice -- once under its versioned name and once
+ # per suite.
+ for p in unpack.keys():
+ # package = [name, version] parsed from '<name>_<version>.<ext>'.
+ package = os.path.splitext(os.path.basename(p))[0].split('_')
+ try:
+ unpacked = UnpackedSource(p)
+ tempdir = unpacked.get_root_directory()
+ stats['unpack'] += 1
+ # Lightweight progress ticker on stderr.
+ if progress:
+ if stats['unpack'] % 100 == 0:
+ sys.stderr.write('%d packages unpacked\n' % stats['unpack'])
+ elif stats['unpack'] % 10 == 0:
+ sys.stderr.write('.')
+ for file in files:
+ for f in glob(os.path.join(tempdir, 'debian', '*%s' % file)):
+ for s in unpack[p][1]:
+ suite = os.path.join(unpack[p][0], '%s_%s' \
+ % (s, os.path.basename(f)))
+ version = os.path.join(unpack[p][0], '%s_%s_%s' % \
+ (package[0], package[1], os.path.basename(f)))
+ if not os.path.exists(version):
+ os.link(f, version)
+ stats['created'] += 1
+ try:
+ os.unlink(suite)
+ except OSError:
+ pass
+ os.link(version, suite)
+ stats['created'] += 1
+ unpacked.cleanup()
+ except Exception as e:
+ # Best effort: a broken source package must not abort the whole
+ # export run; report it, count it, continue.
+ print 'make-changelog: unable to unpack %s\n%s' % (p, e)
+ stats['errors'] += 1
+
+ # Cleanup pass (bottom-up walk): remove whole directories of sources that
+ # vanished from the archive, unlink files whose hardlink count fell to 1
+ # (their versioned twin is gone), and prune now-empty directories.
+ # 'filelist' and 'rmtree' come from the enclosing module -- presumably
+ # the export-index filename and shutil.rmtree; verify there.
+ for root, dirs, files in os.walk(clpool, topdown=False):
+ files = [f for f in files if f != filelist]
+ if len(files):
+ if root != clpool:
+ if root.split('/')[-1] not in sources.keys():
+ if os.path.exists(root):
+ stats['removed'] += len(os.listdir(root))
+ rmtree(root)
+ for file in files:
+ if os.path.exists(os.path.join(root, file)):
+ if os.stat(os.path.join(root, file)).st_nlink == 1:
+ stats['removed'] += 1
+ os.unlink(os.path.join(root, file))
+ for dir in dirs:
+ try:
+ os.rmdir(os.path.join(root, dir))
+ except OSError:
+ pass
+ stats['files'] += len(files)
+ # NOTE(review): 'removed' also counts entries deleted via rmtree above,
+ # so this can over-subtract from 'files' -- confirm intended accounting.
+ stats['files'] -= stats['removed']
+
+ print 'make-changelog: file exporting finished'
+ print ' * New packages unpacked: %d' % stats['unpack']
+ print ' * New files created: %d' % stats['created']
+ print ' * New files removed: %d' % stats['removed']
+ print ' * Unpack errors: %d' % stats['errors']
+ print ' * Files available into changelog pool: %d' % stats['files']
+
+def generate_export_filelist(clpool):
+ clfiles = {}
+ for root, dirs, files in os.walk(clpool):
+ for file in [f for f in files if f != filelist]:
+ clpath = os.path.join(root, file).replace(clpool, '').strip('/')
+ source = clpath.split('/')[2]
+ elements = clpath.split('/')[3].split('_')
+ if source not in clfiles:
+ clfiles[source] = {}
+ if elements[0] == source:
+ if elements[1] not in clfiles[source]:
+ clfiles[source][elements[1]] = []
+ clfiles[source][elements[1]].append(clpath)
+ else:
+ if elements[0] not in clfiles[source]:
+ clfiles[source][elements[0]] = []
+ clfiles[source][elements[0]].append(clpath)
+ with open(os.path.join(clpool, filelist), 'w+') as fd:
+ safe_dump(clfiles, fd, default_flow_style=False)