from sqlalchemy.exc import *
from sqlalchemy.orm.exc import NoResultFound
+# Only import Config until Queue stuff is changed to store its config
+# in the database
from config import Config
from textutils import fix_maintainer
(ac_fd, ac_name) = mkstemp()
os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
'filelist': fl_name})
- os.close()
+ os.close(ac_fd)
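+ # (mkstemp() hands back a raw OS-level descriptor, hence os.close()
+ # with an explicit fd rather than a file object's close() method)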
# Run apt-ftparchive generate
os.chdir(os.path.dirname(ac_name))
bname = os.path.basename(self.path)
os.chdir(self.path)
os.chdir('..')
- os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="${archs}" release %s > Release""", [self.origin, self.label, self.releasedescription, arches, bname])
+
+ # We have to remove the Release file otherwise it'll be included in the
+ # new one
+ try:
+ os.unlink(os.path.join(bname, 'Release'))
+ except OSError:
+ # the file may simply not exist yet, e.g. on a freshly created queue
+ pass
+
+ os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
# Sign if necessary
if self.signingkey:
except OSError:
pass
- def clean_and_update(self, starttime, dryrun=False):
+ def clean_and_update(self, starttime, Logger, dryrun=False):
"""WARNING: This routine commits for you"""
session = DBConn().session().object_session(self)
- if self.generate_metadata:
+ if self.generate_metadata and not dryrun:
self.write_metadata(starttime)
# Grab files older than our execution time
- older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
+ older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
for o in older:
killdb = False
try:
if dryrun:
- print "I: Would have removed %s from the queue"
+ Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
else:
- print "I: Removing %s from the queue"
+ Logger.log(["I: Removing %s from the queue" % o.fullpath])
os.unlink(o.fullpath)
killdb = True
except OSError, e:
# If the file was already gone, that's fine (ENOENT here is
# assumed to come from the errno module)
if e.errno == ENOENT:
killdb = True
else:
- # TODO: Replace with proper logging call
- print "E: Could not remove %s" % o.fullpath
+ Logger.log(["E: Could not remove %s" % o.fullpath])
if killdb:
session.delete(o)
except NoResultFound:
fp = os.path.join(self.path, f)
if dryrun:
- print "I: Would remove unused link %s" % fp
+ Logger.log(["I: Would remove unused link %s" % fp])
else:
- print "I: Removing unused link %s" % fp
+ Logger.log(["I: Removing unused link %s" % fp])
try:
os.unlink(fp)
except OSError:
- print "E: Failed to unlink unreferenced file %s" % r.fullpath
+ Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
def add_file_from_pool(self, poolfile):
"""Copies a file into the pool. Assumes that the PoolFile object is
try:
# Insert paths
- pathcache = {}
- for fullpath in fullpaths:
- if fullpath.startswith( './' ):
- fullpath = fullpath[2:]
- session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )", { 'filename': fullpath, 'id': binary_id} )
+ def generate_path_dicts():
+ for fullpath in fullpaths:
+ if fullpath.startswith( './' ):
+ fullpath = fullpath[2:]
+
+ yield {'filename': fullpath, 'id': binary_id}
+
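+ # session.execute() performs an executemany when handed a list of
+ # bind dicts, so materialise the generator with list()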
+ session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
+ generate_path_dicts() )
session.commit()
if privatetrans:
################################################################################
-class PendingContentAssociation(object):
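+# The contents classes below carry no behaviour of their own; their
+# attributes (package, filename and so on) are supplied by the
+# SQLAlchemy mappers set up further down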
+class DebContents(object):
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def __repr__(self):
+ return '<DebContents %s: %s>' % (self.package, self.filename)
+
+__all__.append('DebContents')
+
+
+class UdebContents(object):
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def __repr__(self):
+ return '<UdebContents %s: %s>' % (self.package, self.filename)
+
+__all__.append('UdebContents')
+
+class PendingBinContents(object):
def __init__(self, *args, **kwargs):
pass
def __repr__(self):
- return '<PendingContentAssociation %s>' % self.pca_id
+ return '<PendingBinContents %s>' % self.contents_id
-__all__.append('PendingContentAssociation')
+__all__.append('PendingBinContents')
-def insert_pending_content_paths(package, fullpaths, session=None):
+def insert_pending_content_paths(package,
+ is_udeb,
+ fullpaths,
+ session=None):
"""
Make sure given paths are temporarily associated with given
package
arch_id = arch.arch_id
# Remove any already existing recorded files for this package
- q = session.query(PendingContentAssociation)
+ q = session.query(PendingBinContents)
q = q.filter_by(package=package['Package'])
q = q.filter_by(version=package['Version'])
q = q.filter_by(architecture=arch_id)
q.delete()
- # Insert paths
- pathcache = {}
for fullpath in fullpaths:
- (path, filename) = os.path.split(fullpath)
-
- if path.startswith( "./" ):
- path = path[2:]
- filepath_id = get_or_set_contents_path_id(path, session)
- filename_id = get_or_set_contents_file_id(filename, session)
-
- pathcache[fullpath] = (filepath_id, filename_id)
+ if fullpath.startswith( "./" ):
+ fullpath = fullpath[2:]
- for fullpath, dat in pathcache.items():
- pca = PendingContentAssociation()
+ pca = PendingBinContents()
pca.package = package['Package']
pca.version = package['Version']
- pca.filepath_id = dat[0]
- pca.filename_id = dat[1]
+ pca.filename = fullpath
pca.architecture = arch_id
+
+ if is_udeb:
+ pca.otype = 8 # gross: hard-coded type id for udeb contents
+ else:
+ pca.otype = 7 # also gross: the deb equivalent
session.add(pca)
# Only commit if we set up the session ourselves
'binaries',
'binary_acl',
'binary_acl_map',
+ 'bin_contents',
'build_queue',
'build_queue_files',
'component',
'config',
- 'content_associations',
- 'content_file_names',
- 'content_file_paths',
'changes_pending_binaries',
'changes_pending_files',
'changes_pending_files_map',
'changes_pending_source',
'changes_pending_source_files',
'changes_pool_files',
+ 'deb_contents',
'dsc_files',
'files',
'fingerprint',
'new_comments',
'override',
'override_type',
- 'pending_content_associations',
+ 'pending_bin_contents',
'policy_queue',
'priority',
'section',
'suite_architectures',
'suite_src_formats',
'suite_build_queue_copy',
+ 'udeb_contents',
'uid',
'upload_blocks',
)
binary_id = self.tbl_bin_associations.c.bin,
binary = relation(DBBinary)))
mapper(BuildQueue, self.tbl_build_queue,
properties = dict(queue_id = self.tbl_build_queue.c.id))

mapper(BuildQueueFile, self.tbl_build_queue_files,
properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
poolfile = relation(PoolFile, backref='buildqueueinstances')))
+
+ mapper(PendingBinContents, self.tbl_pending_bin_contents,
+ properties = dict(contents_id = self.tbl_pending_bin_contents.c.id,
+ filename = self.tbl_pending_bin_contents.c.filename,
+ package = self.tbl_pending_bin_contents.c.package,
+ version = self.tbl_pending_bin_contents.c.version,
+ architecture = self.tbl_pending_bin_contents.c.arch,
+ otype = self.tbl_pending_bin_contents.c.type))
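+ # Note: the id and type columns are exposed as contents_id and otype,
+ # presumably to avoid shadowing builtins; callers must use the mapped
+ # names (see insert_pending_content_paths above)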
+
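+ # deb_contents and udeb_contents are parallel tables, so the two
+ # mappers below differ only in the table they reference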
+ mapper(DebContents, self.tbl_deb_contents,
+ properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
+ package=self.tbl_deb_contents.c.package,
+ component=self.tbl_deb_contents.c.component,
+ arch=self.tbl_deb_contents.c.arch,
+ section=self.tbl_deb_contents.c.section,
+ filename=self.tbl_deb_contents.c.filename))
+
+ mapper(UdebContents, self.tbl_udeb_contents,
+ properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
+ package=self.tbl_udeb_contents.c.package,
+ component=self.tbl_udeb_contents.c.component,
+ arch=self.tbl_udeb_contents.c.arch,
+ section=self.tbl_udeb_contents.c.section,
+ filename=self.tbl_udeb_contents.c.filename))
mapper(DBBinary, self.tbl_binaries,
properties = dict(binary_id = self.tbl_binaries.c.id,
source_files = relation(ChangePendingFile,
secondary=self.tbl_changes_pending_source_files,
backref="pending_sources")))
+ files = relation(KnownChangePendingFile, backref="changesfile")))
+
+ mapper(KnownChangePendingFile, self.tbl_changes_pending_files,
+ properties = dict(known_change_pending_file_id = self.tbl_changes_pending_files.c.id))
+
mapper(KeyringACLMap, self.tbl_keyring_acl_map,
properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
keyring = relation(Keyring, backref="keyring_acl_map"),
mapper(Override, self.tbl_override,
properties = dict(suite_id = self.tbl_override.c.suite,
suite = relation(Suite),
+ package = self.tbl_override.c.package,
component_id = self.tbl_override.c.component,
component = relation(Component),
priority_id = self.tbl_override.c.priority,
properties = dict(priority_id = self.tbl_priority.c.id))
mapper(Section, self.tbl_section,
- properties = dict(section_id = self.tbl_section.c.id))
+ properties = dict(section_id = self.tbl_section.c.id,
+ section=self.tbl_section.c.section))
mapper(DBSource, self.tbl_source,
properties = dict(source_id = self.tbl_source.c.id,