X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fdbconn.py;h=25ba49e1e3a3e61b9046fd9de450f4ca8033b4de;hb=b5b25234382f20595be0b8dad19f6502fb704b52;hp=4695677849aae10902f1824eb60e981ea815ab4d;hpb=408245228ba96e5bad24738b9432bd78079178f6;p=dak.git

diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index 46956778..25ba49e1 100755
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -37,6 +37,7 @@ import os
 import re
 import psycopg2
 import traceback
+import commands
 from datetime import datetime, timedelta
 from errno import ENOENT
 from tempfile import mkstemp, mkdtemp
@@ -52,6 +53,8 @@ from sqlalchemy import types as sqltypes
 from sqlalchemy.exc import *
 from sqlalchemy.orm.exc import NoResultFound
 
+# Only import Config until Queue stuff is changed to store its config
+# in the database
 from config import Config
 from textutils import fix_maintainer
 
@@ -69,11 +72,11 @@ class DebVersion(sqltypes.Text):
         return "DEBVERSION"
 
 sa_major_version = sqlalchemy.__version__[0:3]
-if sa_major_version == "0.5":
+if sa_major_version in ["0.5", "0.6"]:
     from sqlalchemy.databases import postgres
     postgres.ischema_names['debversion'] = DebVersion
 else:
-    raise Exception("dak isn't ported to SQLA versions != 0.5 yet. See daklib/dbconn.py")
+    raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
 
 ################################################################################
 
@@ -439,8 +442,8 @@ MINIMAL_APT_CONF="""
 Dir
 {
    ArchiveDir "%(archivepath)s";
-   OverrideDir "/srv/ftp.debian.org/scripts/override/";
-   CacheDir "/srv/ftp.debian.org/database/";
+   OverrideDir "%(overridedir)s";
+   CacheDir "%(cachedir)s";
 };
 
 Default
@@ -502,11 +505,16 @@ class BuildQueue(object):
                 os.write(fl_fd, '%s\n' % n.fullpath)
             os.close(fl_fd)
 
+            cnf = Config()
+
             # Write minimal apt.conf
             # TODO: Remove hardcoding from template
             (ac_fd, ac_name) = mkstemp()
             os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
-                                                'filelist': fl_name})
+                                                'filelist': fl_name,
+                                                'cachedir': cnf["Dir::Cache"],
+                                                'overridedir': cnf["Dir::Override"],
+                                                })
             os.close(ac_fd)
 
             # Run apt-ftparchive generate
@@ -528,9 +536,14 @@ class BuildQueue(object):
             os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
 
+            # Crude hack with open and append, but this whole section is and should be redone.
+            if self.notautomatic:
+                release=open("Release", "a")
+                release.write("NotAutomatic: yes")
+                release.close()
+
             # Sign if necessary
             if self.signingkey:
-                cnf = Config()
                 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
                 if cnf.has_key("Dinstall::SigningPubKeyring"):
                     keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
@@ -602,7 +615,7 @@ class BuildQueue(object):
             session.commit()
 
         for f in os.listdir(self.path):
-            if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release'):
+            if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
                 continue
 
             try:
@@ -965,12 +978,16 @@ def insert_content_paths(binary_id, fullpaths, session=None):
 
     try:
         # Insert paths
-        pathcache = {}
-        for fullpath in fullpaths:
-            if fullpath.startswith( './' ):
-                fullpath = fullpath[2:]
+        def generate_path_dicts():
+            for fullpath in fullpaths:
+                if fullpath.startswith( './' ):
+                    fullpath = fullpath[2:]
 
-            session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )", { 'filename': fullpath, 'id': binary_id} )
+                yield {'filename':fullpath, 'id': binary_id }
+
+        for d in generate_path_dicts():
+            session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
+                             d )
 
         session.commit()
         if privatetrans:
@@ -1499,7 +1516,7 @@ def get_location(location, component=None, archive=None, session=None):
     and archive
 
     @type location: string
-    @param location: the path of the location, e.g. I{/srv/ftp.debian.org/ftp/pool/}
+    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}
 
     @type component: string
     @param component: the component name (if None, no restriction applied)
@@ -1761,16 +1778,38 @@ __all__.append('get_override_type')
 
 ################################################################################
 
-class PendingContentAssociation(object):
+class DebContents(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<DebContents %s: %s>' % (self.package.package,self.file)
+
+__all__.append('DebContents')
+
+
+class UdebContents(object):
+    def __init__(self, *args, **kwargs):
+        pass
+
+    def __repr__(self):
+        return '<UdebContents %s: %s>' % (self.package.package,self.file)
+
+__all__.append('UdebContents')
+
+class PendingBinContents(object):
     def __init__(self, *args, **kwargs):
         pass
 
     def __repr__(self):
-        return '<PendingContentAssociation %s>' % self.pca_id
+        return '<PendingBinContents %s>' % self.contents_id
 
-__all__.append('PendingContentAssociation')
+__all__.append('PendingBinContents')
 
-def insert_pending_content_paths(package, fullpaths, session=None):
+def insert_pending_content_paths(package,
+                                 is_udeb,
+                                 fullpaths,
+                                 session=None):
     """
     Make sure given paths are temporarily associated with given
     package
 
@@ -1799,32 +1838,27 @@ def insert_pending_content_paths(package, fullpaths, session=None):
         arch_id = arch.arch_id
 
         # Remove any already existing recorded files for this package
-        q = session.query(PendingContentAssociation)
+        q = session.query(PendingBinContents)
         q = q.filter_by(package=package['Package'])
         q = q.filter_by(version=package['Version'])
         q = q.filter_by(architecture=arch_id)
         q.delete()
 
-        # Insert paths
-        pathcache = {}
         for fullpath in fullpaths:
-            (path, filename) = os.path.split(fullpath)
-
-            if path.startswith( "./" ):
-                path = path[2:]
-            filepath_id = get_or_set_contents_path_id(path, session)
-            filename_id = get_or_set_contents_file_id(filename, session)
-
-            pathcache[fullpath] = (filepath_id, filename_id)
+            if fullpath.startswith( "./" ):
+                fullpath = fullpath[2:]
 
-        for fullpath, dat in pathcache.items():
-            pca = PendingContentAssociation()
+            pca = PendingBinContents()
             pca.package = package['Package']
             pca.version = package['Version']
-            pca.filepath_id = dat[0]
-            pca.filename_id = dat[1]
+            pca.file = fullpath
             pca.architecture = arch_id
+
+            if is_udeb:
+                pca.type = 8 # gross
+            else:
+                pca.type = 7 # also gross
             session.add(pca)
 
         # Only commit if we set up the session ourself
@@ -1883,6 +1917,31 @@ def get_policy_queue(queuename, session=None):
 
 __all__.append('get_policy_queue')
 
+@session_wrapper
+def get_policy_queue_from_path(pathname, session=None):
+    """
+    Returns PolicyQueue object for given C{path name}
+
+    @type pathname: string
+    @param pathname: The path
+
+    @type session: Session
+    @param session: Optional SQLA session object (a temporary one will be
+    generated if not supplied)
+
+    @rtype: PolicyQueue
+    @return: PolicyQueue object for the given queue
+    """
+
+    q = session.query(PolicyQueue).filter_by(path=pathname)
+
+    try:
+        return q.one()
+    except NoResultFound:
+        return None
+
+__all__.append('get_policy_queue_from_path')
+
 ################################################################################
 
 class Priority(object):
@@ -2276,7 +2335,7 @@ def add_dsc_to_db(u, filename, session=None):
     # Add the src_uploaders to the DB
     uploader_ids = [source.maintainer_id]
     if u.pkg.dsc.has_key("uploaders"):
-        for up in u.pkg.dsc["uploaders"].split(","):
+        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
             up = up.strip()
             uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
 
@@ -2424,11 +2483,7 @@ SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                  ('Priority', 'priority'),
                  ('NotAutomatic', 'notautomatic'),
                  ('CopyChanges', 'copychanges'),
-                 ('CopyDotDak', 'copydotdak'),
-                 ('CommentsDir', 'commentsdir'),
-                 ('OverrideSuite', 'overridesuite'),
-                 ('ChangelogBase', 'changelogbase')]
-
+                 ('OverrideSuite', 'overridesuite')]
 
 class Suite(object):
     def __init__(self, *args, **kwargs):
@@ -2626,28 +2681,6 @@ class Uid(object):
 
 __all__.append('Uid')
 
-@session_wrapper
-def add_database_user(uidname, session=None):
-    """
-    Adds a database user
-
-    @type uidname: string
-    @param uidname: The uid of the user to add
-
-    @type session: SQLAlchemy
-    @param session: Optional SQL session object (a temporary one will be
-    generated if not supplied). If not passed, a commit will be performed at
-    the end of the function, otherwise the caller is responsible for commiting.
-
-    @rtype: Uid
-    @return: the uid object for the given uidname
-    """
-
-    session.execute("CREATE USER :uid", {'uid': uidname})
-    session.commit_or_flush()
-
-__all__.append('add_database_user')
-
 @session_wrapper
 def get_or_set_uid(uidname, session=None):
     """
@@ -2729,19 +2762,18 @@ class DBConn(object):
             'binaries',
             'binary_acl',
             'binary_acl_map',
+            'bin_contents',
             'build_queue',
             'build_queue_files',
             'component',
             'config',
-            'content_associations',
-            'content_file_names',
-            'content_file_paths',
             'changes_pending_binaries',
             'changes_pending_files',
             'changes_pending_files_map',
             'changes_pending_source',
             'changes_pending_source_files',
             'changes_pool_files',
+            'deb_contents',
             'dsc_files',
             'files',
             'fingerprint',
@@ -2753,7 +2785,7 @@ class DBConn(object):
             'new_comments',
             'override',
             'override_type',
-            'pending_content_associations',
+            'pending_bin_contents',
             'policy_queue',
             'priority',
             'section',
@@ -2766,6 +2798,7 @@ class DBConn(object):
             'suite_architectures',
             'suite_src_formats',
             'suite_build_queue_copy',
+            'udeb_contents',
             'uid',
             'upload_blocks',
         )
@@ -2789,6 +2822,30 @@ class DBConn(object):
                                 binary_id = self.tbl_bin_associations.c.bin,
                                 binary = relation(DBBinary)))
 
+        mapper(PendingBinContents, self.tbl_pending_bin_contents,
+               properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
+                                 filename = self.tbl_pending_bin_contents.c.filename,
+                                 package = self.tbl_pending_bin_contents.c.package,
+                                 version = self.tbl_pending_bin_contents.c.version,
+                                 arch = self.tbl_pending_bin_contents.c.arch,
+                                 otype = self.tbl_pending_bin_contents.c.type))
+
+        mapper(DebContents, self.tbl_deb_contents,
+               properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
+                                 package=self.tbl_deb_contents.c.package,
+                                 suite=self.tbl_deb_contents.c.suite,
+                                 arch=self.tbl_deb_contents.c.arch,
+                                 section=self.tbl_deb_contents.c.section,
+                                 filename=self.tbl_deb_contents.c.filename))
+
+        mapper(UdebContents, self.tbl_udeb_contents,
+               properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
+                                 package=self.tbl_udeb_contents.c.package,
+                                 suite=self.tbl_udeb_contents.c.suite,
+                                 arch=self.tbl_udeb_contents.c.arch,
+                                 section=self.tbl_udeb_contents.c.section,
+                                 filename=self.tbl_udeb_contents.c.filename))
+
         mapper(BuildQueue, self.tbl_build_queue,
                properties = dict(queue_id = self.tbl_build_queue.c.id))
 
@@ -2901,6 +2958,8 @@ class DBConn(object):
                                                  source_files = relation(ChangePendingFile,
                                                                          secondary=self.tbl_changes_pending_source_files,
                                                                          backref="pending_sources")))
+
+
         mapper(KeyringACLMap, self.tbl_keyring_acl_map,
                properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                                  keyring = relation(Keyring, backref="keyring_acl_map"),
@@ -2923,6 +2982,7 @@ class DBConn(object):
         mapper(Override, self.tbl_override,
                properties = dict(suite_id = self.tbl_override.c.suite,
                                  suite = relation(Suite),
+                                 package = self.tbl_override.c.package,
                                  component_id = self.tbl_override.c.component,
                                  component = relation(Component),
                                  priority_id = self.tbl_override.c.priority,
@@ -2943,7 +3003,8 @@ class DBConn(object):
                properties = dict(priority_id = self.tbl_priority.c.id))
 
         mapper(Section, self.tbl_section,
-               properties = dict(section_id = self.tbl_section.c.id))
+               properties = dict(section_id = self.tbl_section.c.id,
+                                 section=self.tbl_section.c.section))
 
         mapper(DBSource, self.tbl_source,
                properties = dict(source_id = self.tbl_source.c.id,
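
The Uploaders hunk in add_dsc_to_db() above swaps a plain split(",") for a tab-based split so that a comma inside a maintainer's name no longer breaks an entry in two: only a ", " that directly follows the closing ">" of an email address is treated as a separator. A quick sketch of the difference (the Uploaders string below is made up):

    uploaders = "Jane Doe <jane@example.org>, Smith, John <jsmith@example.org>"

    # Old behaviour: every comma is a separator, so "Smith, John <...>" is cut in half.
    print [u.strip() for u in uploaders.split(",")]
    # ['Jane Doe <jane@example.org>', 'Smith', 'John <jsmith@example.org>']

    # New behaviour: the ">, " sequence is rewritten to ">\t" first, so names
    # containing commas survive intact.
    print [u.strip() for u in uploaders.replace(">, ", ">\t").split("\t")]
    # ['Jane Doe <jane@example.org>', 'Smith, John <jsmith@example.org>']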
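The MINIMAL_APT_CONF hunks replace the hard-coded /srv/ftp.debian.org paths with %(overridedir)s and %(cachedir)s placeholders, which BuildQueue now fills from cnf["Dir::Override"] and cnf["Dir::Cache"] when it writes its temporary apt.conf. Roughly, the substitution behaves like the sketch below (all values here are invented; the real ones come from dak's configuration and mkstemp()):

    apt_conf = MINIMAL_APT_CONF % {'archivepath': '/srv/example.org/queue/buildd',
                                   'filelist': '/tmp/tmpAbC123',
                                   'cachedir': '/srv/example.org/database/',
                                   'overridedir': '/srv/example.org/scripts/override/',
                                   }
    # The resulting Dir stanza then reads:
    #   Dir
    #   {
    #      ArchiveDir "/srv/example.org/queue/buildd";
    #      OverrideDir "/srv/example.org/scripts/override/";
    #      CacheDir "/srv/example.org/database/";
    #   };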
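The new get_policy_queue_from_path() is the path-keyed counterpart of get_policy_queue(): it looks a PolicyQueue up by its on-disk directory and returns None when nothing matches. A minimal usage sketch, assuming a configured dak database (the queue path is only an example):

    from daklib.dbconn import get_policy_queue_from_path

    # The @session_wrapper decorator supplies a temporary session when none is passed.
    queue = get_policy_queue_from_path("/srv/ftp-master.debian.org/queue/newstage")
    if queue is None:
        print "no policy queue is configured for that path"
    else:
        print "found policy queue: %r" % queue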