diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index 48dfd0d8..80a1f233 100755
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -75,12 +75,16 @@ from sqlalchemy.orm.exc import NoResultFound
 from config import Config
 from textutils import fix_maintainer
 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
+import utils
 
 # suppress some deprecation warnings in squeeze related to sqlalchemy
 import warnings
 warnings.filterwarnings('ignore', \
     "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
     SADeprecationWarning)
+warnings.filterwarnings('ignore', \
+    "Predicate of partial index .* ignored during reflection", \
+    SAWarning)
 
 ################################################################################
 
@@ -107,11 +111,11 @@ class DebVersion(UserDefinedType):
         return None
 
 sa_major_version = sqlalchemy.__version__[0:3]
-if sa_major_version in ["0.5", "0.6"]:
+if sa_major_version in ["0.5", "0.6", "0.7"]:
     from sqlalchemy.databases import postgres
     postgres.ischema_names['debversion'] = DebVersion
 else:
-    raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
+    raise Exception("dak only ported to SQLA versions 0.5 to 0.7. See daklib/dbconn.py")
 
 ################################################################################
 
@@ -558,7 +562,7 @@ class DBBinary(ORMObject):
         import apt_inst
         fullpath = self.poolfile.fullpath
         deb_file = open(fullpath, 'r')
-        stanza = apt_inst.debExtractControl(deb_file)
+        stanza = utils.deb_extract_control(deb_file)
         deb_file.close()
 
         return stanza
@@ -752,7 +756,7 @@ class BuildQueue(object):
             # Crude hack with open and append, but this whole section is and should be redone.
             if self.notautomatic:
                 release=open("Release", "a")
-                release.write("NotAutomatic: yes")
+                release.write("NotAutomatic: yes\n")
                 release.close()
 
             # Sign if necessary
@@ -815,7 +819,7 @@
                     Logger.log(["I: Removing %s from the queue" % o.fullpath])
                     os.unlink(o.fullpath)
                     killdb = True
-                except OSError, e:
+                except OSError as e:
                     # If it wasn't there, don't worry
                     if e.errno == ENOENT:
                         killdb = True
@@ -875,7 +879,6 @@
             # Prepare BuildQueueFile object
             qf = BuildQueueFile()
             qf.build_queue_id = self.queue_id
-            qf.lastused = datetime.now()
             qf.filename = poolfile_basename
 
             targetpath = poolfile.fullpath
@@ -1132,6 +1135,19 @@ def get_component(component, session=None):
 
 __all__.append('get_component')
 
+@session_wrapper
+def get_component_names(session=None):
+    """
+    Returns list of strings of component names.
+
+    @rtype: list
+    @return: list of strings of component names
+    """
+
+    return [ x.component_name for x in session.query(Component).all() ]
+
+__all__.append('get_component_names')
+
 ################################################################################
 
 class DBConfig(object):
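
The new get_component_names() helper follows the file's @session_wrapper convention: callers may omit the session argument and a temporary session is created and cleaned up for them. A minimal usage sketch (the printed component names are illustrative, assuming a populated projectb database):

    from daklib.dbconn import get_component_names

    # No session passed: @session_wrapper opens a temporary one and
    # closes it again when the call returns
    names = get_component_names()
    print(names)    # e.g. ['main', 'contrib', 'non-free']
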
@@ -1670,7 +1686,7 @@ class Keyring(object):
         key = None
         signingkey = False
 
-        for line in k.xreadlines():
+        for line in k:
             field = line.split(":")
             if field[0] == "pub":
                 key = field[4]
@@ -1777,6 +1793,34 @@ def get_keyring(keyring, session=None):
 
 __all__.append('get_keyring')
 
+@session_wrapper
+def get_active_keyring_paths(session=None):
+    """
+    @rtype: list
+    @return: list of active keyring paths
+    """
+    return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]
+
+__all__.append('get_active_keyring_paths')
+
+@session_wrapper
+def get_primary_keyring_path(session=None):
+    """
+    Get the full path to the highest priority active keyring
+
+    @rtype: str or None
+    @return: path to the active keyring with the highest priority or None if no
+             keyring is configured
+    """
+    keyrings = get_active_keyring_paths()
+
+    if len(keyrings) > 0:
+        return keyrings[0]
+    else:
+        return None
+
+__all__.append('get_primary_keyring_path')
+
 ################################################################################
 
 class KeyringACLMap(object):
@@ -2450,6 +2494,9 @@ class DBSource(ORMObject):
 
     metadata = association_proxy('key', 'value')
 
+    def get_component_name(self):
+        return self.poolfile.location.component.component_name
+
     def scan_contents(self):
         '''
         Returns a set of names for non directories. The path names are
@@ -2509,11 +2556,12 @@ def source_exists(source, source_version, suites = ["any"], session=None):
         if suite != "any":
             # source must exist in 'suite' or a suite that is enhanced by 'suite'
             s = get_suite(suite, session)
-            enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
-            considered_suites = [ vc.reference for vc in enhances_vcs ]
-            considered_suites.append(s)
+            if s:
+                enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
+                considered_suites = [ vc.reference for vc in enhances_vcs ]
+                considered_suites.append(s)
 
-            q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))
+                q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))
 
         if q.count() > 0:
             continue
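
get_active_keyring_paths() returns the active keyrings ordered by descending priority, and get_primary_keyring_path() is a convenience wrapper returning the head of that list, or None when the keyrings table has no active rows. A short sketch of the intended call pattern (hypothetical output, assuming a configured database):

    from daklib.dbconn import get_primary_keyring_path

    keyring = get_primary_keyring_path()
    if keyring is None:
        # no active rows in the keyrings table
        print("no primary keyring configured")
    else:
        print("verifying against %s" % keyring)
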
@@ -2635,6 +2683,15 @@ __all__.append('import_metadata_into_db')
 
 ################################################################################
 
+def split_uploaders(uploaders_list):
+    '''
+    Split the Uploaders field into the individual uploaders and yield each of
+    them. Beware: email addresses might contain commas.
+    '''
+    import re
+    for uploader in re.sub(">[ ]*,", ">\t", uploaders_list).split("\t"):
+        yield uploader.strip()
+
 @session_wrapper
 def add_dsc_to_db(u, filename, session=None):
     entry = u.pkg.files[filename]
@@ -2725,8 +2782,7 @@ def add_dsc_to_db(u, filename, session=None):
         session.refresh(source)
         source.uploaders = [source.maintainer]
         if u.pkg.dsc.has_key("uploaders"):
-            for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
-                up = up.strip()
+            for up in split_uploaders(u.pkg.dsc["uploaders"]):
                 source.uploaders.append(get_or_set_maintainer(up, session))
 
     session.flush()
@@ -2768,10 +2824,25 @@ def add_deb_to_db(u, filename, session=None):
 
     # Find source id
     bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
+
+    # If we couldn't find anything and the upload contains Arch: source,
+    # fall back to trying the source package, source version uploaded
+    # This maintains backwards compatibility with previous dak behaviour
+    # and deals with slightly broken binary debs which don't properly
+    # declare their source package name
+    if len(bin_sources) == 0:
+        if u.pkg.changes["architecture"].has_key("source") \
+           and u.pkg.dsc.has_key("source") and u.pkg.dsc.has_key("version"):
+            bin_sources = get_sources_from_name(u.pkg.dsc["source"], u.pkg.dsc["version"], session=session)
+
+    # If we couldn't find a source here, we reject
+    # TODO: Fix this so that it doesn't kill process-upload and instead just
+    #       performs a reject.  To be honest, we should probably spot this
+    #       *much* earlier than here
     if len(bin_sources) != 1:
-        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
+        raise NoSourceFieldError("Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                   (bin.package, bin.version, entry["architecture"],
-                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])
+                                   filename, bin.binarytype, u.pkg.changes["fingerprint"]))
 
     bin.source_id = bin_sources[0].source_id
 
@@ -2779,9 +2850,9 @@ def add_deb_to_db(u, filename, session=None):
     for srcname, version in entry["built-using"]:
         exsources = get_sources_from_name(srcname, version, session=session)
         if len(exsources) != 1:
-            raise NoSourceFieldError, "Unable to find source package (%s = %s) in Built-Using for %s (%s), %s, file %s, type %s, signed by %s" % \
+            raise NoSourceFieldError("Unable to find source package (%s = %s) in Built-Using for %s (%s), %s, file %s, type %s, signed by %s" % \
                                       (srcname, version, bin.package, bin.version, entry["architecture"],
-                                       filename, bin.binarytype, u.pkg.changes["fingerprint"])
+                                       filename, bin.binarytype, u.pkg.changes["fingerprint"]))
 
         bin.extra_sources.append(exsources[0])
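
split_uploaders() exists because a naive uploaders_list.split(",") would break on maintainer names that themselves contain commas; the regex only treats a comma as a separator when it directly follows the closing ">" of an address, optionally with spaces in between. A small sketch of the behaviour (the addresses are made up):

    from daklib.dbconn import split_uploaders

    uploaders = "Jane Doe <jane@example.org>, Acme, Inc. Team <team@acme.example>"
    for up in split_uploaders(uploaders):
        print(up)
    # Jane Doe <jane@example.org>
    # Acme, Inc. Team <team@acme.example>
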
@@ -2956,11 +3027,11 @@ __all__.append('get_suite')
 
 ################################################################################
 
-# TODO: should be removed because the implementation is too trivial
 @session_wrapper
 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
     """
-    Returns list of Architecture objects for given C{suite} name
+    Returns list of Architecture objects for given C{suite} name. The list is
+    empty if the suite does not exist.
 
     @type suite: str
     @param suite: Suite name to search for
 
@@ -2981,48 +3052,15 @@ def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
     @return: list of Architecture objects for the given name (may be empty)
     """
 
-    return get_suite(suite, session).get_architectures(skipsrc, skipall)
+    try:
+        return get_suite(suite, session).get_architectures(skipsrc, skipall)
+    except AttributeError:
+        return []
 
 __all__.append('get_suite_architectures')
 
 ################################################################################
 
-class SuiteSrcFormat(object):
-    def __init__(self, *args, **kwargs):
-        pass
-
-    def __repr__(self):
-        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
-
-__all__.append('SuiteSrcFormat')
-
-@session_wrapper
-def get_suite_src_formats(suite, session=None):
-    """
-    Returns list of allowed SrcFormat for C{suite}.
-
-    @type suite: str
-    @param suite: Suite name to search for
-
-    @type session: Session
-    @param session: Optional SQL session object (a temporary one will be
-    generated if not supplied)
-
-    @rtype: list
-    @return: the list of allowed source formats for I{suite}
-    """
-
-    q = session.query(SrcFormat)
-    q = q.join(SuiteSrcFormat)
-    q = q.join(Suite).filter_by(suite_name=suite)
-    q = q.order_by('format_name')
-
-    return q.all()
-
-__all__.append('get_suite_src_formats')
-
-################################################################################
-
 class Uid(ORMObject):
     def __init__(self, uid = None, name = None):
         self.uid = uid
@@ -3320,8 +3358,8 @@ class DBConn(object):
         mapper(Architecture, self.tbl_architecture,
             properties = dict(arch_id = self.tbl_architecture.c.id,
                suites = relation(Suite, secondary=self.tbl_suite_architectures,
-                   order_by='suite_name',
-                   backref=backref('architectures', order_by='arch_string'))),
+                   order_by=self.tbl_suite.c.suite_name,
+                   backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
             extension = validator)
 
         mapper(Archive, self.tbl_archive,
@@ -3555,15 +3593,11 @@ class DBConn(object):
                properties = dict(suite_id = self.tbl_suite.c.id,
                                  policy_queue = relation(PolicyQueue),
                                  copy_queues = relation(BuildQueue,
-                                     secondary=self.tbl_suite_build_queue_copy)),
+                                     secondary=self.tbl_suite_build_queue_copy),
+                                 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
+                                     backref=backref('suites', lazy='dynamic'))),
                extension = validator)
 
-        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
-               properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
-                                 suite = relation(Suite, backref='suitesrcformats'),
-                                 src_format_id = self.tbl_suite_src_formats.c.src_format,
-                                 src_format = relation(SrcFormat)))
-
         mapper(Uid, self.tbl_uid,
                properties = dict(uid_id = self.tbl_uid.c.id,
                                  fingerprint = relation(Fingerprint)),
@@ -3655,15 +3689,21 @@ class DBConn(object):
 
         sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
 
-        self.db_pg = create_engine(connstr, **engine_args)
-        self.db_meta = MetaData()
-        self.db_meta.bind = self.db_pg
-        self.db_smaker = sessionmaker(bind=self.db_pg,
-                                      autoflush=True,
-                                      autocommit=False)
+        try:
+            self.db_pg = create_engine(connstr, **engine_args)
+            self.db_meta = MetaData()
+            self.db_meta.bind = self.db_pg
+            self.db_smaker = sessionmaker(bind=self.db_pg,
+                                          autoflush=True,
+                                          autocommit=False)
+
+            self.__setuptables()
+            self.__setupmappers()
+
+        except OperationalError as e:
+            import utils
+            utils.fubar("Cannot connect to database (%s)" % str(e))
 
-        self.__setuptables()
-        self.__setupmappers()
         self.pid = os.getpid()
 
     def session(self, work_mem = 0):
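
With the try/except in place, get_suite_architectures() degrades gracefully for unknown suites instead of raising AttributeError on the None returned by get_suite(), and the new srcformats relation on the Suite mapper takes over from the removed get_suite_src_formats() helper. A sketch of both, assuming a populated projectb database ('unstable' is illustrative):

    from daklib.dbconn import get_suite, get_suite_architectures

    # Unknown suite: now yields an empty list instead of a traceback
    assert get_suite_architectures('no-such-suite') == []

    # Known suite, skipping the pseudo-architectures 'source' and 'all'
    archs = get_suite_architectures('unstable', skipsrc=True, skipall=True)
    print([ a.arch_string for a in archs ])

    # Allowed source formats now hang off the Suite mapper directly
    formats = get_suite('unstable').srcformats
    print([ f.format_name for f in formats ])
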