X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fdbconn.py;h=ca6befea8677dfd1ad5f94a51a42ef85410638d3;hb=f2b3db04952a94011b8f459f701d6aff2ef9b2ed;hp=637301871a6bf4495036d8a004d7d3d8086f7fa2;hpb=0156b10c74c121497951899f37401535055e2e14;p=dak.git

diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index 63730187..ca6befea 100755
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -110,11 +110,11 @@ class DebVersion(UserDefinedType):
         return None
 
 sa_major_version = sqlalchemy.__version__[0:3]
-if sa_major_version in ["0.5", "0.6"]:
+if sa_major_version in ["0.5", "0.6", "0.7"]:
     from sqlalchemy.databases import postgres
     postgres.ischema_names['debversion'] = DebVersion
 else:
-    raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
+    raise Exception("dak only ported to SQLA versions 0.5 to 0.7. See daklib/dbconn.py")
 
 ################################################################################
 
@@ -500,7 +500,7 @@ def subprocess_setup():
 class DBBinary(ORMObject):
     def __init__(self, package = None, source = None, version = None, \
         maintainer = None, architecture = None, poolfile = None, \
-        binarytype = 'deb'):
+        binarytype = 'deb', fingerprint=None):
         self.package = package
         self.source = source
         self.version = version
@@ -508,6 +508,7 @@ class DBBinary(ORMObject):
         self.architecture = architecture
         self.poolfile = poolfile
         self.binarytype = binarytype
+        self.fingerprint = fingerprint
 
     @property
     def pkid(self):
@@ -558,10 +559,10 @@ class DBBinary(ORMObject):
         @rtype: text
         @return: stanza text of the control section.
         '''
-        import apt_inst
+        import utils
         fullpath = self.poolfile.fullpath
         deb_file = open(fullpath, 'r')
-        stanza = apt_inst.debExtractControl(deb_file)
+        stanza = utils.deb_extract_control(deb_file)
         deb_file.close()
 
         return stanza
@@ -818,7 +819,7 @@ class BuildQueue(object):
                     Logger.log(["I: Removing %s from the queue" % o.fullpath])
                     os.unlink(o.fullpath)
                     killdb = True
-            except OSError, e:
+            except OSError as e:
                 # If it wasn't there, don't worry
                 if e.errno == ENOENT:
                     killdb = True
@@ -878,7 +879,6 @@ class BuildQueue(object):
         # Prepare BuildQueueFile object
         qf = BuildQueueFile()
         qf.build_queue_id = self.queue_id
-        qf.lastused = datetime.now()
         qf.filename = poolfile_basename
 
         targetpath = poolfile.fullpath
@@ -1686,7 +1686,7 @@ class Keyring(object):
         key = None
         signingkey = False
 
-        for line in k.xreadlines():
+        for line in k:
             field = line.split(":")
             if field[0] == "pub":
                 key = field[4]
@@ -2460,13 +2460,14 @@ class Dak822(Deb822):
 
 class DBSource(ORMObject):
     def __init__(self, source = None, version = None, maintainer = None, \
-        changedby = None, poolfile = None, install_date = None):
+        changedby = None, poolfile = None, install_date = None, fingerprint = None):
         self.source = source
         self.version = version
         self.maintainer = maintainer
         self.changedby = changedby
         self.poolfile = poolfile
         self.install_date = install_date
+        self.fingerprint = fingerprint
 
     @property
     def pkid(self):
@@ -2479,7 +2480,7 @@ class DBSource(ORMObject):
 
     def not_null_constraints(self):
         return ['source', 'version', 'install_date', 'maintainer', \
-            'changedby', 'poolfile', 'install_date']
+            'changedby', 'poolfile']
 
     def read_control_fields(self):
         '''
@@ -2556,11 +2557,12 @@ def source_exists(source, source_version, suites = ["any"], session=None):
         if suite != "any":
             # source must exist in 'suite' or a suite that is enhanced by 'suite'
             s = get_suite(suite, session)
-            enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
-            considered_suites = [ vc.reference for vc in enhances_vcs ]
-            considered_suites.append(s)
+            if s:
+                enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
+                considered_suites = [ vc.reference for vc in enhances_vcs ]
+                considered_suites.append(s)
 
-            q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))
+                q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))
 
         if q.count() > 0:
             continue
@@ -2823,10 +2825,25 @@ def add_deb_to_db(u, filename, session=None):
 
     # Find source id
     bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
+
+    # If we couldn't find anything and the upload contains Arch: source,
+    # fall back to trying the source package, source version uploaded
+    # This maintains backwards compatibility with previous dak behaviour
+    # and deals with slightly broken binary debs which don't properly
+    # declare their source package name
+    if len(bin_sources) == 0:
+        if u.pkg.changes["architecture"].has_key("source") \
+           and u.pkg.dsc.has_key("source") and u.pkg.dsc.has_key("version"):
+            bin_sources = get_sources_from_name(u.pkg.dsc["source"], u.pkg.dsc["version"], session=session)
+
+    # If we couldn't find a source here, we reject
+    # TODO: Fix this so that it doesn't kill process-upload and instead just
+    # performs a reject. To be honest, we should probably spot this
+    # *much* earlier than here
     if len(bin_sources) != 1:
-        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
+        raise NoSourceFieldError("Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
             (bin.package, bin.version, entry["architecture"],
-             filename, bin.binarytype, u.pkg.changes["fingerprint"])
+             filename, bin.binarytype, u.pkg.changes["fingerprint"]))
 
     bin.source_id = bin_sources[0].source_id
 
@@ -2834,9 +2851,9 @@ def add_deb_to_db(u, filename, session=None):
     for srcname, version in entry["built-using"]:
         exsources = get_sources_from_name(srcname, version, session=session)
         if len(exsources) != 1:
-            raise NoSourceFieldError, "Unable to find source package (%s = %s) in Built-Using for %s (%s), %s, file %s, type %s, signed by %s" % \
+            raise NoSourceFieldError("Unable to find source package (%s = %s) in Built-Using for %s (%s), %s, file %s, type %s, signed by %s" % \
                 (srcname, version, bin.package, bin.version, entry["architecture"],
-                 filename, bin.binarytype, u.pkg.changes["fingerprint"])
+                 filename, bin.binarytype, u.pkg.changes["fingerprint"]))
 
         bin.extra_sources.append(exsources[0])
 
@@ -2982,6 +2999,10 @@ class Suite(ORMObject):
         else:
             return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()
 
+    @property
+    def path(self):
+        return os.path.join(self.archive.path, 'dists', self.suite_name)
+
 __all__.append('Suite')
 
 @session_wrapper
@@ -3342,8 +3363,8 @@ class DBConn(object):
         mapper(Architecture, self.tbl_architecture,
             properties = dict(arch_id = self.tbl_architecture.c.id,
                suites = relation(Suite, secondary=self.tbl_suite_architectures,
-                   order_by='suite_name',
-                   backref=backref('architectures', order_by='arch_string'))),
+                   order_by=self.tbl_suite.c.suite_name,
+                   backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
             extension = validator)
 
         mapper(Archive, self.tbl_archive,
@@ -3579,7 +3600,8 @@ class DBConn(object):
                 copy_queues = relation(BuildQueue,
                     secondary=self.tbl_suite_build_queue_copy),
                 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
-                    backref=backref('suites', lazy='dynamic'))),
+                    backref=backref('suites', lazy='dynamic')),
+                archive = relation(Archive, backref='suites')),
             extension = validator)
 
         mapper(Uid, self.tbl_uid,
@@ -3684,7 +3706,7 @@ class DBConn(object):
             self.__setuptables()
             self.__setupmappers()
 
-        except OperationalError, e:
+        except OperationalError as e:
             import utils
             utils.fubar("Cannot connect to database (%s)" % str(e))
 
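Not part of the patch above, but a note on the pattern it applies: several hunks (in BuildQueue, add_deb_to_db and DBConn) replace the old comma spellings "except OSError, e:" and "raise NoSourceFieldError, message" with the parenthesised forms accepted by Python 2.6 and later as well as Python 3. A minimal before/after sketch, using only the standard library; NoSourceFieldError here merely stands in for dak's exception of the same name, and the helper names (unlink_if_present, require_unique_source) are illustrative, not dak functions:

    import os
    from errno import ENOENT

    class NoSourceFieldError(Exception):
        pass

    def unlink_if_present(path):
        # New spelling of the except clause; the old form was "except OSError, e:".
        try:
            os.unlink(path)
        except OSError as e:
            # If it wasn't there, don't worry
            if e.errno != ENOENT:
                raise

    def require_unique_source(sources, package):
        # New spelling of raise; the old form was "raise NoSourceFieldError, <message>".
        if len(sources) != 1:
            raise NoSourceFieldError("Unable to find a unique source id for %s" % package)
        return sources[0]

Both spellings behave the same on Python 2.6/2.7; only the new one is also valid Python 3 syntax, which is presumably why the patch touches every remaining occurrence.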