5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
40 from datetime import datetime, timedelta
41 from errno import ENOENT
42 from tempfile import mkstemp, mkdtemp
44 from inspect import getargspec
47 from sqlalchemy import create_engine, Table, MetaData
48 from sqlalchemy.orm import sessionmaker, mapper, relation
49 from sqlalchemy import types as sqltypes
51 # Don't remove this, we re-export the exceptions to scripts which import us
52 from sqlalchemy.exc import *
53 from sqlalchemy.orm.exc import NoResultFound
55 from config import Config
56 from textutils import fix_maintainer
58 ################################################################################
60 # Patch in support for the debversion field type so that it works during
class DebVersion(sqltypes.Text):
    """Column type mapping PostgreSQL's native 'debversion' type so it can
    be used in table definitions and reflection."""
    def get_col_spec(self):
        # DDL type name emitted for columns of this type
        return "DEBVERSION"

sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version == "0.5":
    # Teach the 0.5 Postgres dialect to reflect 'debversion' columns
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
else:
    raise Exception("dak isn't ported to SQLA versions != 0.5 yet.  See daklib/dbconn.py")
74 ################################################################################
# Public API of this module; definitions below append their own names.
# IntegrityError / SQLAlchemyError come from the sqlalchemy.exc star-import.
__all__ = ['IntegrityError', 'SQLAlchemyError']
78 ################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """
    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        if session is None:
            if len(args) <= len(getargspec(fn)[0]) - 1:
                # No session specified as last argument or in kwargs
                private_transaction = True
                session = kwargs['session'] = DBConn().session()
            else:
                # Session is last argument in args
                session = args[-1]
                if session is None:
                    args = list(args)
                    session = args[-1] = DBConn().session()
                    private_transaction = True

        if private_transaction:
            # We own the session, so "commit or flush" really commits
            session.commit_or_flush = session.commit
        else:
            # Caller owns the session; never commit behind its back
            session.commit_or_flush = session.flush

        try:
            return fn(*args, **kwargs)
        finally:
            if private_transaction:
                # We created a session; close it.
                session.close()

    wrapped.__doc__ = fn.__doc__
    # __name__ works on both Python 2 and 3; func_name was Python-2-only
    wrapped.__name__ = fn.__name__

    return wrapped
127 __all__.append('session_wrapper')
129 ################################################################################
class Architecture(object):
    """Object mapped to a row of the architecture table; attributes
    (e.g. arch_string) are supplied by the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        # Allow comparing an Architecture directly against its name string
        if isinstance(val, str):
            return (self.arch_string == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.arch_string != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Architecture %s>' % self.arch_string
__all__.append('Architecture')

@session_wrapper
def get_architecture(architecture, session=None):
    """
    Returns database id for given C{architecture}.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Architecture
    @return: Architecture object for the given arch (None if not present)
    """
    q = session.query(Architecture).filter_by(arch_string=architecture)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_architecture')
@session_wrapper
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)
    """
    q = session.query(Suite)
    q = q.join(SuiteArchitecture)
    q = q.join(Architecture).filter_by(arch_string=architecture).order_by('suite_name')

    return q.all()

__all__.append('get_architecture_suites')
203 ################################################################################
class Archive(object):
    """Object mapped to a row of the archive table; attributes
    (e.g. archive_name) are supplied by the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Archive %s>' % self.archive_name
__all__.append('Archive')

@session_wrapper
def get_archive(archive, session=None):
    """
    returns database id for given C{archive}.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Archive
    @return: Archive object for the given name (None if not present)
    """
    # Archive names are stored lower-cased
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_archive')
241 ################################################################################
class BinAssociation(object):
    """Object mapped to a row of the bin_associations table, tying a
    binary to a suite; attributes come from the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
250 __all__.append('BinAssociation')
252 ################################################################################
class BinContents(object):
    """Object mapped to a row of the bin_contents table (a filename shipped
    by a binary package); attributes come from the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinContents (%s, %s)>' % (self.binary, self.filename)
261 __all__.append('BinContents')
263 ################################################################################
class DBBinary(object):
    """Object mapped to a row of the binaries table; attributes
    (package, version, architecture, ...) come from the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)
272 __all__.append('DBBinary')
@session_wrapper
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @type package: str
    @param package: DBBinary package name to search for

    @rtype: list
    @return: list of Suite objects for the given package
    """
    return session.query(Suite).join(BinAssociation).join(DBBinary).filter_by(package=package).all()

__all__.append('get_suites_binary_in')
@session_wrapper
def get_binary_from_id(binary_id, session=None):
    """
    Returns DBBinary object for given C{id}

    @type binary_id: int
    @param binary_id: Id of the required binary

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBBinary
    @return: DBBinary object for the given binary (None if not present)
    """
    q = session.query(DBBinary).filter_by(binary_id=binary_id)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_binary_from_id')
@session_wrapper
def get_binaries_from_name(package, version=None, architecture=None, session=None):
    """
    Returns list of DBBinary objects for given C{package} name

    @type package: str
    @param package: DBBinary package name to search for

    @type version: str or None
    @param version: Version to search for (or None)

    @type architecture: str, list or None
    @param architecture: Architectures to limit to (or None if no limit)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBBinary objects for the given name (may be empty)
    """
    q = session.query(DBBinary).filter_by(package=package)

    if version is not None:
        q = q.filter_by(version=version)

    if architecture is not None:
        # Accept either a single architecture name or a list of them
        if not isinstance(architecture, list):
            architecture = [architecture]
        q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))

    return q.all()

__all__.append('get_binaries_from_name')
@session_wrapper
def get_binaries_from_source_id(source_id, session=None):
    """
    Returns list of DBBinary objects for given C{source_id}

    @type source_id: int
    @param source_id: source_id to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBBinary objects for the given source id (may be empty)
    """
    return session.query(DBBinary).filter_by(source_id=source_id).all()

__all__.append('get_binaries_from_source_id')
@session_wrapper
def get_binary_from_name_suite(package, suitename, session=None):
    ### For dak examine-package
    ### XXX: Doesn't use object API yet

    # NOTE(review): the visible copy of this query was missing the join
    # conditions between binaries/files and bin_associations/suite, which
    # would have produced a cross join; restored below — confirm upstream.
    sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
             FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
             WHERE b.package=:package
               AND b.file = fi.id
               AND fi.location = l.id
               AND l.component = c.id
               AND ba.bin = b.id
               AND ba.suite = su.id
               AND su.suite_name=:suitename
             ORDER BY b.version DESC"""

    return session.execute(sql, {'package': package, 'suitename': suitename})

__all__.append('get_binary_from_name_suite')
@session_wrapper
def get_binary_components(package, suitename, arch, session=None):
    # Check for packages that have moved from one component to another
    # NOTE(review): the visible copy of this query lacked the binaries->files
    # join condition; restored below — confirm upstream.
    query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
    WHERE b.package=:package AND s.suite_name=:suitename
      AND (a.arch_string = :arch OR a.arch_string = 'all')
      AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
      AND f.location = l.id
      AND l.component = c.id
      AND b.file = f.id"""

    vals = {'package': package, 'suitename': suitename, 'arch': arch}

    return session.execute(query, vals)

__all__.append('get_binary_components')
410 ################################################################################
class BinaryACL(object):
    """Object mapped to a row of the binary_acl table; attributes come from
    the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACL %s>' % self.binary_acl_id
419 __all__.append('BinaryACL')
421 ################################################################################
class BinaryACLMap(object):
    """Object mapped to a row of the binary_acl_map table; attributes come
    from the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACLMap %s>' % self.binary_acl_map_id
430 __all__.append('BinaryACLMap')
432 ################################################################################
437 ArchiveDir "%(archivepath)s";
438 OverrideDir "/srv/ftp.debian.org/scripts/override/";
439 CacheDir "/srv/ftp.debian.org/database/";
444 Packages::Compress ". bzip2 gzip";
445 Sources::Compress ". bzip2 gzip";
450 bindirectory "incoming"
455 BinOverride "override.sid.all3";
456 BinCacheDB "packages-accepted.db";
458 FileList "%(filelist)s";
461 Packages::Extensions ".deb .udeb";
464 bindirectory "incoming/"
467 BinOverride "override.sid.all3";
468 SrcOverride "override.sid.all3.src";
469 FileList "%(filelist)s";
class BuildQueue(object):
    """A build queue directory (e.g. buildd incoming) plus the apt-ftparchive
    metadata (Packages/Sources/Release) generated for it.

    NOTE(review): this copy of the file is missing many lines inside this
    class (loop headers, try/except frames, returns are elided); the comments
    below describe only what the visible code demonstrably does.
    """
    def __init__(self, *args, **kwargs):
        # NOTE(review): the following return appears to belong to a __repr__
        # whose 'def' line is elided in this copy — confirm upstream.
        return '<BuildQueue %s>' % self.queue_name

    def write_metadata(self, starttime, force=False):
        # Do we write out metafiles?
        if not (force or self.generate_metadata):
            # NOTE(review): early-return body elided in this copy
        session = DBConn().session().object_session(self)

        fl_fd = fl_name = ac_fd = ac_name = None
        # All architectures known to the DB, except the pseudo-arch 'source'
        arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
        startdir = os.getcwd()

        # Grab files we want to include
        newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
        # Write file list with newer files
        (fl_fd, fl_name) = mkstemp()
        # NOTE(review): 'n' suggests an elided 'for n in newer:' loop here
        os.write(fl_fd, '%s\n' % n.fullpath)

        # Write minimal apt.conf
        # TODO: Remove hardcoding from template
        (ac_fd, ac_name) = mkstemp()
        os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
                                            'filelist': fl_name})

        # Run apt-ftparchive generate
        os.chdir(os.path.dirname(ac_name))
        os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))

        # Run apt-ftparchive release
        # TODO: Eww - fix this
        bname = os.path.basename(self.path)

        # We have to remove the Release file otherwise it'll be included in the
        os.unlink(os.path.join(bname, 'Release'))

        os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))

        # Sign the Release file with the keyring(s) from dak's configuration
        keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
        if cnf.has_key("Dinstall::SigningPubKeyring"):
            keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]

        os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))

        # Move the files if we got this far
        os.rename('Release', os.path.join(bname, 'Release'))
        os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))

        # Clean up any left behind files

    def clean_and_update(self, starttime, Logger, dryrun=False):
        """WARNING: This routine commits for you"""
        session = DBConn().session().object_session(self)

        if self.generate_metadata and not dryrun:
            self.write_metadata(starttime)

        # Grab files older than our execution time
        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()

        # NOTE(review): the loop over 'older' (binding 'o') and the dryrun
        # branch / try-except frame around the unlink are elided in this copy
        Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
        Logger.log(["I: Removing %s from the queue" % o.fullpath])
        os.unlink(o.fullpath)
        # If it wasn't there, don't worry
        if e.errno == ENOENT:
        # TODO: Replace with proper logging call
        Logger.log(["E: Could not remove %s" % o.fullpath])

        # Remove apt-ftparchive output files with no matching DB entry
        for f in os.listdir(self.path):
            if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release'):
                # NOTE(review): 'try:' for this lookup is elided in this copy
                r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
            except NoResultFound:
                fp = os.path.join(self.path, f)
                Logger.log(["I: Would remove unused link %s" % fp])
                Logger.log(["I: Removing unused link %s" % fp])
                Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])

    def add_file_from_pool(self, poolfile):
        """Copies a file into the pool.  Assumes that the PoolFile object is
        attached to the same SQLAlchemy session as the Queue object is.

        The caller is responsible for committing after calling this function."""
        poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]

        # Check if we have a file of this name or this ID already
        for f in self.queuefiles:
            if f.fileid is not None and f.fileid == poolfile.file_id or \
               f.poolfile.filename == poolfile_basename:
                # In this case, update the BuildQueueFile entry so we
                # don't remove it too early
                f.lastused = datetime.now()
                DBConn().session().object_session(poolfile).add(f)
                # NOTE(review): a 'return' is presumably elided here

        # Prepare BuildQueueFile object
        qf = BuildQueueFile()
        qf.build_queue_id = self.queue_id
        qf.lastused = datetime.now()
        qf.filename = poolfile_basename

        targetpath = poolfile.fullpath
        queuepath = os.path.join(self.path, poolfile_basename)

        # NOTE(review): the copy-vs-symlink branch and its try/except frame
        # are elided in this copy
        # We need to copy instead of symlink
        utils.copy(targetpath, queuepath)
        # NULL in the fileid field implies a copy
        os.symlink(targetpath, queuepath)
        qf.fileid = poolfile.file_id

        # Get the same session as the PoolFile is using and add the qf to it
        DBConn().session().object_session(poolfile).add(qf)
662 __all__.append('BuildQueue')
@session_wrapper
def get_build_queue(queuename, session=None):
    """
    Returns BuildQueue object for given C{queue name}.

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: BuildQueue
    @return: BuildQueue object for the given queue (None if not present)
    """
    q = session.query(BuildQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_build_queue')
690 ################################################################################
class BuildQueueFile(object):
    """Object mapped to a row of the build_queue_files table; attributes
    come from the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)

    @property
    def fullpath(self):
        # Absolute path of this file inside its build queue directory
        return os.path.join(self.buildqueue.path, self.filename)
704 __all__.append('BuildQueueFile')
706 ################################################################################
class ChangePendingBinary(object):
    """Object mapped to a row of the changes_pending_binaries table;
    attributes come from the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingBinary %s>' % self.change_pending_binary_id
715 __all__.append('ChangePendingBinary')
717 ################################################################################
class ChangePendingFile(object):
    """Object mapped to a row of the changes_pending_files table;
    attributes come from the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingFile %s>' % self.change_pending_file_id
726 __all__.append('ChangePendingFile')
728 ################################################################################
class ChangePendingSource(object):
    """Object mapped to a row of the changes_pending_source table;
    attributes come from the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingSource %s>' % self.change_pending_source_id
737 __all__.append('ChangePendingSource')
739 ################################################################################
class Component(object):
    """Object mapped to a row of the component table; attributes
    (e.g. component_name) come from the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        # Allow comparing a Component directly against its name string
        if isinstance(val, str):
            return (self.component_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.component_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Component %s>' % self.component_name
__all__.append('Component')

@session_wrapper
def get_component(component, session=None):
    """
    Returns database id for given C{component}.

    @type component: string
    @param component: The name of the component

    @rtype: Component
    @return: the Component object for the given name (None if not present)
    """
    # Component names are stored lower-cased
    component = component.lower()

    q = session.query(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_component')
786 ################################################################################
class DBConfig(object):
    """Object mapped to a row of the config table; attributes come from the
    SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBConfig %s>' % self.name
795 __all__.append('DBConfig')
797 ################################################################################
@session_wrapper
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: int
    @return: the database id for the given filename
    """
    q = session.query(ContentFilename).filter_by(filename=filename)

    try:
        ret = q.one().cafilename_id
    except NoResultFound:
        # Not there yet: insert and flush/commit to obtain the new id
        cf = ContentFilename()
        cf.filename = filename
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilename_id

    return ret

__all__.append('get_or_set_contents_file_id')
def get_contents(suite, overridetype, section=None, session=None):
    """
    Returns contents for a suite / overridetype combination, limiting
    to a section if not None.

    @type suite: Suite
    @param suite: Suite object

    @type overridetype: OverrideType
    @param overridetype: OverrideType object

    @type section: Section
    @param section: Optional section object to limit results to

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: ResultsProxy object set up to return tuples of (filename, section,
    ...)
    """
    # find me all of the contents for a given suite
    # NOTE(review): the SELECT list below appears truncated in this copy
    # (trailing comma before FROM; additional selected columns are missing)
    # — confirm against upstream before relying on it.
    contents_q = """SELECT (p.path||'/'||n.file) AS fn,
                    FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
                    JOIN content_file_names n ON (c.filename=n.id)
                    JOIN binaries b ON (b.id=c.binary_pkg)
                    JOIN override o ON (o.package=b.package)
                    JOIN section s ON (s.id=o.section)
                    WHERE o.suite = :suiteid AND o.type = :overridetypeid
                    AND b.type=:overridetypename"""

    vals = {'suiteid': suite.suite_id,
            'overridetypeid': overridetype.overridetype_id,
            'overridetypename': overridetype.overridetype}

    # Optionally restrict to a single section
    if section is not None:
        contents_q += " AND s.id = :sectionid"
        vals['sectionid'] = section.section_id

    contents_q += " ORDER BY fn"

    return session.execute(contents_q, vals)
881 __all__.append('get_contents')
883 ################################################################################
class ContentFilepath(object):
    """Object mapped to a row of the content_file_paths table; attributes
    come from the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentFilepath %s>' % self.filepath
__all__.append('ContentFilepath')

@session_wrapper
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: int
    @return: the database id for the given path
    """
    q = session.query(ContentFilepath).filter_by(filepath=filepath)

    try:
        ret = q.one().cafilepath_id
    except NoResultFound:
        # Not there yet: insert and flush/commit to obtain the new id
        cf = ContentFilepath()
        cf.filepath = filepath
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilepath_id

    return ret

__all__.append('get_or_set_contents_path_id')
927 ################################################################################
class ContentAssociation(object):
    """Object mapped to a row of the content_associations table; attributes
    come from the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentAssociation %s>' % self.ca_id
936 __all__.append('ContentAssociation')
def insert_content_paths(binary_id, fullpaths, session=None):
    """
    Make sure given path is associated with given binary id

    @type binary_id: int
    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session.  If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code.  If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for commiting.

    @return: True upon success
    """
    privatetrans = False
    if session is None:
        session = DBConn().session()
        privatetrans = True

    try:
        # Insert paths
        for fullpath in fullpaths:
            # Normalise away a leading './'
            if fullpath.startswith( './' ):
                fullpath = fullpath[2:]

            session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )", { 'filename': fullpath, 'id': binary_id} )

        session.commit()
        if privatetrans:
            session.close()
        return True

    except:
        traceback.print_exc()

        # Only rollback if we set up the session ourself
        if privatetrans:
            session.rollback()
            session.close()

        return False
985 __all__.append('insert_content_paths')
987 ################################################################################
class DSCFile(object):
    """Object mapped to a row of the dsc_files table; attributes come from
    the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DSCFile %s>' % self.dscfile_id
996 __all__.append('DSCFile')
@session_wrapper
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @rtype: list
    @return: Possibly empty list of DSCFiles
    """
    q = session.query(DSCFile)

    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

    return q.all()

__all__.append('get_dscfiles')
1031 ################################################################################
class PoolFile(object):
    """Object mapped to a row of the files table; attributes come from the
    SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PoolFile %s>' % self.filename

    @property
    def fullpath(self):
        # Absolute path: the location's base path joined with our filename
        return os.path.join(self.location.path, self.filename)
1044 __all__.append('PoolFile')
@session_wrapper
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Returns a tuple:
    (ValidFileFound [boolean or None], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type filesize: int
    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @rtype: tuple
    @return: Tuple of length 2.
    If more than one file found with that name: (C{None}, C{None})
    If valid pool file found: (True, PoolFile object)
    If valid pool file not found:
    (False, None) if no file found
    (False, PoolFile object) if file found with size/md5sum mismatch
    """
    q = session.query(PoolFile).filter_by(filename=filename)
    q = q.join(Location).filter_by(location_id=location_id)

    ret = None

    if q.count() > 1:
        # Ambiguous: more than one file of that name at this location
        ret = (None, None)
    elif q.count() < 1:
        ret = (False, None)
    else:
        obj = q.one()
        # A file only counts as valid if both size and md5sum match
        if obj.md5sum != md5sum or obj.filesize != int(filesize):
            ret = (False, obj)

    if ret is None:
        ret = (True, obj)

    return ret

__all__.append('check_poolfile')
@session_wrapper
def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile object or None for the given id

    @type file_id: int
    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """
    q = session.query(PoolFile).filter_by(file_id=file_id)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_poolfile_by_id')
@session_wrapper
def get_poolfile_by_name(filename, location_id=None, session=None):
    """
    Returns an array of PoolFile objects for the given filename and
    (optionally) location_id

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in (optional)

    @rtype: array
    @return: array of PoolFile objects
    """
    q = session.query(PoolFile).filter_by(filename=filename)

    if location_id is not None:
        q = q.join(Location).filter_by(location_id=location_id)

    return q.all()

__all__.append('get_poolfile_by_name')
@session_wrapper
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @rtype: array
    @return: array of PoolFile objects
    """
    # TODO: There must be a way of properly using bind parameters with %FOO%
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))

    return q.all()

__all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data (size, md5sum, sha1sum, sha256sum)

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """
    privatetrans = False
    if session is None:
        session = DBConn().session()
        privatetrans = True

    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)

    if privatetrans:
        session.commit()
        session.close()
    else:
        # Flush to get a file id (NB: This is not a commit)
        session.flush()

    return poolfile

__all__.append('add_poolfile')
1194 ################################################################################
class Fingerprint(object):
    """Object mapped to a row of the fingerprint table; attributes come from
    the SQLAlchemy mapper."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Fingerprint %s>' % self.fingerprint
1203 __all__.append('Fingerprint')
@session_wrapper
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_fingerprint')
@session_wrapper
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        # Not there yet: insert and flush/commit so the row gets an id
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()
        ret = fingerprint

    return ret

__all__.append('get_or_set_fingerprint')
1267 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """Extract a display name from an LDAP entry.

    Joins the first value of each of the cn/mn/sn attributes with spaces,
    skipping attributes that are absent or whose first value is empty or a
    literal "-".
    """
    name = []
    for k in ["cn", "mn", "sn"]:
        ret = entry.get(k)
        if ret and ret[0] != "" and ret[0] != "-":
            name.append(ret[0])
    return " ".join(name)
1278 ################################################################################
1280 class Keyring(object):
1281 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1282 " --with-colons --fingerprint --fingerprint"
1287 def __init__(self, *args, **kwargs):
1291 return '<Keyring %s>' % self.keyring_name
1293 def de_escape_gpg_str(self, txt):
1294 esclist = re.split(r'(\\x..)', txt)
1295 for x in range(1,len(esclist),2):
1296 esclist[x] = "%c" % (int(esclist[x][2:],16))
1297 return "".join(esclist)
    def load_keys(self, keyring):
        """Parse gpg's colon-delimited listing of C{keyring} into
        self.keys and self.fpr_lookup.

        NOTE(review): several lines of this method are elided in the copy
        under review (initialisation of self.keys/self.fpr_lookup and the
        assignment of C{key} for each 'pub' record among them).
        """
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        # One record per line; the record type is the first colon field.
        k = os.popen(self.gpg_invocation % keyring, "r")
        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # field 9 is the primary uid: "Name (comment) <addr>"
                (name, addr) = email.Utils.parseaddr(field[9])
                name = re.sub(r"\s*[(].*[)]", "", name)
                if name == "" or addr == "" or "@" not in addr:
                    addr = "invalid-uid"
                name = self.de_escape_gpg_str(name)
                self.keys[key] = {"email": addr}
                self.keys[key]["name"] = name
                self.keys[key]["aliases"] = [name]
                self.keys[key]["fingerprints"] = []
            elif key and field[0] == "sub" and len(field) >= 12:
                # field 11 holds the capability flags; 's' = may sign
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = email.Utils.parseaddr(field[9])
                if name and name not in self.keys[key]["aliases"]:
                    self.keys[key]["aliases"].append(name)
            elif signingkey and field[0] == "fpr":
                # field 9 is the full fingerprint for the signing key
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key
    def import_users_from_ldap(self, session):
        """Fetch developer entries from LDAP and attach their uid to the
        matching keys in self.keys.

        @return: (byname, byuid) dicts mapping uid -> (keyid, name) and
        keyid -> (uid, name).

        NOTE(review): several lines are elided in the copy under review
        (cnf/byname/byuid initialisation and the loop over the LDAP
        result entries among them).
        """
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        # Anonymous bind is sufficient for this read-only search.
        l = ldap.open(LDAPServer)
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

            uid = entry["uid"][0]
            name = get_ldap_name(entry)
            fingerprints = entry["keyFingerPrint"]
            for f in fingerprints:
                key = self.fpr_lookup.get(f, None)
                if key not in self.keys:
                    self.keys[key]["uid"] = uid

                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, name)
                byname[uid] = (keyid, name)

        return (byname, byuid)
    def generate_users_from_keyring(self, format, session):
        """Ensure every key in this keyring has a database uid, deriving
        uids from key email addresses via C{format} (a %s pattern).

        @return: (byname, byuid) dicts as in import_users_from_ldap.

        NOTE(review): several lines are elided in the copy under review
        (byname/byuid/any_invalid initialisation and the else / if
        any_invalid branch framing among them).
        """
        for x in self.keys.keys():
            if self.keys[x]["email"] == "invalid-uid":
                # Keys with unparseable uids all map to one placeholder.
                self.keys[x]["uid"] = format % "invalid-uid"
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

            uid = format % "invalid-uid"
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, "ungeneratable user id")
            byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)

    # NOTE(review): the try/return lines are elided in this view;
    # presumably q.one() is returned, with None on NoResultFound.
    except NoResultFound:

__all__.append('get_keyring')

################################################################################
class KeyringACLMap(object):
    """ORM stub for the C{keyring_acl_map} table (mapped in DBConn)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided in this view.
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id

__all__.append('KeyringACLMap')

################################################################################
class DBChange(object):
    """ORM object for a row of the C{changes} table: one uploaded .changes file."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided in this view.
        return '<DBChange %s>' % self.changesname
1440 def upload_into_db(self, u, path):
1442 session = DBConn().session().object_session(self)
1445 for chg_fn, entry in u.pkg.files.items():
1447 f = open(os.path.join(path, chg_fn))
1448 cpf = ChangePendingFile()
1449 cpf.filename = chg_fn
1450 cpf.size = entry['size']
1451 cpf.md5sum = entry['md5sum']
1453 if entry.has_key('sha1sum'):
1454 cpf.sha1sum = entry['sha1sum']
1457 cpf.sha1sum = apt_pkg.sha1sum(f)
1459 if entry.has_key('sha256sum'):
1460 cpf.sha256sum = entry['sha256sum']
1463 cpf.sha256sum = apt_pkg.sha256sum(f)
1470 # Can't find the file, try to look it up in the pool
1471 from utils import poolify
1472 poolname = poolify(entry["source"], entry["component"])
1473 l = get_location(cnf["Dir::Pool"], entry["component"], session=session)
1475 found, poolfile = check_poolfile(os.path.join(poolname, chg_fn),
1482 Logger.log(["E: Found multiple files for pool (%s) for %s" % % (chg_fn, entry["component"]))
1483 elif found is False and poolfile is not None:
1484 Logger.log(["E: md5sum/size mismatch for %s in pool" % % (chg_fn))
1486 if poolfile is None:
1487 Logger.log(["E: Could not find %s in pool" % % (chg_fn))
1489 chg.poolfiles.append(poolfile)
1494 def clean_from_queue(self):
1495 session = DBConn().session().object_session(self)
1497 # Remove changes_pool_files entries
1498 for pf in self.poolfiles:
1499 self.poolfiles.remove(pf)
1502 for cf in self.files:
1503 self.files.remove(cf)
1505 # Clear out of queue
1506 self.in_queue = None
1507 self.approved_for_id = None
1509 __all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the .changes file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (None if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    # NOTE(review): the try/return lines are elided in this view;
    # presumably q.one() is returned, with None on NoResultFound.
    except NoResultFound:

__all__.append('get_dbchange')

################################################################################
class Location(object):
    """ORM stub for the C{location} table (archive file locations)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided in this view.
        return '<Location %s (%s)>' % (self.path, self.location_id)

__all__.append('Location')
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive.

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """
    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    # NOTE(review): the try/return lines are elided in this view;
    # presumably q.one() is returned, with None on NoResultFound.
    except NoResultFound:

__all__.append('get_location')

################################################################################
class Maintainer(object):
    """ORM object for the C{maintainer} table."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided in this view.
        return '''<Maintainer '%s' (%s)>''' % (self.name, self.maintainer_id)
1590 def get_split_maintainer(self):
1591 if not hasattr(self, 'name') or self.name is None:
1592 return ('', '', '', '')
1594 return fix_maintainer(self.name.strip())
1596 __all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)
    # NOTE(review): the try/return framing is elided in this view;
    # presumably q.one() is tried first and the freshly inserted row
    # is returned from the except branch below.
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')

################################################################################
class NewComment(object):
    """ORM object for the C{new_comments} table (ftpmaster comments on NEW uploads)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided in this view.
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    matches = session.query(NewComment) \
                     .filter_by(package=package) \
                     .filter_by(version=version) \
                     .count()
    return matches > 0
1685 __all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)
    # NOTE(review): the return line is elided in this view; presumably
    # q.all() is returned.

__all__.append('get_new_comments')

################################################################################
class Override(object):
    """ORM object for the C{override} table (per-suite package overrides)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided in this view.
        return '<Override %s (%s)>' % (self.package, self.suite_id)

__all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each optional filter accepts a scalar or a list; normalise to a list.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    # NOTE(review): the return line is elided in this view; presumably
    # q.all() is returned.

__all__.append('get_override')

################################################################################
class OverrideType(object):
    """ORM object for the C{override_type} table (deb/udeb/dsc)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided in this view.
        return '<OverrideType %s>' % self.overridetype

__all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the database id for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)

    # NOTE(review): the try/return lines are elided in this view;
    # presumably q.one() is returned, with None on NoResultFound.
    except NoResultFound:

__all__.append('get_override_type')

################################################################################
class PendingContentAssociation(object):
    """ORM object for the C{pending_content_associations} table."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided in this view.
        return '<PendingContentAssociation %s>' % self.pca_id

__all__.append('PendingContentAssociation')
def insert_pending_content_paths(package, fullpaths, session=None):
    """
    Make sure given paths are temporarily associated with given
    package

    @type package: dict
    @param package: the package to associate with should have been read in from the binary control file

    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary

    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session.  If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code.  If not passed, a commit
    will be performed at the end of the function

    @rtype: boolean
    @return: True upon success, False if there is a problem
    """
    # Tracks whether we opened the session (and thus own the transaction).
    privatetrans = False
        # NOTE(review): the "if session is None:" / try: framing and the
        # pathcache initialisation are elided in this view.
        session = DBConn().session()

        arch = get_architecture(package['Architecture'], session)
        arch_id = arch.arch_id

        # Remove any already existing recorded files for this package
        q = session.query(PendingContentAssociation)
        q = q.filter_by(package=package['Package'])
        q = q.filter_by(version=package['Version'])
        q = q.filter_by(architecture=arch_id)

        for fullpath in fullpaths:
            (path, filename) = os.path.split(fullpath)

            if path.startswith( "./" ):

            filepath_id = get_or_set_contents_path_id(path, session)
            filename_id = get_or_set_contents_file_id(filename, session)

            pathcache[fullpath] = (filepath_id, filename_id)

        for fullpath, dat in pathcache.items():
            pca = PendingContentAssociation()
            pca.package = package['Package']
            pca.version = package['Version']
            pca.filepath_id = dat[0]
            pca.filename_id = dat[1]
            pca.architecture = arch_id

        # Only commit if we set up the session ourself
    except Exception, e:
        traceback.print_exc()

        # Only rollback if we set up the session ourself

__all__.append('insert_pending_content_paths')

################################################################################
class PolicyQueue(object):
    """ORM object for the C{policy_queue} table."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided in this view.
        return '<PolicyQueue %s>' % self.queue_name

__all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    # NOTE(review): the try/return lines are elided in this view;
    # presumably q.one() is returned, with None on NoResultFound.
    except NoResultFound:

__all__.append('get_policy_queue')

################################################################################
class Priority(object):
    """ORM object for the C{priority} table; instances compare equal or
    unequal to plain priority-name strings."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the body (presumably just 'pass') is elided here.

    def __eq__(self, val):
        # Allow direct comparison against a priority name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

        # NOTE(review): the __repr__ def line is elided in this view.
        return '<Priority %s (%s)>' % (self.priority, self.priority_id)

__all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)

    # NOTE(review): the try/return lines are elided in this view;
    # presumably q.one() is returned, with None on NoResultFound.
    except NoResultFound:

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    q = session.query(Priority)
        # NOTE(review): the surrounding "ret = {}", "for x in q.all():"
        # and "return ret" lines are elided in this view.
        ret[x.priority] = x.priority_id

__all__.append('get_priorities')

################################################################################
class Section(object):
    """ORM object for the C{section} table; instances compare equal or
    unequal to plain section-name strings."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the body (presumably just 'pass') is elided here.

    def __eq__(self, val):
        # Allow direct comparison against a section name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

        # NOTE(review): the __repr__ def line is elided in this view.
        return '<Section %s>' % self.section

__all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)

    # NOTE(review): the try/return lines are elided in this view;
    # presumably q.one() is returned, with None on NoResultFound.
    except NoResultFound:

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    q = session.query(Section)
        # NOTE(review): the surrounding "ret = {}", "for x in q.all():"
        # and "return ret" lines are elided in this view.
        ret[x.section] = x.section_id

__all__.append('get_sections')

################################################################################
class DBSource(object):
    """ORM object for the C{source} table (one source package version)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided in this view.
        return '<DBSource %s (%s)>' % (self.source, self.version)

__all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: package source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    for suite in suites:
        q = session.query(DBSource).filter_by(source=source)

        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]
        # NOTE(review): the loop seeding and expanding the suite set C{s}
        # from the mappings is elided in this view.
            if x[1] in s and x[0] not in s:

        q = q.join(SrcAssociation).join(Suite)
        q = q.filter(Suite.suite_name.in_(s))

        # Reduce the query results to a list of version numbers
        ql = [ j.version for j in q.all() ]

        # Try case (1): an exact version match
        if source_version in ql:

        # Try case (2): a bin-only NMU has the binary revision appended;
        # strip it and compare again.
        from daklib.regexes import re_bin_only_nmu
        orig_source_version = re_bin_only_nmu.sub('', source_version)
        if orig_source_version in ql:

    # No source found so return not ok
    # NOTE(review): the okay/return bookkeeping lines are elided in this view.

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    return session.query(Suite).join(SrcAssociation).join(DBSource).filter_by(source=source).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version to search for, or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    # NOTE(review): the return line is elided in this view; presumably
    # q.all() is returned.

__all__.append('get_sources_from_name')
def get_source_in_suite(source, suite, session=None):
    """
    Returns list of DBSource objects for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}
    """
    q = session.query(SrcAssociation)
    q = q.join('source').filter_by(source=source)
    q = q.join('suite').filter_by(suite_name=suite)

    # NOTE(review): the try: line is elided in this view.
        return q.one().source
    except NoResultFound:

__all__.append('get_source_in_suite')

################################################################################
def add_dsc_to_db(u, filename, session=None):
    """
    Record the .dsc C{filename} of upload C{u} in the database: the source
    row itself, its suite associations, dsc_files entries and uploaders.

    @rtype: tuple
    @return: (dsc_component, dsc_location_id, pfs), where pfs is the list
    of PoolFile objects touched.

    NOTE(review): numerous lines are elided in the copy under review
    (creation of the DBSource/DSCFile/SrcUploader objects and the
    session.add()/flush calls among them); gaps are flagged below.
    """
    entry = u.pkg.files[filename]
    # NOTE(review): "source = DBSource()" and the pfs initialisation are
    # elided here.
    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    # Associate the source with every suite the upload targets.
    for suite_name in u.pkg.changes["distribution"].keys():
        sa = SrcAssociation()
        sa.source_id = source.source_id
        sa.suite_id = get_suite(suite_name).suite_id
        # NOTE(review): session.add(sa) is elided here.

    # Add the source files to the DB (files and dsc_files)
    # NOTE(review): "dscfile = DSCFile()" is elided here.
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        # NOTE(review): "df = DSCFile()" is elided here.
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        # NOTE(review): the loop body locating C{dfentry} is elided here.
        for f, e in u.pkg.files.items():

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

        # If still not found, add it
        if files_id is None:
            # HACK: Force sha1sum etc into dentry
            dentry["sha1sum"] = dfentry["sha1sum"]
            dentry["sha256sum"] = dfentry["sha256sum"]
            poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
            pfs.append(poolfile)
            files_id = poolfile.file_id

        poolfile = get_poolfile_by_id(files_id, session)
        if poolfile is None:
            utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
        pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        for up in u.pkg.dsc["uploaders"].split(","):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    for up in uploader_ids:
        if added_ids.has_key(up):
            utils.warn("Already saw uploader %s for source %s" % (up, source.source))
            # NOTE(review): the continue / added_ids bookkeeping and
            # "su = SrcUploader()" lines are elided here.
        su.maintainer_id = up
        su.source_id = source.source_id

    return dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs.  That info is in 'dbtype', whilst 'type' is
    'deb' for both of them

    NOTE(review): several lines are elided in the copy under review
    ("bin = DBBinary()", the else: framing of the poolfile branch and the
    session.add()/flush calls among them); gaps are flagged below.
    """
    entry = u.pkg.files[filename]

    # NOTE(review): "bin = DBBinary()" is elided here.
    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Work out the pool path and location for this binary.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        # NOTE(review): this reads bin.poolfile_id before it is assigned on
        # the next line -- looks like a stale/dead lookup; confirm against
        # the full source.
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
        # NOTE(review): the else: branch framing is elided here; the two
        # lines below presumably belong to it.
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # Find the one source this binary was built from.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, bin.architecture.arch_string,
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID

    # Add BinAssociations
    for suite_name in u.pkg.changes["distribution"].keys():
        ba = BinAssociation()
        ba.binary_id = bin.binary_id
        ba.suite_id = get_suite(suite_name).suite_id
        # NOTE(review): session.add(ba) is elided here.

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

__all__.append('add_deb_to_db')

################################################################################
class SourceACL(object):
    """ORM stub for the C{source_acl} table."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided in this view.
        return '<SourceACL %s>' % self.source_acl_id

__all__.append('SourceACL')

################################################################################
class SrcAssociation(object):
    """ORM object linking a source package version to a suite."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided in this view.
        return '<SrcAssociation %s (%s, %s)>' % (self.sa_id, self.source, self.suite)

__all__.append('SrcAssociation')

################################################################################
class SrcFormat(object):
    """ORM stub for the C{src_format} table (e.g. '1.0', '3.0 (quilt)')."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided in this view.
        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')

################################################################################
class SrcUploader(object):
    """ORM object linking a source package to one of its uploaders."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided in this view.
        return '<SrcUploader %s>' % self.uploader_id

__all__.append('SrcUploader')

################################################################################
# Mapping of human-readable configuration field names to Suite attribute
# names, consumed by Suite.details() below.
# NOTE(review): one entry appears to be elided between 'Origin' and
# 'Description' in this view -- confirm against the full source.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('CopyDotDak', 'copydotdak'),
                 ('CommentsDir', 'commentsdir'),
                 ('OverrideSuite', 'overridesuite'),
                 ('ChangelogBase', 'changelogbase')]
class Suite(object):
    """ORM object for the C{suite} table; instances compare equal or
    unequal to plain suite-name strings."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the body (presumably 'pass') and the __repr__ def
        # line below are elided in this view.
        return '<Suite %s>' % self.suite_name

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

        # NOTE(review): this is the body of details(); its def line, the
        # "ret = []" initialisation and the "if val is not None:" guard
        # are elided in this view.
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
                ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)

__all__.append('Suite')
def get_suite_architecture(suite, architecture, session=None):
    """
    Returns a SuiteArchitecture object given C{suite} and C{architecture}
    or None if it doesn't exist

    @type suite: str
    @param suite: Suite name to search for

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: SuiteArchitecture
    @return: the SuiteArchitecture object or None
    """
    q = session.query(SuiteArchitecture)
    q = q.join(Architecture).filter_by(arch_string=architecture)
    q = q.join(Suite).filter_by(suite_name=suite)

    # NOTE(review): the try/return lines are elided in this view;
    # presumably q.one() is returned, with None on NoResultFound.
    except NoResultFound:

__all__.append('get_suite_architecture')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)

    # NOTE(review): the try/return lines are elided in this view;
    # presumably q.one() is returned, with None on NoResultFound.
    except NoResultFound:

__all__.append('get_suite')

################################################################################
class SuiteArchitecture(object):
    """ORM object linking a suite to an architecture it carries."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided in this view.
        return '<SuiteArchitecture (%s, %s)>' % (self.suite_id, self.arch_id)

__all__.append('SuiteArchitecture')
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry
    (Default False)

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry
    (Default False)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """
    q = session.query(Architecture)
    q = q.join(SuiteArchitecture)
    q = q.join(Suite).filter_by(suite_name=suite)

    # The skipsrc/skipall parameters were documented but both filters were
    # applied unconditionally; honour the flags as documented.
    if skipsrc:
        q = q.filter(Architecture.arch_string != 'source')

    if skipall:
        q = q.filter(Architecture.arch_string != 'all')

    q = q.order_by('arch_string')

    return q.all()
2619 __all__.append('get_suite_architectures')
2621 ################################################################################
class SuiteSrcFormat(object):
    """ORM object linking a suite to a source format it accepts."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided in this view.
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)

__all__.append('SuiteSrcFormat')
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """
    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    # NOTE(review): the return line is elided in this view; presumably
    # q.all() is returned.

__all__.append('get_suite_src_formats')
2657 ################################################################################
2660 def __init__(self, *args, **kwargs):
2663 def __eq__(self, val):
2664 if isinstance(val, str):
2665 return (self.uid == val)
2666 # This signals to use the normal comparison operator
2667 return NotImplemented
2669 def __ne__(self, val):
2670 if isinstance(val, str):
2671 return (self.uid != val)
2672 # This signals to use the normal comparison operator
2673 return NotImplemented
2676 return '<Uid %s (%s)>' % (self.uid, self.name)
2678 __all__.append('Uid')
@session_wrapper
def add_database_user(uidname, session=None):
    """
    Adds a database user

    @type uidname: string
    @param uidname: The uid of the user to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    """
    # NOTE(review): PostgreSQL generally does not substitute bound parameters
    # inside DDL statements such as CREATE USER -- confirm this actually
    # creates the intended role on the target server version.
    session.execute("CREATE USER :uid", {'uid': uidname})
    session.commit_or_flush()
2700 __all__.append('add_database_user')
@session_wrapper
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """

    try:
        ret = session.query(Uid).filter_by(uid=uidname).one()
    except NoResultFound:
        # No such uid yet: insert a fresh row and hand that back instead.
        new_uid = Uid()
        new_uid.uid = uidname
        session.add(new_uid)
        session.commit_or_flush()
        ret = new_uid

    return ret
2734 __all__.append('get_or_set_uid')
@session_wrapper
def get_uid_from_fingerprint(fpr, session=None):
    """
    Returns the Uid attached to the given key fingerprint, or None when the
    fingerprint is unknown.

    @type fpr: string
    @param fpr: The fingerprint to look up

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)
    """
    q = session.query(Uid).join(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None
2746 __all__.append('get_uid_from_fingerprint')
2748 ################################################################################
class UploadBlock(object):
    """
    ORM row for the upload_blocks table; attributes are filled in by the
    SQLAlchemy mapper set up in DBConn.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<UploadBlock {0} ({1})>'.format(self.source, self.upload_block_id)
2757 __all__.append('UploadBlock')
2759 ################################################################################
class DBConn(object):
    """
    database module init.
    """
    def __init__(self, *args, **kwargs):
        # Borg pattern: every instance shares one state dict, so repeated
        # DBConn() calls all hand back the same engine/mapper setup.
        # NOTE(review): the '__shared_state' class attribute is defined on
        # this class but not visible in this excerpt.
        self.__dict__ = self.__shared_state

        # Build the engine, reflect the tables and wire the mappers exactly
        # once, no matter how many DBConn objects get created.
        if not getattr(self, 'initialised', False):
            self.initialised = True
            # Passing a 'debug' keyword switches on SQL echo (see
            # create_engine(..., echo=self.debug) below).
            self.debug = kwargs.has_key('debug')

    def __setuptables(self):
        # Names of the tables to reflect from the live database; each is
        # autoloaded below and exposed as self.tbl_<name>.
        # NOTE(review): the opening of this sequence (e.g. "tables = (") and
        # many entries are not visible in this excerpt -- confirm against the
        # full file.
        'build_queue_files',
        'content_associations',
        'content_file_names',
        'content_file_paths',
        'changes_pending_binaries',
        'changes_pending_files',
        'changes_pending_files_map',
        'changes_pending_source',
        'changes_pending_source_files',
        'changes_pool_files',
        'pending_content_associations',
        'suite_architectures',
        'suite_src_formats',
        'suite_build_queue_copy',

        # Reflect each table's schema from the database (autoload) and
        # attach it to self for use by __setupmappers().
        for table_name in tables:
            table = Table(table_name, self.db_meta, autoload=True)
            setattr(self, 'tbl_%s' % table_name, table)

    def __setupmappers(self):
        # Classical SQLAlchemy mapping: bind each ORM class to its reflected
        # table.  The 'properties' dicts rename columns to friendlier
        # attribute names (e.g. id -> arch_id) and declare the relations
        # between the classes.
        mapper(Architecture, self.tbl_architecture,
               properties = dict(arch_id = self.tbl_architecture.c.id))

        mapper(Archive, self.tbl_archive,
               properties = dict(archive_id = self.tbl_archive.c.id,
                                 archive_name = self.tbl_archive.c.name))

        mapper(BinAssociation, self.tbl_bin_associations,
               properties = dict(ba_id = self.tbl_bin_associations.c.id,
                                 suite_id = self.tbl_bin_associations.c.suite,
                                 suite = relation(Suite),
                                 binary_id = self.tbl_bin_associations.c.bin,
                                 binary = relation(DBBinary)))

        mapper(BuildQueue, self.tbl_build_queue,
               properties = dict(queue_id = self.tbl_build_queue.c.id))

        mapper(BuildQueueFile, self.tbl_build_queue_files,
               properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                                 poolfile = relation(PoolFile, backref='buildqueueinstances')))

        mapper(DBBinary, self.tbl_binaries,
               properties = dict(binary_id = self.tbl_binaries.c.id,
                                 package = self.tbl_binaries.c.package,
                                 version = self.tbl_binaries.c.version,
                                 maintainer_id = self.tbl_binaries.c.maintainer,
                                 maintainer = relation(Maintainer),
                                 source_id = self.tbl_binaries.c.source,
                                 source = relation(DBSource),
                                 arch_id = self.tbl_binaries.c.architecture,
                                 architecture = relation(Architecture),
                                 poolfile_id = self.tbl_binaries.c.file,
                                 poolfile = relation(PoolFile),
                                 binarytype = self.tbl_binaries.c.type,
                                 fingerprint_id = self.tbl_binaries.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 install_date = self.tbl_binaries.c.install_date,
                                 # explicit primaryjoin: associations for this binary
                                 binassociations = relation(BinAssociation,
                                                            primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))

        mapper(BinaryACL, self.tbl_binary_acl,
               properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))

        mapper(BinaryACLMap, self.tbl_binary_acl_map,
               properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
                                 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Component, self.tbl_component,
               properties = dict(component_id = self.tbl_component.c.id,
                                 component_name = self.tbl_component.c.name))

        mapper(DBConfig, self.tbl_config,
               properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
               properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                                 source_id = self.tbl_dsc_files.c.source,
                                 source = relation(DBSource),
                                 poolfile_id = self.tbl_dsc_files.c.file,
                                 poolfile = relation(PoolFile)))

        mapper(PoolFile, self.tbl_files,
               properties = dict(file_id = self.tbl_files.c.id,
                                 filesize = self.tbl_files.c.size,
                                 location_id = self.tbl_files.c.location,
                                 location = relation(Location)))

        mapper(Fingerprint, self.tbl_fingerprint,
               properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                                 uid_id = self.tbl_fingerprint.c.uid,
                                 uid = relation(Uid),
                                 keyring_id = self.tbl_fingerprint.c.keyring,
                                 keyring = relation(Keyring),
                                 source_acl = relation(SourceACL),
                                 binary_acl = relation(BinaryACL)))

        mapper(Keyring, self.tbl_keyrings,
               properties = dict(keyring_name = self.tbl_keyrings.c.name,
                                 keyring_id = self.tbl_keyrings.c.id))

        mapper(DBChange, self.tbl_changes,
               properties = dict(change_id = self.tbl_changes.c.id,
                                 # many-to-many via the changes_pool_files table
                                 poolfiles = relation(PoolFile,
                                                      secondary=self.tbl_changes_pool_files,
                                                      backref="changeslinks"),
                                 seen = self.tbl_changes.c.seen,
                                 source = self.tbl_changes.c.source,
                                 binaries = self.tbl_changes.c.binaries,
                                 architecture = self.tbl_changes.c.architecture,
                                 distribution = self.tbl_changes.c.distribution,
                                 urgency = self.tbl_changes.c.urgency,
                                 maintainer = self.tbl_changes.c.maintainer,
                                 changedby = self.tbl_changes.c.changedby,
                                 date = self.tbl_changes.c.date,
                                 version = self.tbl_changes.c.version,
                                 # many-to-many via changes_pending_files_map
                                 files = relation(ChangePendingFile,
                                                  secondary=self.tbl_changes_pending_files_map,
                                                  backref="changesfile"),
                                 in_queue_id = self.tbl_changes.c.in_queue,
                                 in_queue = relation(PolicyQueue,
                                                     primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                                 approved_for_id = self.tbl_changes.c.approved_for))

        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
               properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))

        mapper(ChangePendingFile, self.tbl_changes_pending_files,
               properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
                                 filename = self.tbl_changes_pending_files.c.filename,
                                 size = self.tbl_changes_pending_files.c.size,
                                 md5sum = self.tbl_changes_pending_files.c.md5sum,
                                 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
                                 sha256sum = self.tbl_changes_pending_files.c.sha256sum))

        mapper(ChangePendingSource, self.tbl_changes_pending_source,
               properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
                                 change = relation(DBChange),
                                 # two relations to Maintainer need explicit joins
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                                 changedby = relation(Maintainer,
                                                      primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
                                 fingerprint = relation(Fingerprint),
                                 source_files = relation(ChangePendingFile,
                                                         secondary=self.tbl_changes_pending_source_files,
                                                         backref="pending_sources")))
        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
               properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                                 keyring = relation(Keyring, backref="keyring_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Location, self.tbl_location,
               properties = dict(location_id = self.tbl_location.c.id,
                                 component_id = self.tbl_location.c.component,
                                 component = relation(Component),
                                 archive_id = self.tbl_location.c.archive,
                                 archive = relation(Archive),
                                 archive_type = self.tbl_location.c.type))

        mapper(Maintainer, self.tbl_maintainer,
               properties = dict(maintainer_id = self.tbl_maintainer.c.id))

        mapper(NewComment, self.tbl_new_comments,
               properties = dict(comment_id = self.tbl_new_comments.c.id))

        mapper(Override, self.tbl_override,
               properties = dict(suite_id = self.tbl_override.c.suite,
                                 suite = relation(Suite),
                                 component_id = self.tbl_override.c.component,
                                 component = relation(Component),
                                 priority_id = self.tbl_override.c.priority,
                                 priority = relation(Priority),
                                 section_id = self.tbl_override.c.section,
                                 section = relation(Section),
                                 overridetype_id = self.tbl_override.c.type,
                                 overridetype = relation(OverrideType)))

        mapper(OverrideType, self.tbl_override_type,
               properties = dict(overridetype = self.tbl_override_type.c.type,
                                 overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
               properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))

        mapper(Priority, self.tbl_priority,
               properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
               properties = dict(section_id = self.tbl_section.c.id))

        mapper(DBSource, self.tbl_source,
               properties = dict(source_id = self.tbl_source.c.id,
                                 version = self.tbl_source.c.version,
                                 maintainer_id = self.tbl_source.c.maintainer,
                                 # maintainer vs changedby both point at the
                                 # maintainer table, hence explicit primaryjoins
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_source.c.maintainer==self.tbl_maintainer.c.id)),
                                 poolfile_id = self.tbl_source.c.file,
                                 poolfile = relation(PoolFile),
                                 fingerprint_id = self.tbl_source.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 changedby_id = self.tbl_source.c.changedby,
                                 changedby = relation(Maintainer,
                                                      primaryjoin=(self.tbl_source.c.changedby==self.tbl_maintainer.c.id)),
                                 srcfiles = relation(DSCFile,
                                                     primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                 srcassociations = relation(SrcAssociation,
                                                            primaryjoin=(self.tbl_source.c.id==self.tbl_src_associations.c.source)),
                                 srcuploaders = relation(SrcUploader)))

        mapper(SourceACL, self.tbl_source_acl,
               properties = dict(source_acl_id = self.tbl_source_acl.c.id))

        mapper(SrcAssociation, self.tbl_src_associations,
               properties = dict(sa_id = self.tbl_src_associations.c.id,
                                 suite_id = self.tbl_src_associations.c.suite,
                                 suite = relation(Suite),
                                 source_id = self.tbl_src_associations.c.source,
                                 source = relation(DBSource)))

        mapper(SrcFormat, self.tbl_src_format,
               properties = dict(src_format_id = self.tbl_src_format.c.id,
                                 format_name = self.tbl_src_format.c.format_name))

        mapper(SrcUploader, self.tbl_src_uploaders,
               properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
                                 source_id = self.tbl_src_uploaders.c.source,
                                 source = relation(DBSource,
                                                   primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
                                 maintainer_id = self.tbl_src_uploaders.c.maintainer,
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))

        mapper(Suite, self.tbl_suite,
               properties = dict(suite_id = self.tbl_suite.c.id,
                                 policy_queue = relation(PolicyQueue),
                                 copy_queues = relation(BuildQueue, secondary=self.tbl_suite_build_queue_copy)))

        mapper(SuiteArchitecture, self.tbl_suite_architectures,
               properties = dict(suite_id = self.tbl_suite_architectures.c.suite,
                                 suite = relation(Suite, backref='suitearchitectures'),
                                 arch_id = self.tbl_suite_architectures.c.architecture,
                                 architecture = relation(Architecture)))

        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
               properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
                                 suite = relation(Suite, backref='suitesrcformats'),
                                 src_format_id = self.tbl_suite_src_formats.c.src_format,
                                 src_format = relation(SrcFormat)))

        mapper(Uid, self.tbl_uid,
               properties = dict(uid_id = self.tbl_uid.c.id,
                                 fingerprint = relation(Fingerprint)))

        mapper(UploadBlock, self.tbl_upload_blocks,
               properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                                 fingerprint = relation(Fingerprint, backref="uploadblocks"),
                                 uid = relation(Uid, backref="uploadblocks")))

    ## Connection functions
    def __createconn(self):
        from config import Config
        # Build a PostgreSQL connection string from the DB::* config keys.
        # NOTE(review): the assignment of 'cnf' (presumably cnf = Config())
        # and the if/else distinguishing the TCP variant from the
        # unix-socket variant are not visible in this excerpt.
        # TCP/IP form: postgres://host[:port]/dbname
        connstr = "postgres://%s" % cnf["DB::Host"]
        if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
            connstr += ":%s" % cnf["DB::Port"]
        connstr += "/%s" % cnf["DB::Name"]
        # Unix-socket form: postgres:///dbname[?port=...]
        connstr = "postgres:///%s" % cnf["DB::Name"]
        if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
            connstr += "?port=%s" % cnf["DB::Port"]

        # echo=self.debug: log every SQL statement when DBConn(debug=...)
        self.db_pg = create_engine(connstr, echo=self.debug)
        self.db_meta = MetaData()
        self.db_meta.bind = self.db_pg
        # NOTE(review): the remaining sessionmaker() keyword arguments are
        # not visible in this excerpt.
        self.db_smaker = sessionmaker(bind=self.db_pg,
        self.__setuptables()
        self.__setupmappers()

        # NOTE(review): presumably the body of a session() accessor -- the
        # enclosing def line is not visible in this excerpt.
        return self.db_smaker()
3096 __all__.append('DBConn')