5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
41 from datetime import datetime, timedelta
42 from errno import ENOENT
43 from tempfile import mkstemp, mkdtemp
45 from inspect import getargspec
48 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
49 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, backref
50 from sqlalchemy import types as sqltypes
52 # Don't remove this, we re-export the exceptions to scripts which import us
53 from sqlalchemy.exc import *
54 from sqlalchemy.orm.exc import NoResultFound
56 # Only import Config until Queue stuff is changed to store its config
58 from config import Config
59 from textutils import fix_maintainer
60 from dak_exceptions import NoSourceFieldError
62 # suppress some deprecation warnings in squeeze related to sqlalchemy
64 warnings.filterwarnings('ignore', \
65 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
67 # TODO: sqlalchemy needs some extra configuration to correctly reflect
68 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
69 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
72 ################################################################################
74 # Patch in support for the debversion field type so that it works during
78 # that is for sqlalchemy 0.6
79 UserDefinedType = sqltypes.UserDefinedType
81 # this one for sqlalchemy 0.5
82 UserDefinedType = sqltypes.TypeEngine
# DebVersion: custom SQLAlchemy column type mapping PostgreSQL's
# 'debversion' (Debian package version) column type.
# NOTE(review): this excerpt is a sampled listing — lines are missing
# wherever the inline numbering jumps, so the method bodies below are
# incomplete fragments.
84 class DebVersion(UserDefinedType):
# Returns the SQL type name for DDL — presumably "debversion"; the
# return statement is not visible here, confirm against the full source.
85 def get_col_spec(self):
88 def bind_processor(self, dialect):
91 # ' = None' is needed for sqlalchemy 0.5:
92 def result_processor(self, dialect, coltype = None):
# Register DebVersion for table reflection under the dialect module that
# matches the installed SQLAlchemy; only the 0.5/0.6 series is supported.
95 sa_major_version = sqlalchemy.__version__[0:3]
96 if sa_major_version in ["0.5", "0.6"]:
97 from sqlalchemy.databases import postgres
98 postgres.ischema_names['debversion'] = DebVersion
# Any other SQLAlchemy version is an unsupported configuration.
100 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
102 ################################################################################
# Public API of this module; extended with __all__.append() after each
# definition below.  The two exception names are re-exported from
# sqlalchemy.exc for scripts that import us.
104 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
106 ################################################################################
# Decorator used by most get_*()/get_or_set_*() helpers in this module:
# guarantees the wrapped function always has a usable SQLAlchemy session.
# NOTE(review): sampled listing — the else-branches and the try/finally
# cleanup are among the missing lines (see the inline numbering gaps).
108 def session_wrapper(fn):
110 Wrapper around common ".., session=None):" handling. If the wrapped
111 function is called without passing 'session', we create a local one
112 and destroy it when the function ends.
114 Also attaches a commit_or_flush method to the session; if we created a
115 local session, this is a synonym for session.commit(), otherwise it is a
116 synonym for session.flush().
119 def wrapped(*args, **kwargs):
# True when this wrapper itself created the session (and therefore owns
# the transaction and must commit/close it).
120 private_transaction = False
122 # Find the session object
123 session = kwargs.get('session')
# Compares the positional-argument count against the wrapped function's
# declared parameter count (getargspec) to decide whether a session was
# passed positionally in the last slot.
126 if len(args) <= len(getargspec(fn)[0]) - 1:
127 # No session specified as last argument or in kwargs
128 private_transaction = True
129 session = kwargs['session'] = DBConn().session()
131 # Session is last argument in args
135 session = args[-1] = DBConn().session()
136 private_transaction = True
# commit_or_flush: commit when we own the session, flush (leaving the
# commit to the caller) when the caller supplied it.
138 if private_transaction:
139 session.commit_or_flush = session.commit
141 session.commit_or_flush = session.flush
144 return fn(*args, **kwargs)
146 if private_transaction:
147 # We created a session; close it.
# Preserve metadata for introspection (pre-functools.wraps style).
150 wrapped.__doc__ = fn.__doc__
151 wrapped.func_name = fn.func_name
155 __all__.append('session_wrapper')
157 ################################################################################
# ORM-mapped row of the 'architecture' table (mapping is done elsewhere
# in this file by DBConn).
159 class Architecture(object):
160 def __init__(self, arch_string = None, description = None):
161 self.arch_string = arch_string
162 self.description = description
# Allow comparing an Architecture directly against a plain string
# (arch == "amd64"); non-string operands fall back to the default
# comparison via NotImplemented.
# NOTE(review): __eq__ is defined without __hash__ — under Python 2 the
# class stays hashable by identity, but this is worth confirming if the
# objects are used in sets/dicts.
164 def __eq__(self, val):
165 if isinstance(val, str):
166 return (self.arch_string== val)
167 # This signals to use the normal comparison operator
168 return NotImplemented
170 def __ne__(self, val):
171 if isinstance(val, str):
172 return (self.arch_string != val)
173 # This signals to use the normal comparison operator
174 return NotImplemented
177 return '<Architecture %s>' % self.arch_string
179 __all__.append('Architecture')
# Presumably decorated with @session_wrapper (the decorator line falls in
# a gap of this listing) — session handling follows that contract.
182 def get_architecture(architecture, session=None):
184 Returns database id for given C{architecture}.
186 @type architecture: string
187 @param architecture: The name of the architecture
189 @type session: Session
190 @param session: Optional SQLA session object (a temporary one will be
191 generated if not supplied)
194 @return: Architecture object for the given arch (None if not present)
197 q = session.query(Architecture).filter_by(arch_string=architecture)
201 except NoResultFound:
204 __all__.append('get_architecture')
206 # TODO: should be removed because the implementation is too trivial
# Thin convenience wrapper: resolves the Architecture row and returns its
# relation-mapped 'suites' collection.  Note: raises AttributeError if the
# architecture does not exist (get_architecture returns None).
208 def get_architecture_suites(architecture, session=None):
210 Returns list of Suite objects for given C{architecture} name
212 @type architecture: str
213 @param architecture: Architecture name to search for
215 @type session: Session
216 @param session: Optional SQL session object (a temporary one will be
217 generated if not supplied)
220 @return: list of Suite objects for the given name (may be empty)
223 return get_architecture(architecture, session).suites
225 __all__.append('get_architecture_suites')
227 ################################################################################
# ORM-mapped row of the 'archive' table.
229 class Archive(object):
230 def __init__(self, *args, **kwargs):
234 return '<Archive %s>' % self.archive_name
236 __all__.append('Archive')
# Case-insensitive lookup of an archive by name (name is lower-cased
# before querying).  Session handling presumably via @session_wrapper
# (decorator line not visible in this sampled listing).
239 def get_archive(archive, session=None):
241 returns database id for given C{archive}.
243 @type archive: string
244 @param archive: the name of the archive
246 @type session: Session
247 @param session: Optional SQLA session object (a temporary one will be
248 generated if not supplied)
251 @return: Archive object for the given name (None if not present)
254 archive = archive.lower()
256 q = session.query(Archive).filter_by(archive_name=archive)
260 except NoResultFound:
263 __all__.append('get_archive')
265 ################################################################################
# ORM-mapped row linking a binary package to a suite
# (bin_associations table).
267 class BinAssociation(object):
268 def __init__(self, *args, **kwargs):
272 return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
274 __all__.append('BinAssociation')
276 ################################################################################
# ORM-mapped row of the bin_contents table: one file shipped by one
# binary package.
278 class BinContents(object):
279 def __init__(self, *args, **kwargs):
283 return '<BinContents (%s, %s)>' % (self.binary, self.filename)
285 __all__.append('BinContents')
287 ################################################################################
# ORM-mapped row of the 'binaries' table (a .deb/.udeb known to the DB).
289 class DBBinary(object):
290 def __init__(self, *args, **kwargs):
294 return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)
296 __all__.append('DBBinary')
# Accessor helpers for DBBinary.  NOTE(review): sampled listing — the
# @session_wrapper decorator lines and several try/return lines fall in
# the numbering gaps.
299 def get_suites_binary_in(package, session=None):
301 Returns list of Suite objects which given C{package} name is in
304 @param package: DBBinary package name to search for
307 @return: list of Suite objects for the given package
310 return session.query(Suite).join(BinAssociation).join(DBBinary).filter_by(package=package).all()
312 __all__.append('get_suites_binary_in')
315 def get_binary_from_id(binary_id, session=None):
317 Returns DBBinary object for given C{id}
320 @param binary_id: Id of the required binary
322 @type session: Session
323 @param session: Optional SQLA session object (a temporary one will be
324 generated if not supplied)
327 @return: DBBinary object for the given binary (None if not present)
330 q = session.query(DBBinary).filter_by(binary_id=binary_id)
334 except NoResultFound:
337 __all__.append('get_binary_from_id')
# Filters are applied incrementally: name always, then optional version
# and architecture restrictions.
340 def get_binaries_from_name(package, version=None, architecture=None, session=None):
342 Returns list of DBBinary objects for given C{package} name
345 @param package: DBBinary package name to search for
347 @type version: str or None
348 @param version: Version to search for (or None)
350 @type architecture: str, list or None
351 @param architecture: Architectures to limit to (or None if no limit)
353 @type session: Session
354 @param session: Optional SQL session object (a temporary one will be
355 generated if not supplied)
358 @return: list of DBBinary objects for the given name (may be empty)
361 q = session.query(DBBinary).filter_by(package=package)
363 if version is not None:
364 q = q.filter_by(version=version)
366 if architecture is not None:
# A single architecture string is normalised to a one-element list so
# the IN clause below works uniformly.
367 if not isinstance(architecture, list):
368 architecture = [architecture]
369 q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))
375 __all__.append('get_binaries_from_name')
378 def get_binaries_from_source_id(source_id, session=None):
380 Returns list of DBBinary objects for given C{source_id}
383 @param source_id: source_id to search for
385 @type session: Session
386 @param session: Optional SQL session object (a temporary one will be
387 generated if not supplied)
390 @return: list of DBBinary objects for the given name (may be empty)
393 return session.query(DBBinary).filter_by(source_id=source_id).all()
395 __all__.append('get_binaries_from_source_id')
398 def get_binary_from_name_suite(package, suitename, session=None):
399 ### For dak examine-package
400 ### XXX: Doesn't use object API yet
# SECURITY NOTE(review): the SQL below is built with %-interpolation of
# 'package' and 'suitename' rather than bound parameters — unsafe if the
# inputs are ever untrusted.  Compare get_binary_components() below,
# which uses bound parameters correctly.
402 sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
403 FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
404 WHERE b.package='%(package)s'
406 AND fi.location = l.id
407 AND l.component = c.id
410 AND su.suite_name %(suitename)s
411 ORDER BY b.version DESC"""
413 return session.execute(sql % {'package': package, 'suitename': suitename})
415 __all__.append('get_binary_from_name_suite')
418 def get_binary_components(package, suitename, arch, session=None):
419 # Check for packages that have moved from one component to another
420 query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
421 WHERE b.package=:package AND s.suite_name=:suitename
422 AND (a.arch_string = :arch OR a.arch_string = 'all')
423 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
424 AND f.location = l.id
425 AND l.component = c.id
# Bound parameters (:package etc.) — the safe pattern.
428 vals = {'package': package, 'suitename': suitename, 'arch': arch}
430 return session.execute(query, vals)
432 __all__.append('get_binary_components')
434 ################################################################################
# ORM-mapped row of the binary_acl table (upload-permission ACL for
# binary uploads).
436 class BinaryACL(object):
437 def __init__(self, *args, **kwargs):
441 return '<BinaryACL %s>' % self.binary_acl_id
443 __all__.append('BinaryACL')
445 ################################################################################
# ORM-mapped row of the binary_acl_map table.
447 class BinaryACLMap(object):
448 def __init__(self, *args, **kwargs):
452 return '<BinaryACLMap %s>' % self.binary_acl_map_id
454 __all__.append('BinaryACLMap')
456 ################################################################################
461 ArchiveDir "%(archivepath)s";
462 OverrideDir "%(overridedir)s";
463 CacheDir "%(cachedir)s";
468 Packages::Compress ". bzip2 gzip";
469 Sources::Compress ". bzip2 gzip";
474 bindirectory "incoming"
479 BinOverride "override.sid.all3";
480 BinCacheDB "packages-accepted.db";
482 FileList "%(filelist)s";
485 Packages::Extensions ".deb .udeb";
488 bindirectory "incoming/"
491 BinOverride "override.sid.all3";
492 SrcOverride "override.sid.all3.src";
493 FileList "%(filelist)s";
# ORM-mapped build queue (e.g. buildd incoming).  Owns the on-disk queue
# directory and its apt metadata.  NOTE(review): this sampled listing is
# missing many lines inside the methods (try/except/finally scaffolding,
# else-branches); comments describe only the visible statements.
497 class BuildQueue(object):
498 def __init__(self, *args, **kwargs):
502 return '<BuildQueue %s>' % self.queue_name
# Regenerate Packages/Sources/Release metadata for this queue directory
# by shelling out to apt-ftparchive and gpg.
# SECURITY NOTE(review): the os.system() invocations below interpolate
# configuration values and instance attributes into shell command
# strings; acceptable only because all inputs come from trusted dak
# configuration.
504 def write_metadata(self, starttime, force=False):
505 # Do we write out metafiles?
506 if not (force or self.generate_metadata):
509 session = DBConn().session().object_session(self)
# Temp-file descriptors/names; initialised so the (not visible here)
# cleanup path can test them safely.
511 fl_fd = fl_name = ac_fd = ac_name = None
513 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
514 startdir = os.getcwd()
517 # Grab files we want to include
# "Newer" = still within the stay_of_execution grace period.
518 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
519 # Write file list with newer files
520 (fl_fd, fl_name) = mkstemp()
522 os.write(fl_fd, '%s\n' % n.fullpath)
527 # Write minimal apt.conf
528 # TODO: Remove hardcoding from template
529 (ac_fd, ac_name) = mkstemp()
530 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
532 'cachedir': cnf["Dir::Cache"],
533 'overridedir': cnf["Dir::Override"],
537 # Run apt-ftparchive generate
538 os.chdir(os.path.dirname(ac_name))
539 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
541 # Run apt-ftparchive release
542 # TODO: Eww - fix this
543 bname = os.path.basename(self.path)
547 # We have to remove the Release file otherwise it'll be included in the
550 os.unlink(os.path.join(bname, 'Release'))
554 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
556 # Crude hack with open and append, but this whole section is and should be redone.
557 if self.notautomatic:
558 release=open("Release", "a")
559 release.write("NotAutomatic: yes")
# Sign the Release file if a signing key is configured.
# (cnf.has_key() is apt_pkg's dict-like API.)
564 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
565 if cnf.has_key("Dinstall::SigningPubKeyring"):
566 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
568 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
570 # Move the files if we got this far
571 os.rename('Release', os.path.join(bname, 'Release'))
573 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
575 # Clean up any left behind files
# Expire queue entries past their stay_of_execution, delete their files,
# and prune metadata/symlinks that no longer match a DB row.
602 def clean_and_update(self, starttime, Logger, dryrun=False):
603 """WARNING: This routine commits for you"""
604 session = DBConn().session().object_session(self)
606 if self.generate_metadata and not dryrun:
607 self.write_metadata(starttime)
609 # Grab files older than our execution time
610 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
616 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
618 Logger.log(["I: Removing %s from the queue" % o.fullpath])
619 os.unlink(o.fullpath)
622 # If it wasn't there, don't worry
623 if e.errno == ENOENT:
626 # TODO: Replace with proper logging call
627 Logger.log(["E: Could not remove %s" % o.fullpath])
# Second pass: walk the on-disk directory looking for files without a
# matching BuildQueueFile row (apt metadata files are skipped).
634 for f in os.listdir(self.path):
635 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
639 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
640 except NoResultFound:
641 fp = os.path.join(self.path, f)
643 Logger.log(["I: Would remove unused link %s" % fp])
645 Logger.log(["I: Removing unused link %s" % fp])
649 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
651 def add_file_from_pool(self, poolfile):
652 """Copies a file into the pool. Assumes that the PoolFile object is
653 attached to the same SQLAlchemy session as the Queue object is.
655 The caller is responsible for committing after calling this function."""
656 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
658 # Check if we have a file of this name or this ID already
659 for f in self.queuefiles:
660 if f.fileid is not None and f.fileid == poolfile.file_id or \
661 f.poolfile.filename == poolfile_basename:
662 # In this case, update the BuildQueueFile entry so we
663 # don't remove it too early
664 f.lastused = datetime.now()
665 DBConn().session().object_session(poolfile).add(f)
668 # Prepare BuildQueueFile object
669 qf = BuildQueueFile()
670 qf.build_queue_id = self.queue_id
671 qf.lastused = datetime.now()
672 qf.filename = poolfile_basename
674 targetpath = poolfile.fullpath
675 queuepath = os.path.join(self.path, poolfile_basename)
679 # We need to copy instead of symlink
681 utils.copy(targetpath, queuepath)
682 # NULL in the fileid field implies a copy
685 os.symlink(targetpath, queuepath)
686 qf.fileid = poolfile.file_id
690 # Get the same session as the PoolFile is using and add the qf to it
691 DBConn().session().object_session(poolfile).add(qf)
696 __all__.append('BuildQueue')
# Lookup of a BuildQueue row by queue name.  Session handling presumably
# via @session_wrapper (decorator line not visible in this sampled
# listing); the "creating it if it does not [exist]" claim in the
# docstring cannot be confirmed from the visible lines.
699 def get_build_queue(queuename, session=None):
701 Returns BuildQueue object for given C{queue name}, creating it if it does not
704 @type queuename: string
705 @param queuename: The name of the queue
707 @type session: Session
708 @param session: Optional SQLA session object (a temporary one will be
709 generated if not supplied)
712 @return: BuildQueue object for the given queue
715 q = session.query(BuildQueue).filter_by(queue_name=queuename)
719 except NoResultFound:
722 __all__.append('get_build_queue')
724 ################################################################################
# ORM-mapped row of build_queue_files: one file present in a BuildQueue
# directory.  The bare return below belongs to a fullpath property whose
# def line falls in a gap of this listing: it joins the owning queue's
# path with this row's filename.
726 class BuildQueueFile(object):
727 def __init__(self, *args, **kwargs):
731 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
735 return os.path.join(self.buildqueue.path, self.filename)
738 __all__.append('BuildQueueFile')
740 ################################################################################
# ORM-mapped rows tracking not-yet-accepted uploads: pending binaries,
# pending files and pending source, respectively.  All three are plain
# mapper target classes with identity-only repr.
742 class ChangePendingBinary(object):
743 def __init__(self, *args, **kwargs):
747 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
749 __all__.append('ChangePendingBinary')
751 ################################################################################
753 class ChangePendingFile(object):
754 def __init__(self, *args, **kwargs):
758 return '<ChangePendingFile %s>' % self.change_pending_file_id
760 __all__.append('ChangePendingFile')
762 ################################################################################
764 class ChangePendingSource(object):
765 def __init__(self, *args, **kwargs):
769 return '<ChangePendingSource %s>' % self.change_pending_source_id
771 __all__.append('ChangePendingSource')
773 ################################################################################
# ORM-mapped row of the 'component' table (main/contrib/non-free).
# Supports direct comparison against a plain string, mirroring
# Architecture above.  NOTE(review): __eq__ without __hash__ — same
# caveat as Architecture.
775 class Component(object):
776 def __init__(self, *args, **kwargs):
779 def __eq__(self, val):
780 if isinstance(val, str):
781 return (self.component_name == val)
782 # This signals to use the normal comparison operator
783 return NotImplemented
785 def __ne__(self, val):
786 if isinstance(val, str):
787 return (self.component_name != val)
788 # This signals to use the normal comparison operator
789 return NotImplemented
792 return '<Component %s>' % self.component_name
795 __all__.append('Component')
# Case-insensitive lookup of a component by name.
798 def get_component(component, session=None):
800 Returns database id for given C{component}.
802 @type component: string
803 @param component: The name of the component
806 @return: the database id for the given component
809 component = component.lower()
811 q = session.query(Component).filter_by(component_name=component)
815 except NoResultFound:
818 __all__.append('get_component')
820 ################################################################################
# ORM-mapped row of the 'config' table (key/value configuration stored
# in the database).
822 class DBConfig(object):
823 def __init__(self, *args, **kwargs):
827 return '<DBConfig %s>' % self.name
829 __all__.append('DBConfig')
831 ################################################################################
# Find-or-create for content_file_names rows.  NOTE(review): sampled
# listing — the try line, session.add(cf) and the final return fall in
# the numbering gaps.
834 def get_or_set_contents_file_id(filename, session=None):
836 Returns database id for given filename.
838 If no matching file is found, a row is inserted.
840 @type filename: string
841 @param filename: The filename
842 @type session: SQLAlchemy
843 @param session: Optional SQL session object (a temporary one will be
844 generated if not supplied). If not passed, a commit will be performed at
845 the end of the function, otherwise the caller is responsible for committing.
848 @return: the database id for the given filename
851 q = session.query(ContentFilename).filter_by(filename=filename)
854 ret = q.one().cafilename_id
855 except NoResultFound:
856 cf = ContentFilename()
857 cf.filename = filename
# commit_or_flush comes from session_wrapper: commit for a private
# session, flush otherwise (so the new row gets its id either way).
859 session.commit_or_flush()
860 ret = cf.cafilename_id
864 __all__.append('get_or_set_contents_file_id')
# Streams the Contents listing for a suite/overridetype (optionally one
# section) as a raw SQL result; bound parameters throughout.
867 def get_contents(suite, overridetype, section=None, session=None):
869 Returns contents for a suite / overridetype combination, limiting
870 to a section if not None.
873 @param suite: Suite object
875 @type overridetype: OverrideType
876 @param overridetype: OverrideType object
878 @type section: Section
879 @param section: Optional section object to limit results to
881 @type session: SQLAlchemy
882 @param session: Optional SQL session object (a temporary one will be
883 generated if not supplied)
886 @return: ResultsProxy object set up to return tuples of (filename, section,
890 # find me all of the contents for a given suite
891 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
895 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
896 JOIN content_file_names n ON (c.filename=n.id)
897 JOIN binaries b ON (b.id=c.binary_pkg)
898 JOIN override o ON (o.package=b.package)
899 JOIN section s ON (s.id=o.section)
900 WHERE o.suite = :suiteid AND o.type = :overridetypeid
901 AND b.type=:overridetypename"""
903 vals = {'suiteid': suite.suite_id,
904 'overridetypeid': overridetype.overridetype_id,
905 'overridetypename': overridetype.overridetype}
907 if section is not None:
908 contents_q += " AND s.id = :sectionid"
909 vals['sectionid'] = section.section_id
911 contents_q += " ORDER BY fn"
913 return session.execute(contents_q, vals)
915 __all__.append('get_contents')
917 ################################################################################
# ORM-mapped row of content_file_paths (directory component of a
# Contents entry).
919 class ContentFilepath(object):
920 def __init__(self, *args, **kwargs):
924 return '<ContentFilepath %s>' % self.filepath
926 __all__.append('ContentFilepath')
# Find-or-create for content_file_paths rows; mirrors
# get_or_set_contents_file_id above.
929 def get_or_set_contents_path_id(filepath, session=None):
931 Returns database id for given path.
933 If no matching file is found, a row is inserted.
935 @type filepath: string
936 @param filepath: The filepath
938 @type session: SQLAlchemy
939 @param session: Optional SQL session object (a temporary one will be
940 generated if not supplied). If not passed, a commit will be performed at
941 the end of the function, otherwise the caller is responsible for committing.
944 @return: the database id for the given path
947 q = session.query(ContentFilepath).filter_by(filepath=filepath)
950 ret = q.one().cafilepath_id
951 except NoResultFound:
952 cf = ContentFilepath()
953 cf.filepath = filepath
955 session.commit_or_flush()
956 ret = cf.cafilepath_id
960 __all__.append('get_or_set_contents_path_id')
962 ################################################################################
# ORM-mapped row of content_associations.
964 class ContentAssociation(object):
965 def __init__(self, *args, **kwargs):
969 return '<ContentAssociation %s>' % self.ca_id
971 __all__.append('ContentAssociation')
# Bulk-inserts bin_contents rows for one binary.  NOTE(review): sampled
# listing — the try/except/commit/return scaffolding falls in the
# numbering gaps; visible lines show a privately-created session and a
# rollback-on-error comment.
973 def insert_content_paths(binary_id, fullpaths, session=None):
975 Make sure given path is associated with given binary id
978 @param binary_id: the id of the binary
979 @type fullpaths: list
980 @param fullpaths: the list of paths of the file being associated with the binary
981 @type session: SQLAlchemy session
982 @param session: Optional SQLAlchemy session. If this is passed, the caller
983 is responsible for ensuring a transaction has begun and committing the
984 results or rolling back based on the result code. If not passed, a commit
985 will be performed at the end of the function, otherwise the caller is
986 responsible for committing.
988 @return: True upon success
993 session = DBConn().session()
# Generator normalising "./path" entries to "path" and yielding the
# bind-parameter dicts for the INSERT below.
998 def generate_path_dicts():
999 for fullpath in fullpaths:
1000 if fullpath.startswith( './' ):
1001 fullpath = fullpath[2:]
1003 yield {'filename':fullpath, 'id': binary_id }
1005 for d in generate_path_dicts():
1006 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1015 traceback.print_exc()
1017 # Only rollback if we set up the session ourself
1024 __all__.append('insert_content_paths')
1026 ################################################################################
# ORM-mapped row of dsc_files (a file referenced by a source .dsc).
1028 class DSCFile(object):
1029 def __init__(self, *args, **kwargs):
1033 return '<DSCFile %s>' % self.dscfile_id
1035 __all__.append('DSCFile')
# Query DSCFiles by any combination of the three optional id filters;
# filters compose, so passing none of them returns everything.
1038 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1040 Returns a list of DSCFiles which may be empty
1042 @type dscfile_id: int (optional)
1043 @param dscfile_id: the dscfile_id of the DSCFiles to find
1045 @type source_id: int (optional)
1046 @param source_id: the source id related to the DSCFiles to find
1048 @type poolfile_id: int (optional)
1049 @param poolfile_id: the poolfile id related to the DSCFiles to find
1052 @return: Possibly empty list of DSCFiles
1055 q = session.query(DSCFile)
1057 if dscfile_id is not None:
1058 q = q.filter_by(dscfile_id=dscfile_id)
1060 if source_id is not None:
1061 q = q.filter_by(source_id=source_id)
1063 if poolfile_id is not None:
1064 q = q.filter_by(poolfile_id=poolfile_id)
1068 __all__.append('get_dscfiles')
1070 ################################################################################
# ORM-mapped row of the 'files' table: one file in the archive pool.
# NOTE(review): sampled listing — the __init__ signature continues on a
# missing line (the trailing backslash), and the fullpath property's def
# line is also in a gap.
1072 class PoolFile(object):
1073 def __init__(self, filename = None, location = None, filesize = -1, \
1075 self.filename = filename
1076 self.location = location
1077 self.filesize = filesize
1078 self.md5sum = md5sum
1081 return '<PoolFile %s>' % self.filename
# fullpath: location path + pool-relative filename.
1085 return os.path.join(self.location.path, self.filename)
1087 __all__.append('PoolFile')
# Validates a candidate pool file against the DB copy; see the docstring
# for the four-way return contract.
1090 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1093 (ValidFileFound [boolean or None], PoolFile object or None)
1095 @type filename: string
1096 @param filename: the filename of the file to check against the DB
1099 @param filesize: the size of the file to check against the DB
1101 @type md5sum: string
1102 @param md5sum: the md5sum of the file to check against the DB
1104 @type location_id: int
1105 @param location_id: the id of the location to look in
1108 @return: Tuple of length 2.
1109 - If more than one file found with that name: (C{None}, C{None})
1110 - If valid pool file found: (C{True}, C{PoolFile object})
1111 - If valid pool file not found:
1112 - (C{False}, C{None}) if no file found
1113 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
1116 q = session.query(PoolFile).filter_by(filename=filename)
1117 q = q.join(Location).filter_by(location_id=location_id)
# filesize arrives as a string from .changes parsing, hence int().
1127 if obj.md5sum != md5sum or obj.filesize != int(filesize):
1135 __all__.append('check_poolfile')
1138 def get_poolfile_by_id(file_id, session=None):
1140 Returns a PoolFile objects or None for the given id
1143 @param file_id: the id of the file to look for
1145 @rtype: PoolFile or None
1146 @return: either the PoolFile object or None
1149 q = session.query(PoolFile).filter_by(file_id=file_id)
1153 except NoResultFound:
1156 __all__.append('get_poolfile_by_id')
# Suffix match: finds pool files whose path ends in '/<filename>'.
1159 def get_poolfile_like_name(filename, session=None):
1161 Returns an array of PoolFile objects which are like the given name
1163 @type filename: string
1164 @param filename: the filename of the file to check against the DB
1167 @return: array of PoolFile objects
1170 # TODO: There must be a way of properly using bind parameters with %FOO%
1171 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1175 __all__.append('get_poolfile_like_name')
# Creates and flushes a new PoolFile from a checksum dict (keys: size,
# md5sum, sha1sum, sha256sum — KeyError if any is missing).
1178 def add_poolfile(filename, datadict, location_id, session=None):
1180 Add a new file to the pool
1182 @type filename: string
1183 @param filename: filename
1185 @type datadict: dict
1186 @param datadict: dict with needed data
1188 @type location_id: int
1189 @param location_id: database id of the location
1192 @return: the PoolFile object created
1194 poolfile = PoolFile()
1195 poolfile.filename = filename
1196 poolfile.filesize = datadict["size"]
1197 poolfile.md5sum = datadict["md5sum"]
1198 poolfile.sha1sum = datadict["sha1sum"]
1199 poolfile.sha256sum = datadict["sha256sum"]
1200 poolfile.location_id = location_id
1202 session.add(poolfile)
1203 # Flush to get a file id (NB: This is not a commit)
1208 __all__.append('add_poolfile')
1210 ################################################################################
# ORM-mapped row of the 'fingerprint' table (an OpenPGP key fingerprint
# known to dak).
1212 class Fingerprint(object):
1213 def __init__(self, fingerprint = None):
1214 self.fingerprint = fingerprint
1217 return '<Fingerprint %s>' % self.fingerprint
1219 __all__.append('Fingerprint')
# Read-only lookup variant: returns None when the fpr is unknown.
1222 def get_fingerprint(fpr, session=None):
1224 Returns Fingerprint object for given fpr.
1227 @param fpr: The fpr to find / add
1229 @type session: SQLAlchemy
1230 @param session: Optional SQL session object (a temporary one will be
1231 generated if not supplied).
1234 @return: the Fingerprint object for the given fpr or None
1237 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1241 except NoResultFound:
1246 __all__.append('get_fingerprint')
# Find-or-create variant: inserts (and commit_or_flush-es) a new row on
# NoResultFound so the returned object always exists.
1249 def get_or_set_fingerprint(fpr, session=None):
1251 Returns Fingerprint object for given fpr.
1253 If no matching fpr is found, a row is inserted.
1256 @param fpr: The fpr to find / add
1258 @type session: SQLAlchemy
1259 @param session: Optional SQL session object (a temporary one will be
1260 generated if not supplied). If not passed, a commit will be performed at
1261 the end of the function, otherwise the caller is responsible for committing.
1262 A flush will be performed either way.
1265 @return: the Fingerprint object for the given fpr
1268 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1272 except NoResultFound:
1273 fingerprint = Fingerprint()
1274 fingerprint.fingerprint = fpr
1275 session.add(fingerprint)
1276 session.commit_or_flush()
1281 __all__.append('get_or_set_fingerprint')
1283 ################################################################################
1285 # Helper routine for Keyring class
# Builds a display name from an LDAP entry by concatenating the cn/mn/sn
# attributes, skipping empty and "-" placeholder values.
# NOTE(review): sampled listing — the lines initialising 'name'/'ret' and
# appending to 'name' fall in the numbering gaps.
1286 def get_ldap_name(entry):
1288 for k in ["cn", "mn", "sn"]:
1290 if ret and ret[0] != "" and ret[0] != "-":
1292 return " ".join(name)
1294 ################################################################################
1296 class Keyring(object):
1297 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1298 " --with-colons --fingerprint --fingerprint"
1303 def __init__(self, *args, **kwargs):
1307 return '<Keyring %s>' % self.keyring_name
1309 def de_escape_gpg_str(self, txt):
1310 esclist = re.split(r'(\\x..)', txt)
1311 for x in range(1,len(esclist),2):
1312 esclist[x] = "%c" % (int(esclist[x][2:],16))
1313 return "".join(esclist)
# Split a gpg uid string into (real name, email address); strips any
# parenthesised comment from the name and undoes gpg \xNN escaping.
# NOTE(review): email.Utils is the Python-2 module name (email.utils in Py3).
1315 def parse_address(self, uid):
1316 """parses uid and returns a tuple of real name and email address"""
1318 (name, address) = email.Utils.parseaddr(uid)
1319 name = re.sub(r"\s*[(].*[)]", "", name)
1320 name = self.de_escape_gpg_str(name)
1323 return (name, address)
# Run gpg over the given keyring file and populate self.keys (per public key:
# email, name, fingerprints) and self.fpr_lookup (fingerprint -> key).
1325 def load_keys(self, keyring):
1326 if not self.keyring_id:
1327 raise Exception('Must be initialized with database information')
# NOTE(review): os.popen handle `k` is never closed in the visible code, and
# xreadlines() is Python-2 only.
1329 k = os.popen(self.gpg_invocation % keyring, "r")
# Parse the colon-separated gpg output line by line; field[0] is the record
# type (pub / sub / uid / fpr), field[9] the user-id or fingerprint column.
1333 for line in k.xreadlines():
1334 field = line.split(":")
1335 if field[0] == "pub":
1338 (name, addr) = self.parse_address(field[9])
1340 self.keys[key]["email"] = addr
1341 self.keys[key]["name"] = name
1342 self.keys[key]["fingerprints"] = []
# Subkey record: field[11] holds the capability flags; "s" means signing.
1344 elif key and field[0] == "sub" and len(field) >= 12:
1345 signingkey = ("s" in field[11])
# Extra uid records only fill in email/name if none captured yet.
1346 elif key and field[0] == "uid":
1347 (name, addr) = self.parse_address(field[9])
1348 if "email" not in self.keys[key] and "@" in addr:
1349 self.keys[key]["email"] = addr
1350 self.keys[key]["name"] = name
# Fingerprint records are only recorded for signing-capable keys.
1351 elif signingkey and field[0] == "fpr":
1352 self.keys[key]["fingerprints"].append(field[9])
1353 self.fpr_lookup[field[9]] = key
# Match loaded keys against the Debian LDAP directory (anonymous bind) and
# attach LDAP uids to them; returns (byname, byuid) lookup dictionaries.
1355 def import_users_from_ldap(self, session):
1359 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1360 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
1362 l = ldap.open(LDAPServer)
1363 l.simple_bind_s("","")
1364 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1365 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1366 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1368 ldap_fin_uid_id = {}
1375 uid = entry["uid"][0]
1376 name = get_ldap_name(entry)
1377 fingerprints = entry["keyFingerPrint"]
1379 for f in fingerprints:
1380 key = self.fpr_lookup.get(f, None)
1381 if key not in self.keys:
1383 self.keys[key]["uid"] = uid
1387 keyid = get_or_set_uid(uid, session).uid_id
1388 byuid[keyid] = (uid, name)
1389 byname[uid] = (keyid, name)
1391 return (byname, byuid)
# Derive database uids from keyring email addresses using the given format
# string (e.g. "%s"); keys without a usable email get "invalid-uid".
# Returns (byname, byuid) like import_users_from_ldap().
1393 def generate_users_from_keyring(self, format, session):
1397 for x in self.keys.keys():
1398 if "email" not in self.keys[x]:
1400 self.keys[x]["uid"] = format % "invalid-uid"
1402 uid = format % self.keys[x]["email"]
1403 keyid = get_or_set_uid(uid, session).uid_id
1404 byuid[keyid] = (uid, self.keys[x]["name"])
1405 byname[uid] = (keyid, self.keys[x]["name"])
1406 self.keys[x]["uid"] = uid
# Fallback shared entry for keys lacking an email address.
1409 uid = format % "invalid-uid"
1410 keyid = get_or_set_uid(uid, session).uid_id
1411 byuid[keyid] = (uid, "ungeneratable user id")
1412 byname[uid] = (keyid, "ungeneratable user id")
1414 return (byname, byuid)
1416 __all__.append('Keyring')
# Plain lookup (no insert): return the Keyring row for the given name, or
# None when no row exists (NoResultFound branch, return elided here).
1419 def get_keyring(keyring, session=None):
1421 If C{keyring} does not have an entry in the C{keyrings} table yet, return None
1422 If C{keyring} already has an entry, simply return the existing Keyring
1424 @type keyring: string
1425 @param keyring: the keyring name
1428 @return: the Keyring object for this keyring
1431 q = session.query(Keyring).filter_by(keyring_name=keyring)
1435 except NoResultFound:
1438 __all__.append('get_keyring')
1440 ################################################################################
# ORM class mapped to the keyring_acl_map table.
1442 class KeyringACLMap(object):
1443 def __init__(self, *args, **kwargs):
1447 return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1449 __all__.append('KeyringACLMap')
1451 ################################################################################
# ORM class for an uploaded .changes file tracked in the database.
1453 class DBChange(object):
1454 def __init__(self, *args, **kwargs):
1458 return '<DBChange %s>' % self.changesname
# Detach this changes entry from its policy queue and drop the related
# file-association rows.
1460 def clean_from_queue(self):
# NOTE(review): DBConn().session().object_session(self) calls object_session
# on a fresh session; object_session(self) alone looks like the intent —
# confirm against the full source.
1461 session = DBConn().session().object_session(self)
1463 # Remove changes_pool_files entries
1466 # Remove changes_pending_files references
1469 # Clear out of queue
1470 self.in_queue = None
1471 self.approved_for_id = None
1473 __all__.append('DBChange')
# Lookup by .changes filename; returns None if no row matches.
1476 def get_dbchange(filename, session=None):
1478 returns DBChange object for given C{filename}.
1480 @type filename: string
1481 @param filename: the name of the file
1483 @type session: Session
1484 @param session: Optional SQLA session object (a temporary one will be
1485 generated if not supplied)
1488 @return: DBChange object for the given filename (C{None} if not present)
1491 q = session.query(DBChange).filter_by(changesname=filename)
1495 except NoResultFound:
1498 __all__.append('get_dbchange')
1500 ################################################################################
# ORM class for an archive location (a pool directory).
1502 class Location(object):
1503 def __init__(self, path = None):
1505 # the column 'type' should go away, see comment at mapper
# New locations default to the 'pool' archive type.
1506 self.archive_type = 'pool'
1509 return '<Location %s (%s)>' % (self.path, self.location_id)
1511 __all__.append('Location')
# Look up a Location row by path, optionally narrowed by component and/or
# archive name; returns None when nothing matches (NoResultFound branch).
1514 def get_location(location, component=None, archive=None, session=None):
1516 Returns Location object for the given combination of location, component
1519 @type location: string
1520 @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}
1522 @type component: string
1523 @param component: the component name (if None, no restriction applied)
1525 @type archive: string
1526 @param archive: the archive name (if None, no restriction applied)
1528 @rtype: Location / None
1529 @return: Either a Location object or None if one can't be found
1532 q = session.query(Location).filter_by(path=location)
1534 if archive is not None:
1535 q = q.join(Archive).filter_by(archive_name=archive)
1537 if component is not None:
1538 q = q.join(Component).filter_by(component_name=component)
1542 except NoResultFound:
1545 __all__.append('get_location')
1547 ################################################################################
# ORM class for a maintainer name/email string.
1549 class Maintainer(object):
1550 def __init__(self, name = None):
# __repr__ body (its def line is elided from this excerpt).
1554 return '''<Maintainer '%s' (%s)>''' % (self.name, self.maintainer_id)
# Split self.name via fix_maintainer(); returns four empty strings when no
# name is set.
1556 def get_split_maintainer(self):
1557 if not hasattr(self, 'name') or self.name is None:
1558 return ('', '', '', '')
1560 return fix_maintainer(self.name.strip())
1562 __all__.append('Maintainer')
# Get-or-create accessor for Maintainer rows, keyed on the full name string.
1565 def get_or_set_maintainer(name, session=None):
1567 Returns Maintainer object for given maintainer name.
1569 If no matching maintainer name is found, a row is inserted.
1572 @param name: The maintainer name to add
1574 @type session: SQLAlchemy
1575 @param session: Optional SQL session object (a temporary one will be
1576 generated if not supplied). If not passed, a commit will be performed at
1577 the end of the function, otherwise the caller is responsible for commiting.
1578 A flush will be performed either way.
1581 @return: the Maintainer object for the given maintainer
1584 q = session.query(Maintainer).filter_by(name=name)
1587 except NoResultFound:
1588 maintainer = Maintainer()
1589 maintainer.name = name
1590 session.add(maintainer)
1591 session.commit_or_flush()
1596 __all__.append('get_or_set_maintainer')
# Primary-key lookup; Query.get returns None for an unknown id.
1599 def get_maintainer(maintainer_id, session=None):
1601 Return the name of the maintainer behind C{maintainer_id} or None if that
1602 maintainer_id is invalid.
1604 @type maintainer_id: int
1605 @param maintainer_id: the id of the maintainer
1608 @return: the Maintainer with this C{maintainer_id}
1611 return session.query(Maintainer).get(maintainer_id)
1613 __all__.append('get_maintainer')
1615 ################################################################################
# ORM class for a NEW-queue processing comment on a package/version.
1617 class NewComment(object):
1618 def __init__(self, *args, **kwargs):
1622 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1624 __all__.append('NewComment')
# Existence test for a NEW comment on the exact (package, version) pair.
1627 def has_new_comment(package, version, session=None):
1629 Returns true if the given combination of C{package}, C{version} has a comment.
1631 @type package: string
1632 @param package: name of the package
1634 @type version: string
1635 @param version: package version
1637 @type session: Session
1638 @param session: Optional SQLA session object (a temporary one will be
1639 generated if not supplied)
1645 q = session.query(NewComment)
1646 q = q.filter_by(package=package)
1647 q = q.filter_by(version=version)
# NOTE(review): the bool() wrapper is redundant — `q.count() > 0` is already
# a bool.  Harmless as-is.
1649 return bool(q.count() > 0)
1651 __all__.append('has_new_comment')
# Filtered listing of NewComment rows; every filter argument is optional.
1654 def get_new_comments(package=None, version=None, comment_id=None, session=None):
1656 Returns (possibly empty) list of NewComment objects for the given
1659 @type package: string (optional)
1660 @param package: name of the package
1662 @type version: string (optional)
1663 @param version: package version
1665 @type comment_id: int (optional)
1666 @param comment_id: An id of a comment
1668 @type session: Session
1669 @param session: Optional SQLA session object (a temporary one will be
1670 generated if not supplied)
1673 @return: A (possibly empty) list of NewComment objects will be returned
1676 q = session.query(NewComment)
1677 if package is not None: q = q.filter_by(package=package)
1678 if version is not None: q = q.filter_by(version=version)
1679 if comment_id is not None: q = q.filter_by(comment_id=comment_id)
1683 __all__.append('get_new_comments')
1685 ################################################################################
# ORM class for an override entry (package -> section/priority in a suite).
1687 class Override(object):
1688 def __init__(self, *args, **kwargs):
1692 return '<Override %s (%s)>' % (self.package, self.suite_id)
1694 __all__.append('Override')
# Query overrides for a package, optionally limited by suite(s),
# component(s) and override type(s); scalar arguments are normalised to
# one-element lists before the IN filters are applied.
1697 def get_override(package, suite=None, component=None, overridetype=None, session=None):
1699 Returns Override object for the given parameters
1701 @type package: string
1702 @param package: The name of the package
1704 @type suite: string, list or None
1705 @param suite: The name of the suite (or suites if a list) to limit to. If
1706 None, don't limit. Defaults to None.
1708 @type component: string, list or None
1709 @param component: The name of the component (or components if a list) to
1710 limit to. If None, don't limit. Defaults to None.
1712 @type overridetype: string, list or None
1713 @param overridetype: The name of the overridetype (or overridetypes if a list) to
1714 limit to. If None, don't limit. Defaults to None.
1716 @type session: Session
1717 @param session: Optional SQLA session object (a temporary one will be
1718 generated if not supplied)
1721 @return: A (possibly empty) list of Override objects will be returned
1724 q = session.query(Override)
1725 q = q.filter_by(package=package)
1727 if suite is not None:
1728 if not isinstance(suite, list): suite = [suite]
1729 q = q.join(Suite).filter(Suite.suite_name.in_(suite))
1731 if component is not None:
1732 if not isinstance(component, list): component = [component]
1733 q = q.join(Component).filter(Component.component_name.in_(component))
1735 if overridetype is not None:
1736 if not isinstance(overridetype, list): overridetype = [overridetype]
1737 q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
1741 __all__.append('get_override')
1744 ################################################################################
# ORM class for an override type (deb / udeb / dsc).
1746 class OverrideType(object):
1747 def __init__(self, *args, **kwargs):
1751 return '<OverrideType %s>' % self.overridetype
1753 __all__.append('OverrideType')
# Lookup by override type name; returns None when absent.
1756 def get_override_type(override_type, session=None):
1758 Returns OverrideType object for given C{override type}.
1760 @type override_type: string
1761 @param override_type: The name of the override type
1763 @type session: Session
1764 @param session: Optional SQLA session object (a temporary one will be
1765 generated if not supplied)
# NOTE(review): despite the wording below, the visible query returns an
# OverrideType object, not a bare database id — confirm against full source.
1768 @return: the database id for the given override type
1771 q = session.query(OverrideType).filter_by(overridetype=override_type)
1775 except NoResultFound:
1778 __all__.append('get_override_type')
1780 ################################################################################
# ORM class for a .deb contents entry (package, file path).
1782 class DebContents(object):
1783 def __init__(self, *args, **kwargs):
# NOTE(review): 'DebConetnts' is misspelled in the repr output string; a
# doc-only change cannot fix runtime strings, flagging for follow-up.
1787 return '<DebConetnts %s: %s>' % (self.package.package,self.file)
1789 __all__.append('DebContents')
# ORM class for a .udeb contents entry.
1792 class UdebContents(object):
1793 def __init__(self, *args, **kwargs):
# NOTE(review): same 'Conetnts' misspelling as DebContents above.
1797 return '<UdebConetnts %s: %s>' % (self.package.package,self.file)
1799 __all__.append('UdebContents')
# ORM class for contents recorded before a binary is accepted.
1801 class PendingBinContents(object):
1802 def __init__(self, *args, **kwargs):
1806 return '<PendingBinContents %s>' % self.contents_id
1808 __all__.append('PendingBinContents')
# Record the file paths shipped by a not-yet-accepted binary package,
# replacing any rows previously stored for the same package/version/arch.
# NOTE(review): `except Exception, e:` below is Python-2-only syntax.
1810 def insert_pending_content_paths(package,
1815 Make sure given paths are temporarily associated with given
1819 @param package: the package to associate with should have been read in from the binary control file
1820 @type fullpaths: list
1821 @param fullpaths: the list of paths of the file being associated with the binary
1822 @type session: SQLAlchemy session
1823 @param session: Optional SQLAlchemy session. If this is passed, the caller
1824 is responsible for ensuring a transaction has begun and committing the
1825 results or rolling back based on the result code. If not passed, a commit
1826 will be performed at the end of the function
1828 @return: True upon success, False if there is a problem
# privatetrans tracks whether this function opened its own session (and thus
# owns commit/rollback).
1831 privatetrans = False
1834 session = DBConn().session()
1838 arch = get_architecture(package['Architecture'], session)
1839 arch_id = arch.arch_id
1841 # Remove any already existing recorded files for this package
1842 q = session.query(PendingBinContents)
1843 q = q.filter_by(package=package['Package'])
1844 q = q.filter_by(version=package['Version'])
1845 q = q.filter_by(architecture=arch_id)
1848 for fullpath in fullpaths:
# Normalise paths emitted as "./usr/..." to "usr/...".
1850 if fullpath.startswith( "./" ):
1851 fullpath = fullpath[2:]
1853 pca = PendingBinContents()
1854 pca.package = package['Package']
1855 pca.version = package['Version']
1857 pca.architecture = arch_id
# Magic type codes for udeb vs deb rows (acknowledged as "gross" upstream).
1860 pca.type = 8 # gross
1862 pca.type = 7 # also gross
1865 # Only commit if we set up the session ourself
1873 except Exception, e:
1874 traceback.print_exc()
1876 # Only rollback if we set up the session ourself
1883 __all__.append('insert_pending_content_paths')
1885 ################################################################################
# ORM class for a policy queue (e.g. NEW, byhand).
1887 class PolicyQueue(object):
1888 def __init__(self, *args, **kwargs):
1892 return '<PolicyQueue %s>' % self.queue_name
1894 __all__.append('PolicyQueue')
# Lookup by queue name; returns None when absent (NoResultFound branch).
1897 def get_policy_queue(queuename, session=None):
1899 Returns PolicyQueue object for given C{queue name}
1901 @type queuename: string
1902 @param queuename: The name of the queue
1904 @type session: Session
1905 @param session: Optional SQLA session object (a temporary one will be
1906 generated if not supplied)
1909 @return: PolicyQueue object for the given queue
1912 q = session.query(PolicyQueue).filter_by(queue_name=queuename)
1916 except NoResultFound:
1919 __all__.append('get_policy_queue')
# Lookup by on-disk queue path instead of name.
1922 def get_policy_queue_from_path(pathname, session=None):
1924 Returns PolicyQueue object for given C{path name}
1926 @type pathname: string
1927 @param pathname: The path
1929 @type session: Session
1930 @param session: Optional SQLA session object (a temporary one will be
1931 generated if not supplied)
1934 @return: PolicyQueue object for the given queue
1937 q = session.query(PolicyQueue).filter_by(path=pathname)
1941 except NoResultFound:
1944 __all__.append('get_policy_queue_from_path')
1946 ################################################################################
# ORM class for a package priority (required/important/standard/...).
# Supports direct comparison against plain strings via __eq__/__ne__.
1948 class Priority(object):
1949 def __init__(self, *args, **kwargs):
def __eq__(self, val):
    """Allow equality comparison against a plain priority-name string."""
    if not isinstance(val, str):
        # Anything but a string: defer to the normal comparison machinery.
        return NotImplemented
    return self.priority == val
def __ne__(self, val):
    """String-aware inequality; mirrors __eq__ above."""
    if not isinstance(val, str):
        # Anything but a string: defer to the normal comparison machinery.
        return NotImplemented
    return self.priority != val
# Priority.__repr__ body (its def line is elided from this excerpt).
1965 return '<Priority %s (%s)>' % (self.priority, self.priority_id)
1967 __all__.append('Priority')
# Lookup by priority name; returns None when absent (NoResultFound branch).
1970 def get_priority(priority, session=None):
1972 Returns Priority object for given C{priority name}.
1974 @type priority: string
1975 @param priority: The name of the priority
1977 @type session: Session
1978 @param session: Optional SQLA session object (a temporary one will be
1979 generated if not supplied)
1982 @return: Priority object for the given priority
1985 q = session.query(Priority).filter_by(priority=priority)
1989 except NoResultFound:
1992 __all__.append('get_priority')
# Dump the whole priority table as a name -> id dict.
1995 def get_priorities(session=None):
1997 Returns dictionary of priority names -> id mappings
1999 @type session: Session
2000 @param session: Optional SQL session object (a temporary one will be
2001 generated if not supplied)
2004 @return: dictionary of priority names -> id mappings
2008 q = session.query(Priority)
2010 ret[x.priority] = x.priority_id
2014 __all__.append('get_priorities')
2016 ################################################################################
# ORM class for an archive section; string-comparable like Priority.
2018 class Section(object):
2019 def __init__(self, *args, **kwargs):
def __eq__(self, val):
    """Allow equality comparison against a plain section-name string."""
    if not isinstance(val, str):
        # Anything but a string: defer to the normal comparison machinery.
        return NotImplemented
    return self.section == val
def __ne__(self, val):
    """String-aware inequality; mirrors __eq__ above."""
    if not isinstance(val, str):
        # Anything but a string: defer to the normal comparison machinery.
        return NotImplemented
    return self.section != val
# Section.__repr__ body (its def line is elided from this excerpt).
2035 return '<Section %s>' % self.section
2037 __all__.append('Section')
# Lookup by section name; returns None when absent (NoResultFound branch).
2040 def get_section(section, session=None):
2042 Returns Section object for given C{section name}.
2044 @type section: string
2045 @param section: The name of the section
2047 @type session: Session
2048 @param session: Optional SQLA session object (a temporary one will be
2049 generated if not supplied)
2052 @return: Section object for the given section name
2055 q = session.query(Section).filter_by(section=section)
2059 except NoResultFound:
2062 __all__.append('get_section')
# Dump the whole section table as a name -> id dict.
2065 def get_sections(session=None):
2067 Returns dictionary of section names -> id mappings
2069 @type session: Session
2070 @param session: Optional SQL session object (a temporary one will be
2071 generated if not supplied)
2074 @return: dictionary of section names -> id mappings
2078 q = session.query(Section)
2080 ret[x.section] = x.section_id
2084 __all__.append('get_sections')
2086 ################################################################################
# ORM class for a source package row.
2088 class DBSource(object):
2089 def __init__(self, maintainer = None, changedby = None):
2090 self.maintainer = maintainer
2091 self.changedby = changedby
# __repr__ body (its def line is elided from this excerpt).
2094 return '<DBSource %s (%s)>' % (self.source, self.version)
2096 __all__.append('DBSource')
# Check a binary upload's source is present in the archive: either the exact
# version, or (for binary-only NMUs) the version with the +bN suffix removed.
# NOTE(review): the mutable default suites=["any"] is shared across calls —
# safe only as long as no caller mutates it.
2099 def source_exists(source, source_version, suites = ["any"], session=None):
2101 Ensure that source exists somewhere in the archive for the binary
2102 upload being processed.
2103 1. exact match => 1.0-3
2104 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
2106 @type source: string
2107 @param source: source name
2109 @type source_version: string
2110 @param source_version: expected source version
2113 @param suites: list of suites to check in, default I{any}
2115 @type session: Session
2116 @param session: Optional SQLA session object (a temporary one will be
2117 generated if not supplied)
2120 @return: returns 1 if a source with expected version is found, otherwise 0
2127 for suite in suites:
2128 q = session.query(DBSource).filter_by(source=source)
2130 # source must exist in suite X, or in some other suite that's
2131 # mapped to X, recursively... silent-maps are counted too,
2132 # unreleased-maps aren't.
# Expand the suite set via the configured SuiteMappings ("map"/"silent-map"
# entries are (from, to) pairs).
2133 maps = cnf.ValueList("SuiteMappings")[:]
2135 maps = [ m.split() for m in maps ]
2136 maps = [ (x[1], x[2]) for x in maps
2137 if x[0] == "map" or x[0] == "silent-map" ]
2140 if x[1] in s and x[0] not in s:
2143 q = q.join(SrcAssociation).join(Suite)
2144 q = q.filter(Suite.suite_name.in_(s))
2146 # Reduce the query results to a list of version numbers
2147 ql = [ j.version for j in q.all() ]
2150 if source_version in ql:
# Fall back to the binary-only-NMU form: strip the +bN suffix and retry.
2154 from daklib.regexes import re_bin_only_nmu
2155 orig_source_version = re_bin_only_nmu.sub('', source_version)
2156 if orig_source_version in ql:
2159 # No source found so return not ok
2164 __all__.append('source_exists')
# All suites that currently contain a source package of the given name.
2167 def get_suites_source_in(source, session=None):
2169 Returns list of Suite objects which given C{source} name is in
2172 @param source: DBSource package name to search for
2175 @return: list of Suite objects for the given source
2178 return session.query(Suite).join(SrcAssociation).join(DBSource).filter_by(source=source).all()
2180 __all__.append('get_suites_source_in')
# Filtered listing of DBSource rows by name, optional version and optional
# dm_upload_allowed flag.
2183 def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
2185 Returns list of DBSource objects for given C{source} name and other parameters
2188 @param source: DBSource package name to search for
2190 @type version: str or None
2191 @param version: DBSource version name to search for or None if not applicable
2193 @type dm_upload_allowed: bool
2194 @param dm_upload_allowed: If None, no effect. If True or False, only
2195 return packages with that dm_upload_allowed setting
2197 @type session: Session
2198 @param session: Optional SQL session object (a temporary one will be
2199 generated if not supplied)
2202 @return: list of DBSource objects for the given name (may be empty)
2205 q = session.query(DBSource).filter_by(source=source)
2207 if version is not None:
2208 q = q.filter_by(version=version)
2210 if dm_upload_allowed is not None:
2211 q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2215 __all__.append('get_sources_from_name')
# Resolve the single DBSource for (source, suite) via the association table.
2218 def get_source_in_suite(source, suite, session=None):
2220 Returns list of DBSource objects for a combination of C{source} and C{suite}.
2222 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2223 - B{suite} - a suite name, eg. I{unstable}
2225 @type source: string
2226 @param source: source package name
2229 @param suite: the suite name
2232 @return: the DBSource object for I{source} in I{suite} (C{None} if not found)
2236 q = session.query(SrcAssociation)
2237 q = q.join('source').filter_by(source=source)
2238 q = q.join('suite').filter_by(suite_name=suite)
# one() raises NoResultFound when no association matches; the .source
# relation on the association is the DBSource row.
2241 return q.one().source
2242 except NoResultFound:
2245 __all__.append('get_source_in_suite')
2247 ################################################################################
# Insert a processed .dsc upload into the database: the DBSource row, its
# suite associations, pool files / dsc_files rows, and src_uploaders.
# Returns (source, dsc_component, dsc_location_id, pfs) where pfs is the
# list of PoolFile objects touched.
# NOTE(review): has_key() and the implicit-relative imports below are
# Python-2 era constructs.
2250 def add_dsc_to_db(u, filename, session=None):
2251 entry = u.pkg.files[filename]
2255 source.source = u.pkg.dsc["source"]
2256 source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
2257 source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
2258 source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
2259 source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
# NOTE(review): naive local date, not UTC — fine only if the whole archive
# runs in one timezone.
2260 source.install_date = datetime.now().date()
2262 dsc_component = entry["component"]
2263 dsc_location_id = entry["location id"]
2265 source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
2267 # Set up a new poolfile if necessary
2268 if not entry.has_key("files id") or not entry["files id"]:
2269 filename = entry["pool name"] + filename
2270 poolfile = add_poolfile(filename, entry, dsc_location_id, session)
2272 pfs.append(poolfile)
2273 entry["files id"] = poolfile.file_id
2275 source.poolfile_id = entry["files id"]
# One SrcAssociation per target suite listed in the .changes distribution.
2279 for suite_name in u.pkg.changes["distribution"].keys():
2280 sa = SrcAssociation()
2281 sa.source_id = source.source_id
2282 sa.suite_id = get_suite(suite_name).suite_id
2287 # Add the source files to the DB (files and dsc_files)
2289 dscfile.source_id = source.source_id
2290 dscfile.poolfile_id = entry["files id"]
2291 session.add(dscfile)
2293 for dsc_file, dentry in u.pkg.dsc_files.items():
2295 df.source_id = source.source_id
2297 # If the .orig tarball is already in the pool, it's
2298 # files id is stored in dsc_files by check_dsc().
2299 files_id = dentry.get("files id", None)
2301 # Find the entry in the files hash
2302 # TODO: Bail out here properly
2304 for f, e in u.pkg.files.items():
# Not referenced by the upload itself: check whether the file is already in
# the pool under the expected name and checksums.
2309 if files_id is None:
2310 filename = dfentry["pool name"] + dsc_file
2312 (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
2313 # FIXME: needs to check for -1/-2 and or handle exception
2314 if found and obj is not None:
2315 files_id = obj.file_id
2318 # If still not found, add it
2319 if files_id is None:
2320 # HACK: Force sha1sum etc into dentry
2321 dentry["sha1sum"] = dfentry["sha1sum"]
2322 dentry["sha256sum"] = dfentry["sha256sum"]
2323 poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
2324 pfs.append(poolfile)
2325 files_id = poolfile.file_id
2327 poolfile = get_poolfile_by_id(files_id, session)
2328 if poolfile is None:
2329 utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
2330 pfs.append(poolfile)
2332 df.poolfile_id = files_id
2337 # Add the src_uploaders to the DB
# The maintainer always counts as an uploader; Uploaders: entries are split
# on the ">, " separator between addresses.
2338 uploader_ids = [source.maintainer_id]
2339 if u.pkg.dsc.has_key("uploaders"):
2340 for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
2342 uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
# De-duplicate uploader ids, warning on repeats.
2345 for up_id in uploader_ids:
2346 if added_ids.has_key(up_id):
2348 utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
2354 su.maintainer_id = up_id
2355 su.source_id = source.source_id
2360 return source, dsc_component, dsc_location_id, pfs
2362 __all__.append('add_dsc_to_db')
# Insert an accepted binary (.deb or .udeb — distinguished by 'dbtype') into
# the database, linking it to its pool file, source and target suites.
2365 def add_deb_to_db(u, filename, session=None):
2367 Contrary to what you might expect, this routine deals with both
2368 debs and udebs. That info is in 'dbtype', whilst 'type' is
2369 'deb' for both of them
2372 entry = u.pkg.files[filename]
2375 bin.package = entry["package"]
2376 bin.version = entry["version"]
2377 bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
2378 bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2379 bin.arch_id = get_architecture(entry["architecture"], session).arch_id
2380 bin.binarytype = entry["dbtype"]
# Resolve the pool location, creating the location entry lazily.
2383 filename = entry["pool name"] + filename
2384 fullpath = os.path.join(cnf["Dir::Pool"], filename)
2385 if not entry.get("location id", None):
2386 entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id
2388 if entry.get("files id", None):
# NOTE(review): bin.poolfile_id is read here but only assigned on the next
# line — this looks like it should be get_poolfile_by_id(entry["files id"]).
# Flagging rather than changing, since surrounding lines are elided.
2389 poolfile = get_poolfile_by_id(bin.poolfile_id)
2390 bin.poolfile_id = entry["files id"]
2392 poolfile = add_poolfile(filename, entry, entry["location id"], session)
2393 bin.poolfile_id = entry["files id"] = poolfile.file_id
# A binary must map to exactly one source row, else the upload is rejected.
2396 bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
2397 if len(bin_sources) != 1:
2398 raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
2399 (bin.package, bin.version, entry["architecture"],
2400 filename, bin.binarytype, u.pkg.changes["fingerprint"])
2402 bin.source_id = bin_sources[0].source_id
2404 # Add and flush object so it has an ID
2408 # Add BinAssociations
2409 for suite_name in u.pkg.changes["distribution"].keys():
2410 ba = BinAssociation()
2411 ba.binary_id = bin.binary_id
2412 ba.suite_id = get_suite(suite_name).suite_id
2417 # Deal with contents - disabled for now
2418 #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
2420 # print "REJECT\nCould not determine contents of package %s" % bin.package
2421 # session.rollback()
2422 # raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
2426 __all__.append('add_deb_to_db')
2428 ################################################################################
# ORM class for a source upload ACL entry.
2430 class SourceACL(object):
2431 def __init__(self, *args, **kwargs):
2435 return '<SourceACL %s>' % self.source_acl_id
2437 __all__.append('SourceACL')
2439 ################################################################################
# ORM association class linking a source package to a suite.
2441 class SrcAssociation(object):
2442 def __init__(self, *args, **kwargs):
2446 return '<SrcAssociation %s (%s, %s)>' % (self.sa_id, self.source, self.suite)
2448 __all__.append('SrcAssociation')
2450 ################################################################################
# ORM class for a source package format (e.g. "3.0 (quilt)").
2452 class SrcFormat(object):
2453 def __init__(self, *args, **kwargs):
2457 return '<SrcFormat %s>' % (self.format_name)
2459 __all__.append('SrcFormat')
2461 ################################################################################
# ORM class linking an uploader (maintainer) to a source package.
2463 class SrcUploader(object):
2464 def __init__(self, *args, **kwargs):
2468 return '<SrcUploader %s>' % self.uploader_id
2470 __all__.append('SrcUploader')
2472 ################################################################################
# (display label, Suite attribute) pairs used by Suite.details() below to
# render a suite's configuration.
2474 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2475 ('SuiteID', 'suite_id'),
2476 ('Version', 'version'),
2477 ('Origin', 'origin'),
2479 ('Description', 'description'),
2480 ('Untouchable', 'untouchable'),
2481 ('Announce', 'announce'),
2482 ('Codename', 'codename'),
2483 ('OverrideCodename', 'overridecodename'),
2484 ('ValidTime', 'validtime'),
2485 ('Priority', 'priority'),
2486 ('NotAutomatic', 'notautomatic'),
2487 ('CopyChanges', 'copychanges'),
2488 ('OverrideSuite', 'overridesuite')]
# ORM class for an archive suite (unstable, testing, ...); string-comparable
# against suite names via __eq__/__ne__.
2490 class Suite(object):
2491 def __init__(self, suite_name = None, version = None):
2492 self.suite_name = suite_name
2493 self.version = version
# __repr__ body (its def line is elided from this excerpt).
2496 return '<Suite %s>' % self.suite_name
def __eq__(self, val):
    """Allow equality comparison against a plain suite-name string."""
    if not isinstance(val, str):
        # Anything but a string: defer to the normal comparison machinery.
        return NotImplemented
    return self.suite_name == val
def __ne__(self, val):
    """String-aware inequality; mirrors __eq__ above."""
    if not isinstance(val, str):
        # Anything but a string: defer to the normal comparison machinery.
        return NotImplemented
    return self.suite_name != val
2512 for disp, field in SUITE_FIELDS:
2513 val = getattr(self, field, None)
2515 ret.append("%s: %s" % (disp, val))
2517 return "\n".join(ret)
2519 def get_architectures(self, skipsrc=False, skipall=False):
2521 Returns list of Architecture objects
2523 @type skipsrc: boolean
2524 @param skipsrc: Whether to skip returning the 'source' architecture entry
2527 @type skipall: boolean
2528 @param skipall: Whether to skip returning the 'all' architecture entry
2532 @return: list of Architecture objects for the given name (may be empty)
2535 q = object_session(self).query(Architecture). \
2536 filter(Architecture.suites.contains(self))
2538 q = q.filter(Architecture.arch_string != 'source')
2540 q = q.filter(Architecture.arch_string != 'all')
2541 return q.order_by(Architecture.arch_string).all()
2543 __all__.append('Suite')
2546 def get_suite(suite, session=None):
2548 Returns Suite object for given C{suite name}.
2551 @param suite: The name of the suite
2553 @type session: Session
2554 @param session: Optional SQLA session object (a temporary one will be
2555 generated if not supplied)
2558 @return: Suite object for the requested suite name (None if not present)
2561 q = session.query(Suite).filter_by(suite_name=suite)
2565 except NoResultFound:
2568 __all__.append('get_suite')
2570 ################################################################################
2572 # TODO: should be removed because the implementation is too trivial
2574 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2576 Returns list of Architecture objects for given C{suite} name
2579 @param suite: Suite name to search for
2581 @type skipsrc: boolean
2582 @param skipsrc: Whether to skip returning the 'source' architecture entry
2585 @type skipall: boolean
2586 @param skipall: Whether to skip returning the 'all' architecture entry
2589 @type session: Session
2590 @param session: Optional SQL session object (a temporary one will be
2591 generated if not supplied)
2594 @return: list of Architecture objects for the given name (may be empty)
2597 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2599 __all__.append('get_suite_architectures')
2601 ################################################################################
# ORM class for the suite <-> source-format association table; instances
# carry suite_id and src_format_id attributes set by the mapper.
# NOTE(review): the __init__ body and the "def __repr__" line are not
# visible in this listing -- presumably __init__ is a no-op placeholder as
# in the sibling ORM classes; confirm against the full source.
2603 class SuiteSrcFormat(object):
2604 def __init__(self, *args, **kwargs):
2608 return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2610 __all__.append('SuiteSrcFormat')
# Query the source formats permitted in a suite, joining src_format ->
# suite_src_formats -> suite and filtering on the suite name.
# NOTE(review): the final "return q.all()" (or equivalent) is not visible
# in this gappy listing -- confirm against the full source.
2613 def get_suite_src_formats(suite, session=None):
2615 Returns list of allowed SrcFormat for C{suite}.
2618 @param suite: Suite name to search for
2620 @type session: Session
2621 @param session: Optional SQL session object (a temporary one will be
2622 generated if not supplied)
2625 @return: the list of allowed source formats for I{suite}
# Build up the query step by step; ordering by format_name gives callers a
# deterministic result order.
2628 q = session.query(SrcFormat)
2629 q = q.join(SuiteSrcFormat)
2630 q = q.join(Suite).filter_by(suite_name=suite)
2631 q = q.order_by('format_name')
2635 __all__.append('get_suite_src_formats')
2637 ################################################################################
# Uid.__init__: constructor taking an optional uid string and real name.
# NOTE(review): the body is not visible in this listing -- presumably it
# stores the two arguments on self; confirm against the full source.
2640 def __init__(self, uid = None, name = None):
2644 def __eq__(self, val):
2645 if isinstance(val, str):
2646 return (self.uid == val)
2647 # This signals to use the normal comparison operator
2648 return NotImplemented
2650 def __ne__(self, val):
2651 if isinstance(val, str):
2652 return (self.uid != val)
2653 # This signals to use the normal comparison operator
2654 return NotImplemented
# Tail of Uid.__repr__: debug representation showing uid and real name.
2657 return '<Uid %s (%s)>' % (self.uid, self.name)
# Export the Uid ORM class.
2659 __all__.append('Uid')
# Get-or-create helper for Uid rows, keyed on the uid string.
# NOTE(review): this listing is gappy -- the try/q.one() lookup, the Uid()
# construction, session.add() and the return statements are not visible;
# presumably a new row is inserted on NoResultFound and then
# commit_or_flush() persists it.  Confirm against the full source.
2662 def get_or_set_uid(uidname, session=None):
2664 Returns uid object for given uidname.
2666 If no matching uidname is found, a row is inserted.
2668 @type uidname: string
2669 @param uidname: The uid to add
2671 @type session: SQLAlchemy
2672 @param session: Optional SQL session object (a temporary one will be
2673 generated if not supplied). If not passed, a commit will be performed at
2674 the end of the function, otherwise the caller is responsible for committing.
2677 @return: the uid object for the given uidname
2680 q = session.query(Uid).filter_by(uid=uidname)
2684 except NoResultFound:
# Persist the newly-added row (commit for temporary sessions, flush for
# caller-supplied ones).
2688 session.commit_or_flush()
2693 __all__.append('get_or_set_uid')
# Resolve the Uid that owns a given key fingerprint by joining through the
# fingerprint table.
# NOTE(review): gappy listing -- the try/return around q.one() and the
# None-on-NoResultFound branch are not visible; confirm against the full
# source.
2696 def get_uid_from_fingerprint(fpr, session=None):
2697 q = session.query(Uid)
2698 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
2702 except NoResultFound:
2705 __all__.append('get_uid_from_fingerprint')
2707 ################################################################################
# ORM class for the upload_blocks table (blocks on uploads of a source
# package); instances carry source and upload_block_id attributes set by
# the mapper.  NOTE(review): the __init__ body and the "def __repr__" line
# are not visible in this listing -- presumably __init__ is a no-op
# placeholder as in the sibling ORM classes; confirm against full source.
2709 class UploadBlock(object):
2710 def __init__(self, *args, **kwargs):
2714 return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2716 __all__.append('UploadBlock')
2718 ################################################################################
class DBConn(object):
    """
    database module init.

    Borg-style singleton: every instance shares its __dict__ via the
    class-level __shared_state mapping, so the engine/metadata/session
    factory are only set up once per process.
    """

    def __init__(self, *args, **kwargs):
        # Borg pattern: all instances alias the shared state dictionary
        # (defined on the class; not visible in this excerpt).
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            # Fix: dict.has_key() is deprecated (removed in Python 3);
            # the 'in' operator is the equivalent, portable spelling.
            # Only the *presence* of the 'debug' keyword matters, exactly
            # as with the old has_key() call.
            self.debug = 'debug' in kwargs
# Reflect the archive database schema into SQLAlchemy Table objects and
# attach them to self as tbl_<name> / view_<name> attributes.
# NOTE(review): this listing is gappy -- several table-name entries, the
# closing parens of the tuples and the "views = (" opener are not visible;
# confirm against the full source.
2734 def __setuptables(self):
# Tables whose integer 'id' primary key must be declared explicitly (see
# the SERIAL workaround comment before the reflection loop below).
2735 tables_with_primary = (
2746 'changes_pending_binaries',
2747 'changes_pending_files',
2748 'changes_pending_source',
2758 'pending_bin_contents',
2770 # The following tables have primary keys but sqlalchemy
2771 # version 0.5 fails to reflect them correctly with database
2772 # versions before upgrade #41.
2774 #'build_queue_files',
# Pure association/join tables with no surrogate primary key.
2777 tables_no_primary = (
2779 'changes_pending_files_map',
2780 'changes_pending_source_files',
2781 'changes_pool_files',
2784 'suite_architectures',
2785 'suite_src_formats',
2786 'suite_build_queue_copy',
2788 # see the comment above
2790 'build_queue_files',
# Database views (read-only); reflected like tables but bound to
# view_<name> attributes below.
2794 'almost_obsolete_all_associations',
2795 'almost_obsolete_src_associations',
2796 'any_associations_source',
2797 'bin_assoc_by_arch',
2798 'bin_associations_binaries',
2799 'binaries_suite_arch',
2800 'binfiles_suite_component_arch',
2803 'newest_all_associations',
2804 'newest_any_associations',
2806 'newest_src_association',
2807 'obsolete_all_associations',
2808 'obsolete_any_associations',
2809 'obsolete_any_by_all_associations',
2810 'obsolete_src_associations',
2812 'src_associations_bin',
2813 'src_associations_src',
2814 'suite_arch_by_name',
2817 # Sqlalchemy version 0.5 fails to reflect the SERIAL type
2818 # correctly and that is why we have to use a workaround. It can
2819 # be removed as soon as we switch to version 0.6.
2820 for table_name in tables_with_primary:
2821 table = Table(table_name, self.db_meta, \
2822 Column('id', Integer, primary_key = True), \
2823 autoload=True, useexisting=True)
2824 setattr(self, 'tbl_%s' % table_name, table)
# Plain reflection is sufficient for tables without a surrogate key.
2826 for table_name in tables_no_primary:
2827 table = Table(table_name, self.db_meta, autoload=True)
2828 setattr(self, 'tbl_%s' % table_name, table)
# Views get a distinct attribute prefix so they are not mistaken for
# writable tables.
2830 for view_name in views:
2831 view = Table(view_name, self.db_meta, autoload=True)
2832 setattr(self, 'view_%s' % view_name, view)
# Wire every ORM class in this module to its reflected table via the
# classical mapper() API (SQLAlchemy 0.5 style), renaming columns to
# friendlier attribute names and declaring relations between classes.
# Runs once from __createconn after __setuptables.
# NOTE(review): a few continuation lines are missing from this gappy
# listing (e.g. inside the DBSource 'suites' relation); confirm against
# the full source before editing any individual mapper.
2834 def __setupmappers(self):
# --- core archive vocabulary: architectures and archives -------------
2835 mapper(Architecture, self.tbl_architecture,
2836 properties = dict(arch_id = self.tbl_architecture.c.id,
2837 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2838 order_by='suite_name',
2839 backref=backref('architectures', order_by='arch_string'))))
2841 mapper(Archive, self.tbl_archive,
2842 properties = dict(archive_id = self.tbl_archive.c.id,
2843 archive_name = self.tbl_archive.c.name))
# --- binary packages and their suite associations --------------------
2845 mapper(BinAssociation, self.tbl_bin_associations,
2846 properties = dict(ba_id = self.tbl_bin_associations.c.id,
2847 suite_id = self.tbl_bin_associations.c.suite,
2848 suite = relation(Suite),
2849 binary_id = self.tbl_bin_associations.c.bin,
2850 binary = relation(DBBinary)))
2852 mapper(PendingBinContents, self.tbl_pending_bin_contents,
2853 properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
2854 filename = self.tbl_pending_bin_contents.c.filename,
2855 package = self.tbl_pending_bin_contents.c.package,
2856 version = self.tbl_pending_bin_contents.c.version,
2857 arch = self.tbl_pending_bin_contents.c.arch,
2858 otype = self.tbl_pending_bin_contents.c.type))
2860 mapper(DebContents, self.tbl_deb_contents,
2861 properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
2862 package=self.tbl_deb_contents.c.package,
2863 suite=self.tbl_deb_contents.c.suite,
2864 arch=self.tbl_deb_contents.c.arch,
2865 section=self.tbl_deb_contents.c.section,
2866 filename=self.tbl_deb_contents.c.filename))
2868 mapper(UdebContents, self.tbl_udeb_contents,
2869 properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
2870 package=self.tbl_udeb_contents.c.package,
2871 suite=self.tbl_udeb_contents.c.suite,
2872 arch=self.tbl_udeb_contents.c.arch,
2873 section=self.tbl_udeb_contents.c.section,
2874 filename=self.tbl_udeb_contents.c.filename))
2876 mapper(BuildQueue, self.tbl_build_queue,
2877 properties = dict(queue_id = self.tbl_build_queue.c.id))
2879 mapper(BuildQueueFile, self.tbl_build_queue_files,
2880 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
2881 poolfile = relation(PoolFile, backref='buildqueueinstances')))
2883 mapper(DBBinary, self.tbl_binaries,
2884 properties = dict(binary_id = self.tbl_binaries.c.id,
2885 package = self.tbl_binaries.c.package,
2886 version = self.tbl_binaries.c.version,
2887 maintainer_id = self.tbl_binaries.c.maintainer,
2888 maintainer = relation(Maintainer),
2889 source_id = self.tbl_binaries.c.source,
2890 source = relation(DBSource),
2891 arch_id = self.tbl_binaries.c.architecture,
2892 architecture = relation(Architecture),
2893 poolfile_id = self.tbl_binaries.c.file,
2894 poolfile = relation(PoolFile),
2895 binarytype = self.tbl_binaries.c.type,
2896 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2897 fingerprint = relation(Fingerprint),
2898 install_date = self.tbl_binaries.c.install_date,
2899 binassociations = relation(BinAssociation,
2900 primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))
# --- upload ACLs ------------------------------------------------------
2902 mapper(BinaryACL, self.tbl_binary_acl,
2903 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
2905 mapper(BinaryACLMap, self.tbl_binary_acl_map,
2906 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
2907 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
2908 architecture = relation(Architecture)))
2910 mapper(Component, self.tbl_component,
2911 properties = dict(component_id = self.tbl_component.c.id,
2912 component_name = self.tbl_component.c.name))
2914 mapper(DBConfig, self.tbl_config,
2915 properties = dict(config_id = self.tbl_config.c.id))
2917 mapper(DSCFile, self.tbl_dsc_files,
2918 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2919 source_id = self.tbl_dsc_files.c.source,
2920 source = relation(DBSource),
2921 poolfile_id = self.tbl_dsc_files.c.file,
2922 poolfile = relation(PoolFile)))
2924 mapper(PoolFile, self.tbl_files,
2925 properties = dict(file_id = self.tbl_files.c.id,
2926 filesize = self.tbl_files.c.size,
2927 location_id = self.tbl_files.c.location,
2928 location = relation(Location,
2929 # using lazy='dynamic' in the back
2930 # reference because we have A LOT of
2931 # files in one location
2932 backref=backref('files', lazy='dynamic'))))
# --- signing identities: fingerprints, keyrings, uids ----------------
2934 mapper(Fingerprint, self.tbl_fingerprint,
2935 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2936 uid_id = self.tbl_fingerprint.c.uid,
2937 uid = relation(Uid),
2938 keyring_id = self.tbl_fingerprint.c.keyring,
2939 keyring = relation(Keyring),
2940 source_acl = relation(SourceACL),
2941 binary_acl = relation(BinaryACL)))
2943 mapper(Keyring, self.tbl_keyrings,
2944 properties = dict(keyring_name = self.tbl_keyrings.c.name,
2945 keyring_id = self.tbl_keyrings.c.id))
# --- .changes uploads and the pending-changes queue ------------------
2947 mapper(DBChange, self.tbl_changes,
2948 properties = dict(change_id = self.tbl_changes.c.id,
2949 poolfiles = relation(PoolFile,
2950 secondary=self.tbl_changes_pool_files,
2951 backref="changeslinks"),
2952 seen = self.tbl_changes.c.seen,
2953 source = self.tbl_changes.c.source,
2954 binaries = self.tbl_changes.c.binaries,
2955 architecture = self.tbl_changes.c.architecture,
2956 distribution = self.tbl_changes.c.distribution,
2957 urgency = self.tbl_changes.c.urgency,
2958 maintainer = self.tbl_changes.c.maintainer,
2959 changedby = self.tbl_changes.c.changedby,
2960 date = self.tbl_changes.c.date,
2961 version = self.tbl_changes.c.version,
2962 files = relation(ChangePendingFile,
2963 secondary=self.tbl_changes_pending_files_map,
2964 backref="changesfile"),
2965 in_queue_id = self.tbl_changes.c.in_queue,
2966 in_queue = relation(PolicyQueue,
2967 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
2968 approved_for_id = self.tbl_changes.c.approved_for))
2970 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
2971 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
2973 mapper(ChangePendingFile, self.tbl_changes_pending_files,
2974 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
2975 filename = self.tbl_changes_pending_files.c.filename,
2976 size = self.tbl_changes_pending_files.c.size,
2977 md5sum = self.tbl_changes_pending_files.c.md5sum,
2978 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
2979 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
2981 mapper(ChangePendingSource, self.tbl_changes_pending_source,
2982 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
2983 change = relation(DBChange),
2984 maintainer = relation(Maintainer,
2985 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
2986 changedby = relation(Maintainer,
2987 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
2988 fingerprint = relation(Fingerprint),
2989 source_files = relation(ChangePendingFile,
2990 secondary=self.tbl_changes_pending_source_files,
2991 backref="pending_sources")))
2994 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
2995 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
2996 keyring = relation(Keyring, backref="keyring_acl_map"),
2997 architecture = relation(Architecture)))
2999 mapper(Location, self.tbl_location,
3000 properties = dict(location_id = self.tbl_location.c.id,
3001 component_id = self.tbl_location.c.component,
3002 component = relation(Component),
3003 archive_id = self.tbl_location.c.archive,
3004 archive = relation(Archive),
3005 # FIXME: the 'type' column is old cruft and
3006 # should be removed in the future.
3007 archive_type = self.tbl_location.c.type))
3009 mapper(Maintainer, self.tbl_maintainer,
3010 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3011 maintains_sources = relation(DBSource, backref='maintainer',
3012 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3013 changed_sources = relation(DBSource, backref='changedby',
3014 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))))
3016 mapper(NewComment, self.tbl_new_comments,
3017 properties = dict(comment_id = self.tbl_new_comments.c.id))
# --- overrides: per-suite package/priority/section metadata ----------
3019 mapper(Override, self.tbl_override,
3020 properties = dict(suite_id = self.tbl_override.c.suite,
3021 suite = relation(Suite),
3022 package = self.tbl_override.c.package,
3023 component_id = self.tbl_override.c.component,
3024 component = relation(Component),
3025 priority_id = self.tbl_override.c.priority,
3026 priority = relation(Priority),
3027 section_id = self.tbl_override.c.section,
3028 section = relation(Section),
3029 overridetype_id = self.tbl_override.c.type,
3030 overridetype = relation(OverrideType)))
3032 mapper(OverrideType, self.tbl_override_type,
3033 properties = dict(overridetype = self.tbl_override_type.c.type,
3034 overridetype_id = self.tbl_override_type.c.id))
3036 mapper(PolicyQueue, self.tbl_policy_queue,
3037 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3039 mapper(Priority, self.tbl_priority,
3040 properties = dict(priority_id = self.tbl_priority.c.id))
3042 mapper(Section, self.tbl_section,
3043 properties = dict(section_id = self.tbl_section.c.id,
3044 section=self.tbl_section.c.section))
# --- source packages and their suite associations --------------------
3046 mapper(DBSource, self.tbl_source,
3047 properties = dict(source_id = self.tbl_source.c.id,
3048 version = self.tbl_source.c.version,
3049 maintainer_id = self.tbl_source.c.maintainer,
3050 poolfile_id = self.tbl_source.c.file,
3051 poolfile = relation(PoolFile),
3052 fingerprint_id = self.tbl_source.c.sig_fpr,
3053 fingerprint = relation(Fingerprint),
3054 changedby_id = self.tbl_source.c.changedby,
3055 srcfiles = relation(DSCFile,
3056 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3057 suites = relation(Suite, secondary=self.tbl_src_associations,
3059 srcuploaders = relation(SrcUploader)))
3061 mapper(SourceACL, self.tbl_source_acl,
3062 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3064 mapper(SrcAssociation, self.tbl_src_associations,
3065 properties = dict(sa_id = self.tbl_src_associations.c.id,
3066 suite_id = self.tbl_src_associations.c.suite,
3067 suite = relation(Suite),
3068 source_id = self.tbl_src_associations.c.source,
3069 source = relation(DBSource)))
3071 mapper(SrcFormat, self.tbl_src_format,
3072 properties = dict(src_format_id = self.tbl_src_format.c.id,
3073 format_name = self.tbl_src_format.c.format_name))
3075 mapper(SrcUploader, self.tbl_src_uploaders,
3076 properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3077 source_id = self.tbl_src_uploaders.c.source,
3078 source = relation(DBSource,
3079 primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3080 maintainer_id = self.tbl_src_uploaders.c.maintainer,
3081 maintainer = relation(Maintainer,
3082 primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
# --- suites and per-suite configuration ------------------------------
3084 mapper(Suite, self.tbl_suite,
3085 properties = dict(suite_id = self.tbl_suite.c.id,
3086 policy_queue = relation(PolicyQueue),
3087 copy_queues = relation(BuildQueue, secondary=self.tbl_suite_build_queue_copy)))
3089 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3090 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3091 suite = relation(Suite, backref='suitesrcformats'),
3092 src_format_id = self.tbl_suite_src_formats.c.src_format,
3093 src_format = relation(SrcFormat)))
3095 mapper(Uid, self.tbl_uid,
3096 properties = dict(uid_id = self.tbl_uid.c.id,
3097 fingerprint = relation(Fingerprint)))
3099 mapper(UploadBlock, self.tbl_upload_blocks,
3100 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3101 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3102 uid = relation(Uid, backref="uploadblocks")))
3104 ## Connection functions
# Build the PostgreSQL connection string from the DB::* configuration
# values, create the engine/metadata/session factory and run the table
# and mapper setup.  NOTE(review): gappy listing -- the "cnf = Config()"
# assignment and the if/else branch headers selecting the TCP vs. local
# socket connection form are not visible; confirm against full source.
3105 def __createconn(self):
# Imported lazily to avoid a circular import at module load time.
3106 from config import Config
# TCP form: postgres://host[:port]/dbname ...
3110 connstr = "postgres://%s" % cnf["DB::Host"]
3111 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3112 connstr += ":%s" % cnf["DB::Port"]
3113 connstr += "/%s" % cnf["DB::Name"]
# ... local socket form: postgres:///dbname[?port=N]
3116 connstr = "postgres:///%s" % cnf["DB::Name"]
3117 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3118 connstr += "?port=%s" % cnf["DB::Port"]
# Engine echoing follows the debug flag captured in __init__.
3120 self.db_pg = create_engine(connstr, echo=self.debug)
3121 self.db_meta = MetaData()
3122 self.db_meta.bind = self.db_pg
3123 self.db_smaker = sessionmaker(bind=self.db_pg,
# Reflect the schema, then wire up the ORM mappers.
3127 self.__setuptables()
3128 self.__setupmappers()
# Tail of get_session (its def line is not visible in this listing):
# hands out a fresh SQLAlchemy session from the shared session factory.
3131 return self.db_smaker()
# Export the DBConn singleton class.
3133 __all__.append('DBConn')