5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
41 from datetime import datetime, timedelta
42 from errno import ENOENT
43 from tempfile import mkstemp, mkdtemp
45 from inspect import getargspec
48 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
49 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, backref
50 from sqlalchemy import types as sqltypes
52 # Don't remove this, we re-export the exceptions to scripts which import us
53 from sqlalchemy.exc import *
54 from sqlalchemy.orm.exc import NoResultFound
56 # Only import Config until Queue stuff is changed to store its config
58 from config import Config
59 from textutils import fix_maintainer
60 from dak_exceptions import NoSourceFieldError
62 # suppress some deprecation warnings in squeeze related to sqlalchemy
# NOTE(review): extraction gap -- the category argument / closing paren of the
# first filterwarnings call (and the SAWarning import) are not visible here.
64 warnings.filterwarnings('ignore', \
65     "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
67 # TODO: sqlalchemy needs some extra configuration to correctly reflect
68 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
# Silence SAWarning about partial-index predicates we don't need reflected.
69 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
72 ################################################################################
74 # Patch in support for the debversion field type so that it works during
# NOTE(review): extraction gap -- the try/except that selects between the
# 0.6 and 0.5 base classes is not visible; only both assignments remain.
78 # that is for sqlalchemy 0.6
79     UserDefinedType = sqltypes.UserDefinedType
81 # this one for sqlalchemy 0.5
82     UserDefinedType = sqltypes.TypeEngine
# Custom column type so SQLA can round-trip PostgreSQL's 'debversion' columns.
# Method bodies (get_col_spec return value, processor returns) are elided here.
84 class DebVersion(UserDefinedType):
85     def get_col_spec(self):
88 def bind_processor(self, dialect):
91 # ' = None' is needed for sqlalchemy 0.5:
92 def result_processor(self, dialect, coltype = None):
# Register the type only for the SQLA versions dak was ported to; the 'else:'
# introducing the raise below is elided by the extraction.
95 sa_major_version = sqlalchemy.__version__[0:3]
96 if sa_major_version in ["0.5", "0.6"]:
97     from sqlalchemy.databases import postgres
98     postgres.ischema_names['debversion'] = DebVersion
100 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
102 ################################################################################
104 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
106 ################################################################################
# Decorator: supplies a private session when the caller didn't pass one, and
# exposes commit_or_flush accordingly. NOTE(review): extraction gaps -- the
# docstring delimiters, try/finally and the session.close() call are elided.
108 def session_wrapper(fn):
110     Wrapper around common ".., session=None):" handling. If the wrapped
111     function is called without passing 'session', we create a local one
112     and destroy it when the function ends.
114     Also attaches a commit_or_flush method to the session; if we created a
115     local session, this is a synonym for session.commit(), otherwise it is a
116     synonym for session.flush().
119     def wrapped(*args, **kwargs):
120         private_transaction = False
122         # Find the session object
123         session = kwargs.get('session')
# getargspec is deprecated in modern Python; fine for the py2 era this targets.
126         if len(args) <= len(getargspec(fn)[0]) - 1:
127             # No session specified as last argument or in kwargs
128             private_transaction = True
129             session = kwargs['session'] = DBConn().session()
131         # Session is last argument in args
135         session = args[-1] = DBConn().session()
136         private_transaction = True
# Private session => real commit; borrowed session => flush only, caller commits.
138         if private_transaction:
139             session.commit_or_flush = session.commit
141         session.commit_or_flush = session.flush
144         return fn(*args, **kwargs)
146         if private_transaction:
147             # We created a session; close it.
150     wrapped.__doc__ = fn.__doc__
151     wrapped.func_name = fn.func_name
155 __all__.append('session_wrapper')
157 ################################################################################
# ORM-mapped row of the 'architecture' table. Compares equal to plain strings
# by arch_string. NOTE(review): the 'def __repr__' line is elided here.
159 class Architecture(object):
160     def __init__(self, arch_string = None, description = None):
161         self.arch_string = arch_string
162         self.description = description
164     def __eq__(self, val):
165         if isinstance(val, str):
166             return (self.arch_string== val)
167         # This signals to use the normal comparison operator
168         return NotImplemented
170     def __ne__(self, val):
171         if isinstance(val, str):
172             return (self.arch_string != val)
173         # This signals to use the normal comparison operator
174         return NotImplemented
177         return '<Architecture %s>' % self.arch_string
179 __all__.append('Architecture')
# NOTE(review): extraction gaps -- the @session_wrapper decorator, docstring
# delimiters, try/return q.one() and the 'return None' are elided.
182 def get_architecture(architecture, session=None):
184     Returns database id for given C{architecture}.
186     @type architecture: string
187     @param architecture: The name of the architecture
189     @type session: Session
190     @param session: Optional SQLA session object (a temporary one will be
191     generated if not supplied)
194     @return: Architecture object for the given arch (None if not present)
197     q = session.query(Architecture).filter_by(arch_string=architecture)
201     except NoResultFound:
204 __all__.append('get_architecture')
206 # TODO: should be removed because the implementation is too trivial
# NOTE(review): relies on get_architecture() returning an object; will raise
# AttributeError if the architecture is unknown (returns None) -- confirm intent.
208 def get_architecture_suites(architecture, session=None):
210     Returns list of Suite objects for given C{architecture} name
212     @type architecture: str
213     @param architecture: Architecture name to search for
215     @type session: Session
216     @param session: Optional SQL session object (a temporary one will be
217     generated if not supplied)
220     @return: list of Suite objects for the given name (may be empty)
223     return get_architecture(architecture, session).suites
225 __all__.append('get_architecture_suites')
227 ################################################################################
# ORM-mapped row of the 'archive' table. NOTE(review): __init__ body and the
# 'def __repr__' line are elided by the extraction.
229 class Archive(object):
230     def __init__(self, *args, **kwargs):
234         return '<Archive %s>' % self.archive_name
236 __all__.append('Archive')
# NOTE(review): extraction gaps -- decorator, docstring delimiters, try/return
# and the fall-through 'return None' are elided. Lookup is case-insensitive
# (name is lowercased before the query).
239 def get_archive(archive, session=None):
241     returns database id for given C{archive}.
243     @type archive: string
244     @param archive: the name of the archive
246     @type session: Session
247     @param session: Optional SQLA session object (a temporary one will be
248     generated if not supplied)
251     @return: Archive object for the given name (None if not present)
254     archive = archive.lower()
256     q = session.query(Archive).filter_by(archive_name=archive)
260     except NoResultFound:
263 __all__.append('get_archive')
265 ################################################################################
# ORM-mapped binary<->suite association row.
267 class BinAssociation(object):
268     def __init__(self, *args, **kwargs):
272         return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
274 __all__.append('BinAssociation')
276 ################################################################################
# ORM-mapped row of the bin_contents table (binary package file lists).
278 class BinContents(object):
279     def __init__(self, *args, **kwargs):
283         return '<BinContents (%s, %s)>' % (self.binary, self.filename)
285 __all__.append('BinContents')
287 ################################################################################
# ORM-mapped row of the 'binaries' table.
289 class DBBinary(object):
290     def __init__(self, *args, **kwargs):
294         return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)
296 __all__.append('DBBinary')
# NOTE(review): decorator and docstring delimiters elided by the extraction.
299 def get_suites_binary_in(package, session=None):
301     Returns list of Suite objects which given C{package} name is in
304     @param package: DBBinary package name to search for
307     @return: list of Suite objects for the given package
310     return session.query(Suite).join(BinAssociation).join(DBBinary).filter_by(package=package).all()
312 __all__.append('get_suites_binary_in')
# NOTE(review): decorator, try/return q.one() and 'return None' elided.
315 def get_binary_from_id(binary_id, session=None):
317     Returns DBBinary object for given C{id}
320     @param binary_id: Id of the required binary
322     @type session: Session
323     @param session: Optional SQLA session object (a temporary one will be
324     generated if not supplied)
327     @return: DBBinary object for the given binary (None if not present)
330     q = session.query(DBBinary).filter_by(binary_id=binary_id)
334     except NoResultFound:
337 __all__.append('get_binary_from_id')
# NOTE(review): decorator, docstring delimiters and the final 'return q.all()'
# are elided by the extraction.
340 def get_binaries_from_name(package, version=None, architecture=None, session=None):
342     Returns list of DBBinary objects for given C{package} name
345     @param package: DBBinary package name to search for
347     @type version: str or None
348     @param version: Version to search for (or None)
350     @type architecture: str, list or None
351     @param architecture: Architectures to limit to (or None if no limit)
353     @type session: Session
354     @param session: Optional SQL session object (a temporary one will be
355     generated if not supplied)
358     @return: list of DBBinary objects for the given name (may be empty)
361     q = session.query(DBBinary).filter_by(package=package)
363     if version is not None:
364         q = q.filter_by(version=version)
# A single architecture string is normalized to a one-element list.
366     if architecture is not None:
367         if not isinstance(architecture, list):
368             architecture = [architecture]
369         q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))
375 __all__.append('get_binaries_from_name')
# NOTE(review): decorator and docstring delimiters elided by the extraction.
378 def get_binaries_from_source_id(source_id, session=None):
380     Returns list of DBBinary objects for given C{source_id}
383     @param source_id: source_id to search for
385     @type session: Session
386     @param session: Optional SQL session object (a temporary one will be
387     generated if not supplied)
390     @return: list of DBBinary objects for the given name (may be empty)
393     return session.query(DBBinary).filter_by(source_id=source_id).all()
395 __all__.append('get_binaries_from_source_id')
# NOTE(review)/SECURITY: SQL is built with Python %-interpolation of 'package'
# and 'suitename' rather than bound parameters -- SQL injection risk if either
# value can come from untrusted input; should use execute() params instead.
# Several WHERE clauses (orig. lines 405, 408-409) are elided by the extraction.
398 def get_binary_from_name_suite(package, suitename, session=None):
399     ### For dak examine-package
400     ### XXX: Doesn't use object API yet
402     sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
403              FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
404              WHERE b.package='%(package)s'
406              AND fi.location = l.id
407              AND l.component = c.id
410              AND su.suite_name %(suitename)s
411              ORDER BY b.version DESC"""
413     return session.execute(sql % {'package': package, 'suitename': suitename})
415 __all__.append('get_binary_from_name_suite')
# Uses proper bound parameters (contrast with get_binary_from_name_suite).
# NOTE(review): the closing lines of the query string (orig. 426-427) are
# elided by the extraction.
418 def get_binary_components(package, suitename, arch, session=None):
419     # Check for packages that have moved from one component to another
420     query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
421     WHERE b.package=:package AND s.suite_name=:suitename
422       AND (a.arch_string = :arch OR a.arch_string = 'all')
423       AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
424       AND f.location = l.id
425       AND l.component = c.id
428     vals = {'package': package, 'suitename': suitename, 'arch': arch}
430     return session.execute(query, vals)
432 __all__.append('get_binary_components')
434 ################################################################################
# ORM-mapped binary upload ACL row.
436 class BinaryACL(object):
437     def __init__(self, *args, **kwargs):
441         return '<BinaryACL %s>' % self.binary_acl_id
443 __all__.append('BinaryACL')
445 ################################################################################
# ORM-mapped mapping row between fingerprints and binary ACLs.
447 class BinaryACLMap(object):
448     def __init__(self, *args, **kwargs):
452         return '<BinaryACLMap %s>' % self.binary_acl_map_id
454 __all__.append('BinaryACLMap')
456 ################################################################################
461 ArchiveDir "%(archivepath)s";
462 OverrideDir "%(overridedir)s";
463 CacheDir "%(cachedir)s";
468 Packages::Compress ". bzip2 gzip";
469 Sources::Compress ". bzip2 gzip";
474 bindirectory "incoming"
479 BinOverride "override.sid.all3";
480 BinCacheDB "packages-accepted.db";
482 FileList "%(filelist)s";
485 Packages::Extensions ".deb .udeb";
488 bindirectory "incoming/"
491 BinOverride "override.sid.all3";
492 SrcOverride "override.sid.all3.src";
493 FileList "%(filelist)s";
# Build queue (e.g. buildd incoming) directory backed by the build_queue table.
# NOTE(review): this class is heavily gutted by the extraction -- try/except/
# finally scaffolding, several os.write/cleanup lines and returns are elided.
497 class BuildQueue(object):
498     def __init__(self, *args, **kwargs):
502         return '<BuildQueue %s>' % self.queue_name
# Regenerate Packages/Sources/Release metadata for the queue directory by
# shelling out to apt-ftparchive; optionally sign Release with gpg.
504     def write_metadata(self, starttime, force=False):
505         # Do we write out metafiles?
506         if not (force or self.generate_metadata):
509         session = DBConn().session().object_session(self)
511         fl_fd = fl_name = ac_fd = ac_name = None
513         arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
514         startdir = os.getcwd()
517         # Grab files we want to include
518         newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
519         # Write file list with newer files
520         (fl_fd, fl_name) = mkstemp()
522         os.write(fl_fd, '%s\n' % n.fullpath)
527         # Write minimal apt.conf
528         # TODO: Remove hardcoding from template
529         (ac_fd, ac_name) = mkstemp()
530         os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
532                                             'cachedir': cnf["Dir::Cache"],
533                                             'overridedir': cnf["Dir::Override"],
537         # Run apt-ftparchive generate
538         os.chdir(os.path.dirname(ac_name))
539         os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
541         # Run apt-ftparchive release
542         # TODO: Eww - fix this
543         bname = os.path.basename(self.path)
547         # We have to remove the Release file otherwise it'll be included in the
550         os.unlink(os.path.join(bname, 'Release'))
# SECURITY/robustness: command lines are built by string interpolation and run
# via os.system -- shell metacharacters in config values would break/inject.
554         os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
556         # Crude hack with open and append, but this whole section is and should be redone.
557         if self.notautomatic:
558             release=open("Release", "a")
# NOTE(review): no trailing '\n' is written here -- looks like the appended
# field may run into the previous line; confirm against generated Release files.
559             release.write("NotAutomatic: yes")
564         keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
565         if cnf.has_key("Dinstall::SigningPubKeyring"):
566             keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
# NOTE(review): the trailing '"""' ends the string with an empty-string concat
# ("Release" + "") -- harmless but almost certainly a leftover typo.
568         os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
570         # Move the files if we got this far
571         os.rename('Release', os.path.join(bname, 'Release'))
573         os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
575         # Clean up any left behind files
# Expire queue files past their stay_of_execution and drop dangling links.
602     def clean_and_update(self, starttime, Logger, dryrun=False):
603         """WARNING: This routine commits for you"""
604         session = DBConn().session().object_session(self)
606         if self.generate_metadata and not dryrun:
607             self.write_metadata(starttime)
609         # Grab files older than our execution time
610         older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
616         Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
618         Logger.log(["I: Removing %s from the queue" % o.fullpath])
619         os.unlink(o.fullpath)
622         # If it wasn't there, don't worry
623         if e.errno == ENOENT:
626         # TODO: Replace with proper logging call
627         Logger.log(["E: Could not remove %s" % o.fullpath])
# Second pass: remove on-disk files with no matching BuildQueueFile row.
634         for f in os.listdir(self.path):
635             if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
639             r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
640         except NoResultFound:
641             fp = os.path.join(self.path, f)
643             Logger.log(["I: Would remove unused link %s" % fp])
645             Logger.log(["I: Removing unused link %s" % fp])
649             Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
651     def add_file_from_pool(self, poolfile):
652         """Copies a file into the pool. Assumes that the PoolFile object is
653         attached to the same SQLAlchemy session as the Queue object is.
655         The caller is responsible for committing after calling this function."""
656         poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
658         # Check if we have a file of this name or this ID already
659         for f in self.queuefiles:
660             if f.fileid is not None and f.fileid == poolfile.file_id or \
661                f.poolfile.filename == poolfile_basename:
662                 # In this case, update the BuildQueueFile entry so we
663                 # don't remove it too early
664                 f.lastused = datetime.now()
665                 DBConn().session().object_session(poolfile).add(f)
668         # Prepare BuildQueueFile object
669         qf = BuildQueueFile()
670         qf.build_queue_id = self.queue_id
671         qf.lastused = datetime.now()
672         qf.filename = poolfile_basename
674         targetpath = poolfile.fullpath
675         queuepath = os.path.join(self.path, poolfile_basename)
679         # We need to copy instead of symlink
681         utils.copy(targetpath, queuepath)
682         # NULL in the fileid field implies a copy
685         os.symlink(targetpath, queuepath)
686         qf.fileid = poolfile.file_id
690         # Get the same session as the PoolFile is using and add the qf to it
691         DBConn().session().object_session(poolfile).add(qf)
696 __all__.append('BuildQueue')
# NOTE(review): decorator, try/return and fall-through return elided. The
# summary says "creating it if it does not [exist]" but no creation code is
# visible in this extraction -- confirm against the full source.
699 def get_build_queue(queuename, session=None):
701     Returns BuildQueue object for given C{queue name}, creating it if it does not
704     @type queuename: string
705     @param queuename: The name of the queue
707     @type session: Session
708     @param session: Optional SQLA session object (a temporary one will be
709     generated if not supplied)
712     @return: BuildQueue object for the given queue
715     q = session.query(BuildQueue).filter_by(queue_name=queuename)
719     except NoResultFound:
722 __all__.append('get_build_queue')
724 ################################################################################
# ORM-mapped file entry in a build queue; fullpath (a property in the full
# source -- its 'def' line is elided here) joins the queue path and filename.
726 class BuildQueueFile(object):
727     def __init__(self, *args, **kwargs):
731         return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
735         return os.path.join(self.buildqueue.path, self.filename)
738 __all__.append('BuildQueueFile')
740 ################################################################################
# ORM-mapped pending-binary row for queued changes.
742 class ChangePendingBinary(object):
743     def __init__(self, *args, **kwargs):
747         return '<ChangePendingBinary %s>' % self.change_pending_binary_id
749 __all__.append('ChangePendingBinary')
751 ################################################################################
# ORM-mapped pending-file row for queued changes.
753 class ChangePendingFile(object):
754     def __init__(self, *args, **kwargs):
758         return '<ChangePendingFile %s>' % self.change_pending_file_id
760 __all__.append('ChangePendingFile')
762 ################################################################################
# ORM-mapped pending-source row for queued changes.
764 class ChangePendingSource(object):
765     def __init__(self, *args, **kwargs):
769         return '<ChangePendingSource %s>' % self.change_pending_source_id
771 __all__.append('ChangePendingSource')
773 ################################################################################
# ORM-mapped row of the 'component' table; compares equal to plain strings by
# component_name (same pattern as Architecture). 'def __repr__' line elided.
775 class Component(object):
776     def __init__(self, *args, **kwargs):
779     def __eq__(self, val):
780         if isinstance(val, str):
781             return (self.component_name == val)
782         # This signals to use the normal comparison operator
783         return NotImplemented
785     def __ne__(self, val):
786         if isinstance(val, str):
787             return (self.component_name != val)
788         # This signals to use the normal comparison operator
789         return NotImplemented
792         return '<Component %s>' % self.component_name
795 __all__.append('Component')
# Case-insensitive component lookup. NOTE(review): decorator, try/return and
# fall-through return elided by the extraction.
798 def get_component(component, session=None):
800     Returns database id for given C{component}.
802     @type component: string
803     @param component: The name of the override type
806     @return: the database id for the given component
809     component = component.lower()
811     q = session.query(Component).filter_by(component_name=component)
815     except NoResultFound:
818 __all__.append('get_component')
820 ################################################################################
# ORM-mapped row of the 'config' table (key/value configuration in the DB).
822 class DBConfig(object):
823     def __init__(self, *args, **kwargs):
827         return '<DBConfig %s>' % self.name
829 __all__.append('DBConfig')
831 ################################################################################
# Get-or-create on content_file_names; inserts and commit_or_flush()es when the
# filename is missing so the new row's id can be read back. NOTE(review):
# decorator, try, session.add(cf) and final return elided by the extraction.
834 def get_or_set_contents_file_id(filename, session=None):
836     Returns database id for given filename.
838     If no matching file is found, a row is inserted.
840     @type filename: string
841     @param filename: The filename
842     @type session: SQLAlchemy
843     @param session: Optional SQL session object (a temporary one will be
844     generated if not supplied). If not passed, a commit will be performed at
845     the end of the function, otherwise the caller is responsible for committing.
848     @return: the database id for the given component
851     q = session.query(ContentFilename).filter_by(filename=filename)
854     ret = q.one().cafilename_id
855     except NoResultFound:
856         cf = ContentFilename()
857         cf.filename = filename
859         session.commit_or_flush()
860         ret = cf.cafilename_id
864 __all__.append('get_or_set_contents_file_id')
# NOTE(review): parts of the SELECT column list (orig. 892-894) and docstring
# delimiters are elided by the extraction. Query uses bound parameters.
867 def get_contents(suite, overridetype, section=None, session=None):
869     Returns contents for a suite / overridetype combination, limiting
870     to a section if not None.
873     @param suite: Suite object
875     @type overridetype: OverrideType
876     @param overridetype: OverrideType object
878     @type section: Section
879     @param section: Optional section object to limit results to
881     @type session: SQLAlchemy
882     @param session: Optional SQL session object (a temporary one will be
883     generated if not supplied)
886     @return: ResultsProxy object set up to return tuples of (filename, section,
890     # find me all of the contents for a given suite
891     contents_q = """SELECT (p.path||'/'||n.file) AS fn,
895         FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
896         JOIN content_file_names n ON (c.filename=n.id)
897         JOIN binaries b ON (b.id=c.binary_pkg)
898         JOIN override o ON (o.package=b.package)
899         JOIN section s ON (s.id=o.section)
900         WHERE o.suite = :suiteid AND o.type = :overridetypeid
901         AND b.type=:overridetypename"""
903     vals = {'suiteid': suite.suite_id,
904             'overridetypeid': overridetype.overridetype_id,
905             'overridetypename': overridetype.overridetype}
# Optional extra filter when a Section object was supplied.
907     if section is not None:
908         contents_q += " AND s.id = :sectionid"
909         vals['sectionid'] = section.section_id
911     contents_q += " ORDER BY fn"
913     return session.execute(contents_q, vals)
915 __all__.append('get_contents')
917 ################################################################################
# ORM-mapped row of content_file_paths (directory component of contents).
919 class ContentFilepath(object):
920     def __init__(self, *args, **kwargs):
924         return '<ContentFilepath %s>' % self.filepath
926 __all__.append('ContentFilepath')
# Get-or-create on content_file_paths; mirrors get_or_set_contents_file_id.
# NOTE(review): decorator, try, session.add(cf) and final return elided.
929 def get_or_set_contents_path_id(filepath, session=None):
931     Returns database id for given path.
933     If no matching file is found, a row is inserted.
935     @type filepath: string
936     @param filepath: The filepath
938     @type session: SQLAlchemy
939     @param session: Optional SQL session object (a temporary one will be
940     generated if not supplied). If not passed, a commit will be performed at
941     the end of the function, otherwise the caller is responsible for committing.
944     @return: the database id for the given path
947     q = session.query(ContentFilepath).filter_by(filepath=filepath)
950     ret = q.one().cafilepath_id
951     except NoResultFound:
952         cf = ContentFilepath()
953         cf.filepath = filepath
955         session.commit_or_flush()
956         ret = cf.cafilepath_id
960 __all__.append('get_or_set_contents_path_id')
962 ################################################################################
# ORM-mapped row of content_associations.
964 class ContentAssociation(object):
965     def __init__(self, *args, **kwargs):
969         return '<ContentAssociation %s>' % self.ca_id
971 __all__.append('ContentAssociation')
# Bulk-insert bin_contents rows for one binary. NOTE(review): the surrounding
# try/except, commit and 'return True/False' lines are elided; visible code
# shows a private session is created when none is passed and rolled back on
# error only in that case.
973 def insert_content_paths(binary_id, fullpaths, session=None):
975     Make sure given path is associated with given binary id
978     @param binary_id: the id of the binary
979     @type fullpaths: list
980     @param fullpaths: the list of paths of the file being associated with the binary
981     @type session: SQLAlchemy session
982     @param session: Optional SQLAlchemy session. If this is passed, the caller
983     is responsible for ensuring a transaction has begun and committing the
984     results or rolling back based on the result code. If not passed, a commit
985     will be performed at the end of the function, otherwise the caller is
986     responsible for committing.
988     @return: True upon success
993         session = DBConn().session()
# Strip any leading './' so stored paths are archive-relative.
998         def generate_path_dicts():
999             for fullpath in fullpaths:
1000                 if fullpath.startswith( './' ):
1001                     fullpath = fullpath[2:]
1003                 yield {'filename':fullpath, 'id': binary_id }
# Uses bound parameters for the insert -- one execute per path.
1005         for d in generate_path_dicts():
1006             session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1015         traceback.print_exc()
1017         # Only rollback if we set up the session ourself
1024 __all__.append('insert_content_paths')
1026 ################################################################################
# ORM-mapped row of dsc_files (files referenced by a source .dsc).
1028 class DSCFile(object):
1029     def __init__(self, *args, **kwargs):
1033         return '<DSCFile %s>' % self.dscfile_id
1035 __all__.append('DSCFile')
# Filtered DSCFile listing; each filter is applied only when its argument is
# given. NOTE(review): decorator and the final 'return q.all()' are elided.
1038 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1040     Returns a list of DSCFiles which may be empty
1042     @type dscfile_id: int (optional)
1043     @param dscfile_id: the dscfile_id of the DSCFiles to find
1045     @type source_id: int (optional)
1046     @param source_id: the source id related to the DSCFiles to find
1048     @type poolfile_id: int (optional)
1049     @param poolfile_id: the poolfile id related to the DSCFiles to find
1052     @return: Possibly empty list of DSCFiles
1055     q = session.query(DSCFile)
1057     if dscfile_id is not None:
1058         q = q.filter_by(dscfile_id=dscfile_id)
1060     if source_id is not None:
1061         q = q.filter_by(source_id=source_id)
1063     if poolfile_id is not None:
1064         q = q.filter_by(poolfile_id=poolfile_id)
1068 __all__.append('get_dscfiles')
1070 ################################################################################
# ORM-mapped row of 'files'; fullpath (a property in the full source -- its
# 'def' line is elided here) joins the location path and filename.
1072 class PoolFile(object):
1073     def __init__(self, *args, **kwargs):
1077         return '<PoolFile %s>' % self.filename
1081         return os.path.join(self.location.path, self.filename)
1083 __all__.append('PoolFile')
# Validate a pool file against the DB by name/location, then size+md5sum.
# NOTE(review): decorator, try/except branches and return statements elided;
# only the query setup and the mismatch comparison survive the extraction.
1086 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1089     (ValidFileFound [boolean or None], PoolFile object or None)
1091     @type filename: string
1092     @param filename: the filename of the file to check against the DB
1095     @param filesize: the size of the file to check against the DB
1097     @type md5sum: string
1098     @param md5sum: the md5sum of the file to check against the DB
1100     @type location_id: int
1101     @param location_id: the id of the location to look in
1104     @return: Tuple of length 2.
1105              - If more than one file found with that name: (C{None},  C{None})
1106              - If valid pool file found: (C{True}, C{PoolFile object})
1107              - If valid pool file not found:
1108                  - (C{False}, C{None}) if no file found
1109                  - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
1112     q = session.query(PoolFile).filter_by(filename=filename)
1113     q = q.join(Location).filter_by(location_id=location_id)
# filesize may arrive as a string; it is coerced with int() for the comparison.
1123     if obj.md5sum != md5sum or obj.filesize != int(filesize):
1131 __all__.append('check_poolfile')
# NOTE(review): decorator, try/return q.one() and 'return None' elided.
1134 def get_poolfile_by_id(file_id, session=None):
1136     Returns a PoolFile objects or None for the given id
1139     @param file_id: the id of the file to look for
1141     @rtype: PoolFile or None
1142     @return: either the PoolFile object or None
1145     q = session.query(PoolFile).filter_by(file_id=file_id)
1149     except NoResultFound:
1152 __all__.append('get_poolfile_by_id')
# NOTE(review): decorator and the final 'return q.all()' elided.
1156 def get_poolfile_by_name(filename, location_id=None, session=None):
1158     Returns an array of PoolFile objects for the given filename and
1159     (optionally) location_id
1161     @type filename: string
1162     @param filename: the filename of the file to check against the DB
1164     @type location_id: int
1165     @param location_id: the id of the location to look in (optional)
1168     @return: array of PoolFile objects
1171     q = session.query(PoolFile).filter_by(filename=filename)
1173     if location_id is not None:
1174         q = q.join(Location).filter_by(location_id=location_id)
1178 __all__.append('get_poolfile_by_name')
# Suffix match on the pool path ('%/<filename>'). NOTE(review): decorator and
# final return elided; the LIKE pattern is built by %-interpolation because
# the author couldn't bind a parameter inside the pattern (see TODO).
1181 def get_poolfile_like_name(filename, session=None):
1183     Returns an array of PoolFile objects which are like the given name
1185     @type filename: string
1186     @param filename: the filename of the file to check against the DB
1189     @return: array of PoolFile objects
1192     # TODO: There must be a way of properly using bind parameters with %FOO%
1193     q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1197 __all__.append('get_poolfile_like_name')
# Create a PoolFile row from a checksum dict and flush so file_id is assigned.
# NOTE(review): decorator, docstring delimiters, session.flush() and the final
# 'return poolfile' are elided by the extraction.
1200 def add_poolfile(filename, datadict, location_id, session=None):
1202     Add a new file to the pool
1204     @type filename: string
1205     @param filename: filename
1207     @type datadict: dict
1208     @param datadict: dict with needed data
1210     @type location_id: int
1211     @param location_id: database id of the location
1214     @return: the PoolFile object created
1216     poolfile = PoolFile()
1217     poolfile.filename = filename
# datadict is expected to carry 'size', 'md5sum', 'sha1sum', 'sha256sum' keys
# (KeyError if absent -- callers must supply all four).
1218     poolfile.filesize = datadict["size"]
1219     poolfile.md5sum = datadict["md5sum"]
1220     poolfile.sha1sum = datadict["sha1sum"]
1221     poolfile.sha256sum = datadict["sha256sum"]
1222     poolfile.location_id = location_id
1224     session.add(poolfile)
1225     # Flush to get a file id (NB: This is not a commit)
1230 __all__.append('add_poolfile')
1232 ################################################################################
# ORM-mapped row of the 'fingerprint' table. 'def __repr__' line elided.
1234 class Fingerprint(object):
1235     def __init__(self, fingerprint = None):
1236         self.fingerprint = fingerprint
1239         return '<Fingerprint %s>' % self.fingerprint
1241 __all__.append('Fingerprint')
# NOTE(review): decorator, try/return q.one() and 'return None' elided.
1244 def get_fingerprint(fpr, session=None):
1246     Returns Fingerprint object for given fpr.
1249     @param fpr: The fpr to find / add
1251     @type session: SQLAlchemy
1252     @param session: Optional SQL session object (a temporary one will be
1253     generated if not supplied).
1256     @return: the Fingerprint object for the given fpr or None
1259     q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1263     except NoResultFound:
1268 __all__.append('get_fingerprint')
# Get-or-create on 'fingerprint'; inserts and commit_or_flush()es when missing
# so the new row is usable. NOTE(review): decorator, try/return and final
# 'return fingerprint' elided by the extraction.
1271 def get_or_set_fingerprint(fpr, session=None):
1273     Returns Fingerprint object for given fpr.
1275     If no matching fpr is found, a row is inserted.
1278     @param fpr: The fpr to find / add
1280     @type session: SQLAlchemy
1281     @param session: Optional SQL session object (a temporary one will be
1282     generated if not supplied). If not passed, a commit will be performed at
1283     the end of the function, otherwise the caller is responsible for commiting.
1284     A flush will be performed either way.
1287     @return: the Fingerprint object for the given fpr
1290     q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1294     except NoResultFound:
1295         fingerprint = Fingerprint()
1296         fingerprint.fingerprint = fpr
1297         session.add(fingerprint)
1298         session.commit_or_flush()
1303 __all__.append('get_or_set_fingerprint')
1305 ################################################################################
1307 # Helper routine for Keyring class
# Build a display name from an LDAP entry's cn/mn/sn attributes, skipping
# empty and "-" values. NOTE(review): the list initialisation and append lines
# are elided by the extraction.
1308 def get_ldap_name(entry):
1310     for k in ["cn", "mn", "sn"]:
1312         if ret and ret[0] != "" and ret[0] != "-":
1314     return " ".join(name)
1316 ################################################################################
class Keyring(object):
    """ORM-mapped keyring row plus helpers for parsing keyrings with gpg."""
    # Command template; %s is filled in with the keyring path by load_keys().
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    def __init__(self, *args, **kwargs):
    # (constructor body and the "def __repr__(self):" header are elided
    #  in this excerpt)
        return '<Keyring %s>' % self.keyring_name
1331 def de_escape_gpg_str(self, txt):
1332 esclist = re.split(r'(\\x..)', txt)
1333 for x in range(1,len(esclist),2):
1334 esclist[x] = "%c" % (int(esclist[x][2:],16))
1335 return "".join(esclist)
    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        # (one or more lines elided in this excerpt)
        (name, address) = email.Utils.parseaddr(uid)
        # Strip any parenthesised comment from the real-name part.
        name = re.sub(r"\s*[(].*[)]", "", name)
        # Undo gpg's \xNN escaping of non-ASCII characters.
        name = self.de_escape_gpg_str(name)
        return (name, address)
    def load_keys(self, keyring):
        """
        Populate self.keys and self.fpr_lookup by running gpg over the
        given keyring file and parsing its --with-colons output.

        @param keyring: path to the keyring file passed to gpg
        """
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        # NOTE(review): several lines are elided in this excerpt, including
        # the initialisation of self.keys / self.fpr_lookup / key / signingkey
        # and the handling of the "pub" record that sets 'key'.
        k = os.popen(self.gpg_invocation % keyring, "r")

        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # field[9] holds the primary uid of the key.
                (name, addr) = self.parse_address(field[9])
                self.keys[key]["email"] = addr
                self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
            elif key and field[0] == "sub" and len(field) >= 12:
                # Capability string: 's' means this subkey can sign.
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                # Prefer the first uid that actually carries an address.
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                # Map fingerprint -> key so uploads can be traced back.
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key
    def import_users_from_ldap(self, session):
        """
        Look up the uid/name for every fingerprint in this keyring via the
        configured LDAP directory and record it on self.keys.

        @return: tuple (byname, byuid) mapping uid -> (keyid, name) and
        keyid -> (uid, name)
        """
        # (import of the ldap module and related setup elided in this excerpt)
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        # Anonymous bind is sufficient for this read-only search.
        l = ldap.open(LDAPServer)
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        # NOTE(review): the loop header iterating over Attrs and the byuid /
        # byname initialisation are elided in this excerpt.
        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]

        for f in fingerprints:
            key = self.fpr_lookup.get(f, None)
            # Skip fingerprints that are not in this keyring.
            if key not in self.keys:
            self.keys[key]["uid"] = uid

            # (duplicate-uid guard elided in this excerpt)
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, name)
            byname[uid] = (keyid, name)

        return (byname, byuid)
    def generate_users_from_keyring(self, format, session):
        """
        Derive uid entries directly from the keyring's email addresses
        (used when no LDAP source is available).

        @param format: %s-style template applied to each email address to
        build the uid string
        @return: tuple (byname, byuid) as in import_users_from_ldap
        """
        # (byuid / byname initialisation and the 'any_invalid' flag are
        #  elided in this excerpt)
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # Key carries no usable address; mark it and patch up below.
                self.keys[x]["uid"] = format % "invalid-uid"
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

            # (conditional guarding the shared invalid-uid entry elided)
            uid = format % "invalid-uid"
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, "ungeneratable user id")
            byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    # NOTE(review): decorator, try: and return lines are elided in this excerpt.
    q = session.query(Keyring).filter_by(keyring_name=keyring)

    except NoResultFound:

__all__.append('get_keyring')
1462 ################################################################################
class KeyringACLMap(object):
    """ORM-mapped row linking a keyring to an ACL (keyring_acl_map table)."""
    def __init__(self, *args, **kwargs):
    # (constructor body and "def __repr__(self):" elided in this excerpt)
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id

__all__.append('KeyringACLMap')
1473 ################################################################################
class DBChange(object):
    """ORM-mapped row of the changes table (an uploaded .changes file)."""
    def __init__(self, *args, **kwargs):
    # (constructor body and "def __repr__(self):" elided in this excerpt)
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        """Detach this upload from its policy queue and drop its file links."""
        # NOTE(review): object_session() is invoked on a Session instance
        # here; it resolves via the Session.object_session classmethod —
        # confirm this is intended rather than plain object_session(self).
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries
        # (deletion statements elided in this excerpt)

        # Remove changes_pending_files references

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    # NOTE(review): decorator, try: and return lines are elided in this excerpt.
    q = session.query(DBChange).filter_by(changesname=filename)

    except NoResultFound:

__all__.append('get_dbchange')
1522 ################################################################################
class Location(object):
    """ORM-mapped row of the location table (an archive pool path)."""
    def __init__(self, *args, **kwargs):
    # (constructor body and "def __repr__(self):" elided in this excerpt)
        return '<Location %s (%s)>' % (self.path, self.location_id)

__all__.append('Location')
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive.

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """
    # NOTE(review): decorator, try: and return lines are elided in this excerpt.
    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    except NoResultFound:

__all__.append('get_location')
1567 ################################################################################
class Maintainer(object):
    """ORM-mapped row of the maintainer table (an RFC822 name/address)."""
    def __init__(self, name = None):
    # (attribute assignment and "def __repr__(self):" elided in this excerpt)
        return '''<Maintainer '%s' (%s)>''' % (self.name, self.maintainer_id)

    def get_split_maintainer(self):
        """Return the (rfc822, rfc2047, name, email) split of self.name,
        or four empty strings if no name is set."""
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    # NOTE(review): decorator, try: and return lines are elided in this excerpt.
    q = session.query(Maintainer).filter_by(name=name)

    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        # Flush so the new row gets an id; commit only when we own the session.
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the Maintainer object behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    # Query.get returns None for an unknown primary key, so no try needed.
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1635 ################################################################################
class NewComment(object):
    """ORM-mapped row of the new_comments table (ftpmaster NEW-queue notes)."""
    def __init__(self, *args, **kwargs):
    # (constructor body and "def __repr__(self):" elided in this excerpt)
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    # (decorator line elided in this excerpt)
    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)

__all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters; each non-None parameter narrows the result.

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    # (decorator and the final "return q.all()" are elided in this excerpt)
    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

__all__.append('get_new_comments')
1705 ################################################################################
class Override(object):
    """ORM-mapped row of the override table (section/priority per suite)."""
    def __init__(self, *args, **kwargs):
    # (constructor body and "def __repr__(self):" elided in this excerpt)
        return '<Override %s (%s)>' % (self.package, self.suite_id)

__all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to. If
                  None, don't limit. Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to. If None, don't limit. Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to. If None, don't limit. Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    # (decorator and the final "return q.all()" are elided in this excerpt)
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each filter accepts a scalar or a list; normalise to a list first.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

__all__.append('get_override')
1764 ################################################################################
class OverrideType(object):
    """ORM-mapped row of the override_type table (deb/udeb/dsc)."""
    def __init__(self, *args, **kwargs):
    # (constructor body and "def __repr__(self):" elided in this excerpt)
        return '<OverrideType %s>' % self.overridetype

__all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the OverrideType object for the given override type (None if
    not present)
    """
    # NOTE(review): decorator, try: and return lines are elided in this excerpt.
    q = session.query(OverrideType).filter_by(overridetype=override_type)

    except NoResultFound:

__all__.append('get_override_type')
1800 ################################################################################
class DebContents(object):
    """
    ORM-mapped row of the deb_contents table: one file path recorded for a
    .deb package's contents listing.  Attributes (package, file, ...) are
    attached by the SQLAlchemy mapper.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo: previously rendered as '<DebConetnts ...>'.
        return '<DebContents %s: %s>' % (self.package.package, self.file)
1809 __all__.append('DebContents')
class UdebContents(object):
    """
    ORM-mapped row of the udeb_contents table: one file path recorded for a
    .udeb package's contents listing.  Attributes (package, file, ...) are
    attached by the SQLAlchemy mapper.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo: previously rendered as '<UdebConetnts ...>'.
        return '<UdebContents %s: %s>' % (self.package.package, self.file)
1819 __all__.append('UdebContents')
class PendingBinContents(object):
    """ORM-mapped row of the pending_bin_contents table (contents awaiting
    package acceptance)."""
    def __init__(self, *args, **kwargs):
    # (constructor body and "def __repr__(self):" elided in this excerpt)
        return '<PendingBinContents %s>' % self.contents_id

__all__.append('PendingBinContents')
def insert_pending_content_paths(package,
    # (remaining parameters of the signature — is_udeb, fullpaths, an optional
    #  session — are elided in this excerpt)
    """
    Make sure given paths are temporarily associated with given
    package.

    @type package: dict
    @param package: the package to associate with should have been read in from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem
    """
    # Track whether we created the session (and thus own the transaction).
    privatetrans = False
        session = DBConn().session()

    # (the enclosing try: is elided in this excerpt)
        arch = get_architecture(package['Architecture'], session)
        arch_id = arch.arch_id

        # Remove any already existing recorded files for this package
        q = session.query(PendingBinContents)
        q = q.filter_by(package=package['Package'])
        q = q.filter_by(version=package['Version'])
        q = q.filter_by(architecture=arch_id)
        # (q.delete() elided in this excerpt)

        for fullpath in fullpaths:
            # Normalise paths recorded relative to the package root.
            if fullpath.startswith( "./" ):
                fullpath = fullpath[2:]

            pca = PendingBinContents()
            pca.package = package['Package']
            pca.version = package['Version']
            pca.architecture = arch_id

            # Type codes distinguishing udeb from deb contents rows.
                pca.type = 8 # gross
                pca.type = 7 # also gross

        # Only commit if we set up the session ourself
        # (commit/close and "return True" elided in this excerpt)

    # Python 2 except syntax; logs the traceback and reports failure.
    except Exception, e:
        traceback.print_exc()

        # Only rollback if we set up the session ourself
        # (rollback/close and "return False" elided in this excerpt)

__all__.append('insert_pending_content_paths')
1905 ################################################################################
class PolicyQueue(object):
    """ORM-mapped row of the policy_queue table (e.g. NEW, proposed-updates)."""
    def __init__(self, *args, **kwargs):
    # (constructor body and "def __repr__(self):" elided in this excerpt)
        return '<PolicyQueue %s>' % self.queue_name

__all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    # NOTE(review): decorator, try: and return lines are elided in this excerpt.
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    except NoResultFound:

__all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    # NOTE(review): decorator, try: and return lines are elided in this excerpt.
    q = session.query(PolicyQueue).filter_by(path=pathname)

    except NoResultFound:

__all__.append('get_policy_queue_from_path')
1966 ################################################################################
class Priority(object):
    """ORM-mapped row of the priority table."""
    def __init__(self, *args, **kwargs):
    # (constructor body elided in this excerpt)

    def __eq__(self, val):
        # Allow direct comparison against a priority-name string.
        # NOTE(review): matches str only; unicode values would fall through
        # to the default comparison — confirm callers never pass unicode.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    # (the "def __repr__(self):" header is elided in this excerpt)
        return '<Priority %s (%s)>' % (self.priority, self.priority_id)

__all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    # NOTE(review): decorator, try: and return lines are elided in this excerpt.
    q = session.query(Priority).filter_by(priority=priority)

    except NoResultFound:

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    # (decorator, 'ret' initialisation, loop header and return are elided
    #  in this excerpt)
    q = session.query(Priority)
        ret[x.priority] = x.priority_id

__all__.append('get_priorities')
2036 ################################################################################
class Section(object):
    """ORM-mapped row of the section table."""
    def __init__(self, *args, **kwargs):
    # (constructor body elided in this excerpt)

    def __eq__(self, val):
        # Allow direct comparison against a section-name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    # (the "def __repr__(self):" header is elided in this excerpt)
        return '<Section %s>' % self.section

__all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    # NOTE(review): decorator, try: and return lines are elided in this excerpt.
    q = session.query(Section).filter_by(section=section)

    except NoResultFound:

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    # (decorator, 'ret' initialisation, loop header and return are elided
    #  in this excerpt)
    q = session.query(Section)
        ret[x.section] = x.section_id

__all__.append('get_sections')
2106 ################################################################################
class DBSource(object):
    """ORM-mapped row of the source table (a source package version)."""
    def __init__(self, maintainer = None, changedby = None):
        self.maintainer = maintainer
        self.changedby = changedby

    # (the "def __repr__(self):" header is elided in this excerpt)
        return '<DBSource %s (%s)>' % (self.source, self.version)

__all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match                      => 1.0-3
      2. bin-only NMU                     => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # NOTE(review): mutable default argument (suites=["any"]) — the visible
    # code does not mutate it, but confirm no caller-visible aliasing.
    # Several lines (decorator, 'okay' flag, suite-expansion loop, returns)
    # are elided in this excerpt.
    for suite in suites:
        q = session.query(DBSource).filter_by(source=source)

            # source must exist in suite X, or in some other suite that's
            # mapped to X, recursively... silent-maps are counted too,
            # unreleased-maps aren't.
            maps = cnf.ValueList("SuiteMappings")[:]
            # Oldest mapping first so chains resolve in order.
            maps = [ m.split() for m in maps ]
            maps = [ (x[1], x[2]) for x in maps
                            if x[0] == "map" or x[0] == "silent-map" ]
                if x[1] in s and x[0] not in s:

            q = q.join(SrcAssociation).join(Suite)
            q = q.filter(Suite.suite_name.in_(s))

        # Reduce the query results to a list of version numbers
        ql = [ j.version for j in q.all() ]

        # Try (1) exact match first...
        if source_version in ql:

        # ...then (2) a bin-only NMU: strip the +bN suffix and retry.
            from daklib.regexes import re_bin_only_nmu
            orig_source_version = re_bin_only_nmu.sub('', source_version)
            if orig_source_version in ql:

        # No source found so return not ok

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    # (decorator line elided in this excerpt)
    return session.query(Suite).join(SrcAssociation).join(DBSource).filter_by(source=source).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    # (decorator and the final "return q.all()" are elided in this excerpt)
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

__all__.append('get_sources_from_name')
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: DBSource
    @return: the matching DBSource object (None on NoResultFound — the
    except body is elided in this excerpt)
    """
    # (decorator line elided in this excerpt)
    q = session.query(SrcAssociation)
    q = q.join('source').filter_by(source=source)
    q = q.join('suite').filter_by(suite_name=suite)

    # (the try: header is elided in this excerpt)
        # Raises if zero or more than one association matches.
        return q.one().source
    except NoResultFound:

__all__.append('get_source_in_suite')
2267 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    """
    Insert the source package described by u.pkg.dsc / u.pkg.files[filename]
    into the database: source row, suite associations, dsc_files rows and
    pool files.

    @param u: the Upload object being processed
    @param filename: the .dsc filename (key into u.pkg.files)
    @return: tuple (source, dsc_component, dsc_location_id, pfs) where pfs
    is the list of PoolFile objects touched
    # NOTE(review): many lines are elided in this excerpt (DBSource()
    # construction, 'pfs' initialisation, session.add/flush calls, loop
    # headers); statement order below follows the visible fragment only.
    """
    entry = u.pkg.files[filename]

    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    # Associate the source with every suite listed in the .changes.
    for suite_name in u.pkg.changes["distribution"].keys():
        sa = SrcAssociation()
        sa.source_id = source.source_id
        sa.suite_id = get_suite(suite_name).suite_id

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

            # If still not found, add it
            if files_id is None:
                # HACK: Force sha1sum etc into dentry
                dentry["sha1sum"] = dfentry["sha1sum"]
                dentry["sha256sum"] = dfentry["sha256sum"]
                poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                pfs.append(poolfile)
                files_id = poolfile.file_id
            # Sanity-check that the referenced pool file really exists.
                poolfile = get_poolfile_by_id(files_id, session)
                if poolfile is None:
                    utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
                pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        # Uploaders are comma-separated; split on the "<addr>, " boundary.
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    # De-duplicate uploader ids, warning on repeats.
    for up_id in uploader_ids:
        if added_ids.has_key(up_id):
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))

        su.maintainer_id = up_id
        su.source_id = source.source_id

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs.  That info is in 'dbtype', whilst 'type' is
    'deb' for both of them

    @param u: the Upload object being processed
    @param filename: the .deb/.udeb filename (key into u.pkg.files)
    # NOTE(review): several lines are elided in this excerpt (DBBinary()
    # construction as 'bin', session.add/flush calls, loop bodies).
    """
    entry = u.pkg.files[filename]

    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Resolve the pool file for this binary, creating it if needed.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # The binary must be traceable to exactly one source package.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID

    # Add BinAssociations
    for suite_name in u.pkg.changes["distribution"].keys():
        ba = BinAssociation()
        ba.binary_id = bin.binary_id
        ba.suite_id = get_suite(suite_name).suite_id

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

__all__.append('add_deb_to_db')
2448 ################################################################################
class SourceACL(object):
    """ORM-mapped row of the source_acl table."""
    def __init__(self, *args, **kwargs):
    # (constructor body and "def __repr__(self):" elided in this excerpt)
        return '<SourceACL %s>' % self.source_acl_id

__all__.append('SourceACL')
2459 ################################################################################
class SrcAssociation(object):
    """ORM-mapped row of the src_associations table (source <-> suite link)."""
    def __init__(self, *args, **kwargs):
    # (constructor body and "def __repr__(self):" elided in this excerpt)
        return '<SrcAssociation %s (%s, %s)>' % (self.sa_id, self.source, self.suite)

__all__.append('SrcAssociation')
2470 ################################################################################
class SrcFormat(object):
    """ORM-mapped row of the src_format table (e.g. '1.0', '3.0 (quilt)')."""
    def __init__(self, *args, **kwargs):
    # (constructor body and "def __repr__(self):" elided in this excerpt)
        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')
2481 ################################################################################
class SrcUploader(object):
    """ORM-mapped row of the src_uploaders table (Uploaders: entries)."""
    def __init__(self, *args, **kwargs):
    # (constructor body and "def __repr__(self):" elided in this excerpt)
        return '<SrcUploader %s>' % self.uploader_id

__all__.append('SrcUploader')
2492 ################################################################################
# (display label, Suite attribute) pairs used by Suite.details() below.
# NOTE(review): at least one entry between 'Origin' and 'Description' is
# elided in this excerpt.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
class Suite(object):
    """ORM-mapped row of the suite table (e.g. unstable, testing)."""
    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    # (the "def __repr__(self):" header is elided in this excerpt)
        return '<Suite %s>' % self.suite_name

    def __eq__(self, val):
        # Allow direct comparison against a suite-name string.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    # (the "def details(self):" header and 'ret' initialisation are elided
    #  in this excerpt; it renders SUITE_FIELDS as "Label: value" lines)
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
                ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry
        (Default False)

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry
        (Default False)

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture). \
            filter(Architecture.suites.contains(self))
        # (the if skipsrc / if skipall guards are elided in this excerpt)
            q = q.filter(Architecture.arch_string != 'source')
            q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    # NOTE(review): decorator, try: and return lines are elided in this excerpt.
    q = session.query(Suite).filter_by(suite_name=suite)

    except NoResultFound:

__all__.append('get_suite')
2590 ################################################################################
# TODO: should be removed because the implementation is too trivial
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry
    (Default False)

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry
    (Default False)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """
    # Thin delegation to Suite.get_architectures (hence the TODO above).
    return get_suite(suite, session).get_architectures(skipsrc, skipall)

__all__.append('get_suite_architectures')
2621 ################################################################################
class SuiteSrcFormat(object):
    """Association row linking a suite to a source format it accepts."""

    def __init__(self, *args, **kwargs):
        # Rows are populated by the SQLAlchemy mapper, not by hand.
        pass

    def __repr__(self):
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2630 __all__.append('SuiteSrcFormat')
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """
    formats = session.query(SrcFormat)
    formats = formats.join(SuiteSrcFormat)
    formats = formats.join(Suite).filter_by(suite_name=suite)
    return formats.order_by('format_name').all()

__all__.append('get_suite_src_formats')
2657 ################################################################################
2660 def __init__(self, uid = None, name = None):
2664 def __eq__(self, val):
2665 if isinstance(val, str):
2666 return (self.uid == val)
2667 # This signals to use the normal comparison operator
2668 return NotImplemented
2670 def __ne__(self, val):
2671 if isinstance(val, str):
2672 return (self.uid != val)
2673 # This signals to use the normal comparison operator
2674 return NotImplemented
2677 return '<Uid %s (%s)>' % (self.uid, self.name)
__all__.append('Uid')

def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @rtype: Uid
    @return: the uid object for the given uidname
    """

    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        # No such uid yet: insert a fresh row and return the new object.
        new_uid = Uid()
        new_uid.uid = uidname
        session.add(new_uid)
        session.commit_or_flush()
        ret = new_uid

    return ret

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """
    Return the Uid attached to the key with fingerprint C{fpr},
    or None when the fingerprint is unknown.
    """
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_uid_from_fingerprint')
2727 ################################################################################
class UploadBlock(object):
    """Row recording that uploads of a given source are blocked."""

    def __init__(self, *args, **kwargs):
        # Rows are populated by the SQLAlchemy mapper, not by hand.
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2736 __all__.append('UploadBlock')
2738 ################################################################################
class DBConn(object):
    """
    database module init.

    Borg-style shared-state singleton: every instance shares
    __shared_state, so the engine, metadata, table reflection and ORM
    mappers are only set up once per process.
    """
    def __init__(self, *args, **kwargs):
        # All instances alias the same attribute dict (Borg pattern).
        # NOTE(review): __shared_state itself is declared on the class in
        # the full file; not visible in this excerpt.
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            # Python 2 idiom; when set, SQL statements are echoed below.
            self.debug = kwargs.has_key('debug')

    def __setuptables(self):
        # Tables reflected with an explicit 'id' primary key column — see
        # the SERIAL workaround comment before the reflection loop below.
        tables_with_primary = (
            'changes_pending_binaries',
            'changes_pending_files',
            'changes_pending_source',
            'pending_bin_contents',
            # The following tables have primary keys but sqlalchemy
            # version 0.5 fails to reflect them correctly with database
            # versions before upgrade #41.
            #'build_queue_files',

        # Pure association/mapping tables without a surrogate primary key.
        tables_no_primary = (
            'changes_pending_files_map',
            'changes_pending_source_files',
            'changes_pool_files',
            'suite_architectures',
            'suite_src_formats',
            'suite_build_queue_copy',
            # see the comment above
            'build_queue_files',

            # Read-only database views, reflected below into view_* attributes.
            'almost_obsolete_all_associations',
            'almost_obsolete_src_associations',
            'any_associations_source',
            'bin_assoc_by_arch',
            'bin_associations_binaries',
            'binaries_suite_arch',
            'binfiles_suite_component_arch',
            'newest_all_associations',
            'newest_any_associations',
            'newest_src_association',
            'obsolete_all_associations',
            'obsolete_any_associations',
            'obsolete_any_by_all_associations',
            'obsolete_src_associations',
            'src_associations_bin',
            'src_associations_src',
            'suite_arch_by_name',

        # Sqlalchemy version 0.5 fails to reflect the SERIAL type
        # correctly and that is why we have to use a workaround. It can
        # be removed as soon as we switch to version 0.6.
        for table_name in tables_with_primary:
            table = Table(table_name, self.db_meta, \
                Column('id', Integer, primary_key = True), \
                autoload=True, useexisting=True)
            # Expose each reflected table as self.tbl_<name>.
            setattr(self, 'tbl_%s' % table_name, table)

        for table_name in tables_no_primary:
            table = Table(table_name, self.db_meta, autoload=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for view_name in views:
            view = Table(view_name, self.db_meta, autoload=True)
            # Views get a view_ prefix to distinguish them from tables.
            setattr(self, 'view_%s' % view_name, view)

    def __setupmappers(self):
        # Classical SQLAlchemy mappings: bind each ORM class defined
        # earlier in this module to its reflected table, renaming columns
        # to friendlier attribute names and wiring up relations.
        mapper(Architecture, self.tbl_architecture,
               properties = dict(arch_id = self.tbl_architecture.c.id,
                                 suites = relation(Suite, secondary=self.tbl_suite_architectures,
                                     order_by='suite_name',
                                     backref=backref('architectures', order_by='arch_string'))))

        mapper(Archive, self.tbl_archive,
               properties = dict(archive_id = self.tbl_archive.c.id,
                                 archive_name = self.tbl_archive.c.name))

        mapper(BinAssociation, self.tbl_bin_associations,
               properties = dict(ba_id = self.tbl_bin_associations.c.id,
                                 suite_id = self.tbl_bin_associations.c.suite,
                                 suite = relation(Suite),
                                 binary_id = self.tbl_bin_associations.c.bin,
                                 binary = relation(DBBinary)))

        mapper(PendingBinContents, self.tbl_pending_bin_contents,
               properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
                                 filename = self.tbl_pending_bin_contents.c.filename,
                                 package = self.tbl_pending_bin_contents.c.package,
                                 version = self.tbl_pending_bin_contents.c.version,
                                 arch = self.tbl_pending_bin_contents.c.arch,
                                 otype = self.tbl_pending_bin_contents.c.type))

        mapper(DebContents, self.tbl_deb_contents,
               properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
                                 package=self.tbl_deb_contents.c.package,
                                 suite=self.tbl_deb_contents.c.suite,
                                 arch=self.tbl_deb_contents.c.arch,
                                 section=self.tbl_deb_contents.c.section,
                                 filename=self.tbl_deb_contents.c.filename))

        mapper(UdebContents, self.tbl_udeb_contents,
               properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
                                 package=self.tbl_udeb_contents.c.package,
                                 suite=self.tbl_udeb_contents.c.suite,
                                 arch=self.tbl_udeb_contents.c.arch,
                                 section=self.tbl_udeb_contents.c.section,
                                 filename=self.tbl_udeb_contents.c.filename))

        mapper(BuildQueue, self.tbl_build_queue,
               properties = dict(queue_id = self.tbl_build_queue.c.id))

        mapper(BuildQueueFile, self.tbl_build_queue_files,
               properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                                 poolfile = relation(PoolFile, backref='buildqueueinstances')))

        mapper(DBBinary, self.tbl_binaries,
               properties = dict(binary_id = self.tbl_binaries.c.id,
                                 package = self.tbl_binaries.c.package,
                                 version = self.tbl_binaries.c.version,
                                 maintainer_id = self.tbl_binaries.c.maintainer,
                                 maintainer = relation(Maintainer),
                                 source_id = self.tbl_binaries.c.source,
                                 source = relation(DBSource),
                                 arch_id = self.tbl_binaries.c.architecture,
                                 architecture = relation(Architecture),
                                 poolfile_id = self.tbl_binaries.c.file,
                                 poolfile = relation(PoolFile),
                                 binarytype = self.tbl_binaries.c.type,
                                 fingerprint_id = self.tbl_binaries.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 install_date = self.tbl_binaries.c.install_date,
                                 binassociations = relation(BinAssociation,
                                     primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))

        mapper(BinaryACL, self.tbl_binary_acl,
               properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))

        mapper(BinaryACLMap, self.tbl_binary_acl_map,
               properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
                                 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Component, self.tbl_component,
               properties = dict(component_id = self.tbl_component.c.id,
                                 component_name = self.tbl_component.c.name))

        mapper(DBConfig, self.tbl_config,
               properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
               properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                                 source_id = self.tbl_dsc_files.c.source,
                                 source = relation(DBSource),
                                 poolfile_id = self.tbl_dsc_files.c.file,
                                 poolfile = relation(PoolFile)))

        mapper(PoolFile, self.tbl_files,
               properties = dict(file_id = self.tbl_files.c.id,
                                 filesize = self.tbl_files.c.size,
                                 location_id = self.tbl_files.c.location,
                                 location = relation(Location)))

        mapper(Fingerprint, self.tbl_fingerprint,
               properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                                 uid_id = self.tbl_fingerprint.c.uid,
                                 uid = relation(Uid),
                                 keyring_id = self.tbl_fingerprint.c.keyring,
                                 keyring = relation(Keyring),
                                 source_acl = relation(SourceACL),
                                 binary_acl = relation(BinaryACL)))

        mapper(Keyring, self.tbl_keyrings,
               properties = dict(keyring_name = self.tbl_keyrings.c.name,
                                 keyring_id = self.tbl_keyrings.c.id))

        mapper(DBChange, self.tbl_changes,
               properties = dict(change_id = self.tbl_changes.c.id,
                                 poolfiles = relation(PoolFile,
                                     secondary=self.tbl_changes_pool_files,
                                     backref="changeslinks"),
                                 seen = self.tbl_changes.c.seen,
                                 source = self.tbl_changes.c.source,
                                 binaries = self.tbl_changes.c.binaries,
                                 architecture = self.tbl_changes.c.architecture,
                                 distribution = self.tbl_changes.c.distribution,
                                 urgency = self.tbl_changes.c.urgency,
                                 maintainer = self.tbl_changes.c.maintainer,
                                 changedby = self.tbl_changes.c.changedby,
                                 date = self.tbl_changes.c.date,
                                 version = self.tbl_changes.c.version,
                                 files = relation(ChangePendingFile,
                                     secondary=self.tbl_changes_pending_files_map,
                                     backref="changesfile"),
                                 in_queue_id = self.tbl_changes.c.in_queue,
                                 in_queue = relation(PolicyQueue,
                                     primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                                 approved_for_id = self.tbl_changes.c.approved_for))

        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
               properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))

        mapper(ChangePendingFile, self.tbl_changes_pending_files,
               properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
                                 filename = self.tbl_changes_pending_files.c.filename,
                                 size = self.tbl_changes_pending_files.c.size,
                                 md5sum = self.tbl_changes_pending_files.c.md5sum,
                                 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
                                 sha256sum = self.tbl_changes_pending_files.c.sha256sum))

        mapper(ChangePendingSource, self.tbl_changes_pending_source,
               properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
                                 change = relation(DBChange),
                                 # Two relations onto the same table need explicit joins.
                                 maintainer = relation(Maintainer,
                                     primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                                 changedby = relation(Maintainer,
                                     primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
                                 fingerprint = relation(Fingerprint),
                                 source_files = relation(ChangePendingFile,
                                     secondary=self.tbl_changes_pending_source_files,
                                     backref="pending_sources")))

        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
               properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                                 keyring = relation(Keyring, backref="keyring_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Location, self.tbl_location,
               properties = dict(location_id = self.tbl_location.c.id,
                                 component_id = self.tbl_location.c.component,
                                 component = relation(Component),
                                 archive_id = self.tbl_location.c.archive,
                                 archive = relation(Archive),
                                 archive_type = self.tbl_location.c.type))

        mapper(Maintainer, self.tbl_maintainer,
               properties = dict(maintainer_id = self.tbl_maintainer.c.id,
                                 maintains_sources = relation(DBSource, backref='maintainer',
                                     primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
                                 changed_sources = relation(DBSource, backref='changedby',
                                     primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))))

        mapper(NewComment, self.tbl_new_comments,
               properties = dict(comment_id = self.tbl_new_comments.c.id))

        mapper(Override, self.tbl_override,
               properties = dict(suite_id = self.tbl_override.c.suite,
                                 suite = relation(Suite),
                                 package = self.tbl_override.c.package,
                                 component_id = self.tbl_override.c.component,
                                 component = relation(Component),
                                 priority_id = self.tbl_override.c.priority,
                                 priority = relation(Priority),
                                 section_id = self.tbl_override.c.section,
                                 section = relation(Section),
                                 overridetype_id = self.tbl_override.c.type,
                                 overridetype = relation(OverrideType)))

        mapper(OverrideType, self.tbl_override_type,
               properties = dict(overridetype = self.tbl_override_type.c.type,
                                 overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
               properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))

        mapper(Priority, self.tbl_priority,
               properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
               properties = dict(section_id = self.tbl_section.c.id,
                                 section=self.tbl_section.c.section))

        mapper(DBSource, self.tbl_source,
               properties = dict(source_id = self.tbl_source.c.id,
                                 version = self.tbl_source.c.version,
                                 maintainer_id = self.tbl_source.c.maintainer,
                                 poolfile_id = self.tbl_source.c.file,
                                 poolfile = relation(PoolFile),
                                 fingerprint_id = self.tbl_source.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 changedby_id = self.tbl_source.c.changedby,
                                 srcfiles = relation(DSCFile,
                                     primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                 suites = relation(Suite, secondary=self.tbl_src_associations,
                                 srcuploaders = relation(SrcUploader)))

        mapper(SourceACL, self.tbl_source_acl,
               properties = dict(source_acl_id = self.tbl_source_acl.c.id))

        mapper(SrcAssociation, self.tbl_src_associations,
               properties = dict(sa_id = self.tbl_src_associations.c.id,
                                 suite_id = self.tbl_src_associations.c.suite,
                                 suite = relation(Suite),
                                 source_id = self.tbl_src_associations.c.source,
                                 source = relation(DBSource)))

        mapper(SrcFormat, self.tbl_src_format,
               properties = dict(src_format_id = self.tbl_src_format.c.id,
                                 format_name = self.tbl_src_format.c.format_name))

        mapper(SrcUploader, self.tbl_src_uploaders,
               properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
                                 source_id = self.tbl_src_uploaders.c.source,
                                 source = relation(DBSource,
                                     primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
                                 maintainer_id = self.tbl_src_uploaders.c.maintainer,
                                 maintainer = relation(Maintainer,
                                     primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))

        mapper(Suite, self.tbl_suite,
               properties = dict(suite_id = self.tbl_suite.c.id,
                                 policy_queue = relation(PolicyQueue),
                                 copy_queues = relation(BuildQueue, secondary=self.tbl_suite_build_queue_copy)))

        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
               properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
                                 suite = relation(Suite, backref='suitesrcformats'),
                                 src_format_id = self.tbl_suite_src_formats.c.src_format,
                                 src_format = relation(SrcFormat)))

        mapper(Uid, self.tbl_uid,
               properties = dict(uid_id = self.tbl_uid.c.id,
                                 fingerprint = relation(Fingerprint)))

        mapper(UploadBlock, self.tbl_upload_blocks,
               properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                                 fingerprint = relation(Fingerprint, backref="uploadblocks"),
                                 uid = relation(Uid, backref="uploadblocks")))

    ## Connection functions
    def __createconn(self):
        # Build a postgres connection string from the DB::* config values.
        # NOTE(review): relies on a Config() instance (cnf) set up here in
        # the full file; not visible in this excerpt.
        from config import Config
        # TCP form, used when DB::Host is configured ...
        connstr = "postgres://%s" % cnf["DB::Host"]
        if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
            connstr += ":%s" % cnf["DB::Port"]
        connstr += "/%s" % cnf["DB::Name"]
        # ... local-socket form otherwise (the guarding if/else is not
        # visible in this excerpt — confirm against the full file).
        connstr = "postgres:///%s" % cnf["DB::Name"]
        if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
            connstr += "?port=%s" % cnf["DB::Port"]

        self.db_pg = create_engine(connstr, echo=self.debug)
        self.db_meta = MetaData()
        self.db_meta.bind = self.db_pg
        self.db_smaker = sessionmaker(bind=self.db_pg,
        self.__setuptables()
        self.__setupmappers()

        # NOTE(review): this return belongs to the session() factory method
        # (its def line is not visible here); it hands out a new SQLAlchemy
        # session from the sessionmaker configured above.
        return self.db_smaker()

__all__.append('DBConn')