5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
41 from datetime import datetime, timedelta
42 from errno import ENOENT
43 from tempfile import mkstemp, mkdtemp
45 from inspect import getargspec
48 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
49 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, backref
50 from sqlalchemy import types as sqltypes
52 # Don't remove this, we re-export the exceptions to scripts which import us
53 from sqlalchemy.exc import *
54 from sqlalchemy.orm.exc import NoResultFound
56 # Only import Config until Queue stuff is changed to store its config
58 from config import Config
59 from textutils import fix_maintainer
60 from dak_exceptions import NoSourceFieldError
# NOTE(review): this listing is a sampled dump -- original file line numbers are
# fused into each line and interior lines are missing throughout.
# Region purpose: silence SQLAlchemy warnings that are merely noisy on squeeze.
62 # suppress some deprecation warnings in squeeze related to sqlalchemy
64 warnings.filterwarnings('ignore', \
65 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
# NOTE(review): the final argument of the call above (the warning category,
# presumably SADeprecationWarning) is missing from this view -- confirm upstream.
67 # TODO: sqlalchemy needs some extra configuration to correctly reflect
68 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
# SAWarning is raised by table reflection for partial indexes it cannot model.
69 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
72 ################################################################################
# Custom SQLAlchemy column type so the PostgreSQL 'debversion' type can be
# reflected and used. The base class differs between SQLA versions; the
# try/except that selects between the two assignments is missing from this view.
74 # Patch in support for the debversion field type so that it works during
78 # that is for sqlalchemy 0.6
79 UserDefinedType = sqltypes.UserDefinedType
81 # this one for sqlalchemy 0.5
82 UserDefinedType = sqltypes.TypeEngine
84 class DebVersion(UserDefinedType):
# NOTE(review): the bodies of the three methods below are missing from this
# view; presumably get_col_spec returns the SQL type name 'debversion' and the
# two processors are pass-throughs (return None) -- confirm upstream.
85 def get_col_spec(self):
88 def bind_processor(self, dialect):
91 # ' = None' is needed for sqlalchemy 0.5:
92 def result_processor(self, dialect, coltype = None):
# Register the type with the reflection machinery for supported SQLA versions.
95 sa_major_version = sqlalchemy.__version__[0:3]
96 if sa_major_version in ["0.5", "0.6"]:
97 from sqlalchemy.databases import postgres
98 postgres.ischema_names['debversion'] = DebVersion
# NOTE(review): this raise presumably sits in the else branch of the version
# check above (the 'else:' line is missing from view).
100 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
102 ################################################################################
# Public API of this module; extended by __all__.append(...) after each definition.
104 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
# Decorator used by almost every get_* helper below: supplies a private session
# when the caller did not pass one, and exposes session.commit_or_flush so the
# wrapped function commits (private session) or merely flushes (caller's session).
108 def session_wrapper(fn):
110 Wrapper around common ".., session=None):" handling. If the wrapped
111 function is called without passing 'session', we create a local one
112 and destroy it when the function ends.
114 Also attaches a commit_or_flush method to the session; if we created a
115 local session, this is a synonym for session.commit(), otherwise it is a
116 synonym for session.flush().
119 def wrapped(*args, **kwargs):
120 private_transaction = False
122 # Find the session object
123 session = kwargs.get('session')
# Branch: no session in kwargs -- inspect fn's signature to decide whether the
# last positional arg is a session. (The enclosing 'if session is None:' and the
# else-branch lines are missing from this view.)
126 if len(args) <= len(getargspec(fn)[0]) - 1:
127 # No session specified as last argument or in kwargs
128 private_transaction = True
129 session = kwargs['session'] = DBConn().session()
131 # Session is last argument in args
# NOTE(review): assigning to args[-1] would raise on a tuple -- presumably the
# full source converts args to a list first; confirm against upstream.
135 session = args[-1] = DBConn().session()
136 private_transaction = True
138 if private_transaction:
139 session.commit_or_flush = session.commit
# (else-branch: caller owns the transaction, so only flush.)
141 session.commit_or_flush = session.flush
# Presumably wrapped in try/finally with session.close() -- those lines are
# missing from this view.
144 return fn(*args, **kwargs)
146 if private_transaction:
147 # We created a session; close it.
# Preserve metadata of the wrapped function (pre-functools.wraps, Python 2 style).
150 wrapped.__doc__ = fn.__doc__
151 wrapped.func_name = fn.func_name
155 __all__.append('session_wrapper')
# ORM row class for the 'architecture' table (mapping is defined elsewhere
# in this file, outside this view).
159 class Architecture(object):
160 def __init__(self, arch_string = None, description = None):
# arch_string: architecture name (e.g. 'amd64'); description: human-readable text.
161 self.arch_string = arch_string
162 self.description = description
def __eq__(self, val):
    """Allow comparing an Architecture directly against a plain string name."""
    if not isinstance(val, str):
        # Tell Python to fall back to its default comparison handling.
        return NotImplemented
    return self.arch_string == val
def __ne__(self, val):
    """Inequality counterpart of __eq__ for plain-string operands."""
    if not isinstance(val, str):
        # Tell Python to fall back to its default comparison handling.
        return NotImplemented
    return self.arch_string != val
# (def __repr__(self): line is missing from this view.)
177 return '<Architecture %s>' % self.arch_string
179 __all__.append('Architecture')
# NOTE(review): presumably decorated with @session_wrapper (decorator line
# missing from view), like the other get_* helpers.
182 def get_architecture(architecture, session=None):
# NOTE(review): summary says "database id" but @return documents an
# Architecture object; the visible query supports the latter -- confirm.
184 Returns database id for given C{architecture}.
186 @type architecture: string
187 @param architecture: The name of the architecture
189 @type session: Session
190 @param session: Optional SQLA session object (a temporary one will be
191 generated if not supplied)
194 @return: Architecture object for the given arch (None if not present)
197 q = session.query(Architecture).filter_by(arch_string=architecture)
# (try: return q.one() / return None lines are missing from this view.)
201 except NoResultFound:
204 __all__.append('get_architecture')
206 # TODO: should be removed because the implementation is too trivial
208 def get_architecture_suites(architecture, session=None):
210 Returns list of Suite objects for given C{architecture} name
212 @type architecture: str
213 @param architecture: Architecture name to search for
215 @type session: Session
216 @param session: Optional SQL session object (a temporary one will be
217 generated if not supplied)
220 @return: list of Suite objects for the given name (may be empty)
# NOTE(review): raises AttributeError if the architecture does not exist
# (get_architecture returns None).
223 return get_architecture(architecture, session).suites
225 __all__.append('get_architecture_suites')
227 ################################################################################
# Empty ORM row classes follow; __init__ bodies ('pass') and __repr__ def lines
# are missing from this view.
229 class Archive(object):
230 def __init__(self, *args, **kwargs):
234 return '<Archive %s>' % self.archive_name
236 __all__.append('Archive')
239 def get_archive(archive, session=None):
241 returns database id for given C{archive}.
243 @type archive: string
244 @param archive: the name of the archive
246 @type session: Session
247 @param session: Optional SQLA session object (a temporary one will be
248 generated if not supplied)
251 @return: Archive object for the given name (None if not present)
# Archive names are stored lowercase; normalise before querying.
254 archive = archive.lower()
256 q = session.query(Archive).filter_by(archive_name=archive)
# (try: return q.one() / return None lines are missing from this view.)
260 except NoResultFound:
263 __all__.append('get_archive')
265 ################################################################################
# Row class for bin_associations (binary <-> suite membership).
267 class BinAssociation(object):
268 def __init__(self, *args, **kwargs):
272 return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
274 __all__.append('BinAssociation')
276 ################################################################################
# Row class for bin_contents (file paths shipped by a binary package).
278 class BinContents(object):
279 def __init__(self, *args, **kwargs):
283 return '<BinContents (%s, %s)>' % (self.binary, self.filename)
285 __all__.append('BinContents')
287 ################################################################################
# Row class for the binaries table.
289 class DBBinary(object):
290 def __init__(self, *args, **kwargs):
294 return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)
296 __all__.append('DBBinary')
# (Presumably @session_wrapper-decorated; decorator lines missing from view.)
299 def get_suites_binary_in(package, session=None):
301 Returns list of Suite objects which given C{package} name is in
304 @param package: DBBinary package name to search for
307 @return: list of Suite objects for the given package
310 return session.query(Suite).join(BinAssociation).join(DBBinary).filter_by(package=package).all()
312 __all__.append('get_suites_binary_in')
315 def get_binary_from_id(binary_id, session=None):
317 Returns DBBinary object for given C{id}
320 @param binary_id: Id of the required binary
322 @type session: Session
323 @param session: Optional SQLA session object (a temporary one will be
324 generated if not supplied)
327 @return: DBBinary object for the given binary (None if not present)
330 q = session.query(DBBinary).filter_by(binary_id=binary_id)
# (try: return q.one() / return None lines are missing from this view.)
334 except NoResultFound:
337 __all__.append('get_binary_from_id')
340 def get_binaries_from_name(package, version=None, architecture=None, session=None):
342 Returns list of DBBinary objects for given C{package} name
345 @param package: DBBinary package name to search for
347 @type version: str or None
348 @param version: Version to search for (or None)
350 @type architecture: str, list or None
351 @param architecture: Architectures to limit to (or None if no limit)
353 @type session: Session
354 @param session: Optional SQL session object (a temporary one will be
355 generated if not supplied)
358 @return: list of DBBinary objects for the given name (may be empty)
361 q = session.query(DBBinary).filter_by(package=package)
# Optional narrowing filters: exact version, then architecture set.
363 if version is not None:
364 q = q.filter_by(version=version)
366 if architecture is not None:
367 if not isinstance(architecture, list):
368 architecture = [architecture]
369 q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))
# (The final 'ret = q.all(); return ret' lines are missing from this view.)
375 __all__.append('get_binaries_from_name')
378 def get_binaries_from_source_id(source_id, session=None):
380 Returns list of DBBinary objects for given C{source_id}
383 @param source_id: source_id to search for
385 @type session: Session
386 @param session: Optional SQL session object (a temporary one will be
387 generated if not supplied)
390 @return: list of DBBinary objects for the given name (may be empty)
393 return session.query(DBBinary).filter_by(source_id=source_id).all()
395 __all__.append('get_binaries_from_source_id')
398 def get_binary_from_name_suite(package, suitename, session=None):
399 ### For dak examine-package
400 ### XXX: Doesn't use object API yet
# NOTE(review): this SQL is built with % string interpolation (see the execute
# below) rather than bind parameters -- SQL injection hazard if package or
# suitename ever come from untrusted input. suitename is spliced in as a whole
# operator expression after 'su.suite_name', so callers apparently pass e.g.
# "= 'sid'". Flagging rather than changing, since surrounding lines are missing.
402 sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
403 FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
404 WHERE b.package='%(package)s'
406 AND fi.location = l.id
407 AND l.component = c.id
410 AND su.suite_name %(suitename)s
411 ORDER BY b.version DESC"""
413 return session.execute(sql % {'package': package, 'suitename': suitename})
415 __all__.append('get_binary_from_name_suite')
418 def get_binary_components(package, suitename, arch, session=None):
419 # Check for packages that have moved from one component to another
# This query, unlike the one above, correctly uses bind parameters (:package etc.).
420 query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
421 WHERE b.package=:package AND s.suite_name=:suitename
422 AND (a.arch_string = :arch OR a.arch_string = 'all')
423 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
424 AND f.location = l.id
425 AND l.component = c.id
# (The remaining join condition line(s) are missing from this view.)
428 vals = {'package': package, 'suitename': suitename, 'arch': arch}
430 return session.execute(query, vals)
432 __all__.append('get_binary_components')
434 ################################################################################
# Empty ORM row classes for binary upload ACLs; bodies partially missing from view.
436 class BinaryACL(object):
437 def __init__(self, *args, **kwargs):
441 return '<BinaryACL %s>' % self.binary_acl_id
443 __all__.append('BinaryACL')
445 ################################################################################
447 class BinaryACLMap(object):
448 def __init__(self, *args, **kwargs):
452 return '<BinaryACLMap %s>' % self.binary_acl_map_id
454 __all__.append('BinaryACLMap')
456 ################################################################################
461 ArchiveDir "%(archivepath)s";
462 OverrideDir "%(overridedir)s";
463 CacheDir "%(cachedir)s";
468 Packages::Compress ". bzip2 gzip";
469 Sources::Compress ". bzip2 gzip";
474 bindirectory "incoming"
479 BinOverride "override.sid.all3";
480 BinCacheDB "packages-accepted.db";
482 FileList "%(filelist)s";
485 Packages::Extensions ".deb .udeb";
488 bindirectory "incoming/"
491 BinOverride "override.sid.all3";
492 SrcOverride "override.sid.all3.src";
493 FileList "%(filelist)s";
# ORM row class for build queues; also owns the on-disk queue directory:
# generates apt metadata (Packages/Sources/Release) and maintains symlinks/copies.
497 class BuildQueue(object):
498 def __init__(self, *args, **kwargs):
502 return '<BuildQueue %s>' % self.queue_name
# Generate Packages/Sources/Release for this queue directory using
# apt-ftparchive, then gpg-sign the Release file.
504 def write_metadata(self, starttime, force=False):
505 # Do we write out metafiles?
506 if not (force or self.generate_metadata):
509 session = DBConn().session().object_session(self)
511 fl_fd = fl_name = ac_fd = ac_name = None
# All architectures except the pseudo-arch 'source'.
513 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
514 startdir = os.getcwd()
517 # Grab files we want to include
# "newer" = queue files still inside their stay_of_execution grace window.
518 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
519 # Write file list with newer files
520 (fl_fd, fl_name) = mkstemp()
522 os.write(fl_fd, '%s\n' % n.fullpath)
527 # Write minimal apt.conf
528 # TODO: Remove hardcoding from template
529 (ac_fd, ac_name) = mkstemp()
530 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
532 'cachedir': cnf["Dir::Cache"],
533 'overridedir': cnf["Dir::Override"],
537 # Run apt-ftparchive generate
538 os.chdir(os.path.dirname(ac_name))
# NOTE(review): os.system with %-built command lines throughout this method --
# breaks (or worse) if any config value contains shell metacharacters; confirm
# these values are trusted operator config.
539 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
541 # Run apt-ftparchive release
542 # TODO: Eww - fix this
543 bname = os.path.basename(self.path)
547 # We have to remove the Release file otherwise it'll be included in the
550 os.unlink(os.path.join(bname, 'Release'))
554 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
556 # Crude hack with open and append, but this whole section is and should be redone.
557 if self.notautomatic:
558 release=open("Release", "a")
# NOTE(review): no trailing newline is written here -- confirm apt tolerates a
# Release file whose last line lacks '\n'.
559 release.write("NotAutomatic: yes")
# Sign the Release file with the configured dinstall key.
564 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
565 if cnf.has_key("Dinstall::SigningPubKeyring"):
566 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
# NOTE(review): the stray "" after the closing quote ('Release""") is an
# implicit concatenation with an empty string -- harmless, likely a leftover.
568 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
570 # Move the files if we got this far
571 os.rename('Release', os.path.join(bname, 'Release'))
573 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
# (The finally/cleanup block removing the tempfiles and restoring startdir is
# missing from this view.)
575 # Clean up any left behind files
# Expire queue files past their grace period, regenerate metadata, and remove
# directory entries no longer referenced in the database.
602 def clean_and_update(self, starttime, Logger, dryrun=False):
603 """WARNING: This routine commits for you"""
604 session = DBConn().session().object_session(self)
606 if self.generate_metadata and not dryrun:
607 self.write_metadata(starttime)
609 # Grab files older than our execution time
610 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
# (The for-loop over 'older', the dryrun branch, session.delete and the
# try/except around unlink are partially missing from this view.)
616 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
618 Logger.log(["I: Removing %s from the queue" % o.fullpath])
619 os.unlink(o.fullpath)
622 # If it wasn't there, don't worry
# A missing file is fine (already removed); any other OSError is logged.
623 if e.errno == ENOENT:
626 # TODO: Replace with proper logging call
627 Logger.log(["E: Could not remove %s" % o.fullpath])
# Second pass: find metadata/advisory leftovers on disk with no DB row.
634 for f in os.listdir(self.path):
635 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
639 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
640 except NoResultFound:
641 fp = os.path.join(self.path, f)
643 Logger.log(["I: Would remove unused link %s" % fp])
645 Logger.log(["I: Removing unused link %s" % fp])
# (the os.unlink(fp) and its except branch are missing from this view.)
649 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
651 def add_file_from_pool(self, poolfile):
652 """Copies a file into the pool. Assumes that the PoolFile object is
653 attached to the same SQLAlchemy session as the Queue object is.
655 The caller is responsible for committing after calling this function."""
# Basename of the pool file, i.e. everything after the last path separator.
656 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
658 # Check if we have a file of this name or this ID already
659 for f in self.queuefiles:
# NOTE(review): 'and' binds tighter than 'or', so this matches on
# (fileid present AND equal) OR (basename equal) -- presumably intended.
660 if f.fileid is not None and f.fileid == poolfile.file_id or \
661 f.poolfile.filename == poolfile_basename:
662 # In this case, update the BuildQueueFile entry so we
663 # don't remove it too early
664 f.lastused = datetime.now()
665 DBConn().session().object_session(poolfile).add(f)
# (an early 'return' presumably follows here; missing from this view.)
668 # Prepare BuildQueueFile object
669 qf = BuildQueueFile()
670 qf.build_queue_id = self.queue_id
671 qf.lastused = datetime.now()
672 qf.filename = poolfile_basename
674 targetpath = poolfile.fullpath
675 queuepath = os.path.join(self.path, poolfile_basename)
# Either copy (fileid left NULL) or symlink (fileid recorded), depending on the
# queue's copy_files setting; the try/except around the branch is missing here.
679 # We need to copy instead of symlink
681 utils.copy(targetpath, queuepath)
682 # NULL in the fileid field implies a copy
685 os.symlink(targetpath, queuepath)
686 qf.fileid = poolfile.file_id
690 # Get the same session as the PoolFile is using and add the qf to it
691 DBConn().session().object_session(poolfile).add(qf)
696 __all__.append('BuildQueue')
699 def get_build_queue(queuename, session=None):
# NOTE(review): summary says "creating it if it does not [exist]" but the
# visible except NoResultFound suggests it may just return None -- confirm.
701 Returns BuildQueue object for given C{queue name}, creating it if it does not
704 @type queuename: string
705 @param queuename: The name of the queue
707 @type session: Session
708 @param session: Optional SQLA session object (a temporary one will be
709 generated if not supplied)
712 @return: BuildQueue object for the given queue
715 q = session.query(BuildQueue).filter_by(queue_name=queuename)
# (try: return q.one() / return None lines are missing from this view.)
719 except NoResultFound:
722 __all__.append('get_build_queue')
724 ################################################################################
# Row class for build_queue_files; fullpath property joins queue path + filename.
726 class BuildQueueFile(object):
727 def __init__(self, *args, **kwargs):
731 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
# (The '@property / def fullpath(self):' lines are missing from this view.)
735 return os.path.join(self.buildqueue.path, self.filename)
738 __all__.append('BuildQueueFile')
740 ################################################################################
# Empty ORM row classes for the changes_pending_* tables; bodies partially
# missing from view.
742 class ChangePendingBinary(object):
743 def __init__(self, *args, **kwargs):
747 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
749 __all__.append('ChangePendingBinary')
751 ################################################################################
753 class ChangePendingFile(object):
754 def __init__(self, *args, **kwargs):
758 return '<ChangePendingFile %s>' % self.change_pending_file_id
760 __all__.append('ChangePendingFile')
762 ################################################################################
764 class ChangePendingSource(object):
765 def __init__(self, *args, **kwargs):
769 return '<ChangePendingSource %s>' % self.change_pending_source_id
771 __all__.append('ChangePendingSource')
773 ################################################################################
# ORM row class for the component table (main/contrib/non-free).
775 class Component(object):
776 def __init__(self, *args, **kwargs):
def __eq__(self, val):
    """Allow comparing a Component directly against a plain string name."""
    if not isinstance(val, str):
        # Tell Python to fall back to its default comparison handling.
        return NotImplemented
    return self.component_name == val
def __ne__(self, val):
    """Inequality counterpart of __eq__ for plain-string operands."""
    if not isinstance(val, str):
        # Tell Python to fall back to its default comparison handling.
        return NotImplemented
    return self.component_name != val
# (def __repr__(self): line is missing from this view.)
792 return '<Component %s>' % self.component_name
795 __all__.append('Component')
798 def get_component(component, session=None):
800 Returns database id for given C{component}.
802 @type component: string
803 @param component: The name of the override type
806 @return: the database id for the given component
# Component names are stored lowercase; normalise before querying.
809 component = component.lower()
811 q = session.query(Component).filter_by(component_name=component)
# (try: return q.one() / return None lines are missing from this view.)
815 except NoResultFound:
818 __all__.append('get_component')
820 ################################################################################
# Row class for the config table.
822 class DBConfig(object):
823 def __init__(self, *args, **kwargs):
827 return '<DBConfig %s>' % self.name
829 __all__.append('DBConfig')
831 ################################################################################
834 def get_or_set_contents_file_id(filename, session=None):
836 Returns database id for given filename.
838 If no matching file is found, a row is inserted.
840 @type filename: string
841 @param filename: The filename
842 @type session: SQLAlchemy
843 @param session: Optional SQL session object (a temporary one will be
844 generated if not supplied). If not passed, a commit will be performed at
845 the end of the function, otherwise the caller is responsible for committing.
848 @return: the database id for the given component
851 q = session.query(ContentFilename).filter_by(filename=filename)
# Look up first; on miss, insert a new row and read back its generated id.
854 ret = q.one().cafilename_id
855 except NoResultFound:
856 cf = ContentFilename()
857 cf.filename = filename
# (session.add(cf) presumably precedes this; missing from view.)
859 session.commit_or_flush()
860 ret = cf.cafilename_id
# (return ret is missing from this view.)
864 __all__.append('get_or_set_contents_file_id')
867 def get_contents(suite, overridetype, section=None, session=None):
869 Returns contents for a suite / overridetype combination, limiting
870 to a section if not None.
873 @param suite: Suite object
875 @type overridetype: OverrideType
876 @param overridetype: OverrideType object
878 @type section: Section
879 @param section: Optional section object to limit results to
881 @type session: SQLAlchemy
882 @param session: Optional SQL session object (a temporary one will be
883 generated if not supplied)
886 @return: ResultsProxy object set up to return tuples of (filename, section,
890 # find me all of the contents for a given suite
# All filter values go through bind parameters (:suiteid etc.) -- safe.
891 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
895 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
896 JOIN content_file_names n ON (c.filename=n.id)
897 JOIN binaries b ON (b.id=c.binary_pkg)
898 JOIN override o ON (o.package=b.package)
899 JOIN section s ON (s.id=o.section)
900 WHERE o.suite = :suiteid AND o.type = :overridetypeid
901 AND b.type=:overridetypename"""
903 vals = {'suiteid': suite.suite_id,
904 'overridetypeid': overridetype.overridetype_id,
905 'overridetypename': overridetype.overridetype}
# Optionally narrow to a single section.
907 if section is not None:
908 contents_q += " AND s.id = :sectionid"
909 vals['sectionid'] = section.section_id
911 contents_q += " ORDER BY fn"
913 return session.execute(contents_q, vals)
915 __all__.append('get_contents')
917 ################################################################################
# Row class for content_file_paths.
919 class ContentFilepath(object):
920 def __init__(self, *args, **kwargs):
924 return '<ContentFilepath %s>' % self.filepath
926 __all__.append('ContentFilepath')
929 def get_or_set_contents_path_id(filepath, session=None):
931 Returns database id for given path.
933 If no matching file is found, a row is inserted.
935 @type filepath: string
936 @param filepath: The filepath
938 @type session: SQLAlchemy
939 @param session: Optional SQL session object (a temporary one will be
940 generated if not supplied). If not passed, a commit will be performed at
941 the end of the function, otherwise the caller is responsible for committing.
944 @return: the database id for the given path
947 q = session.query(ContentFilepath).filter_by(filepath=filepath)
# Same get-or-insert pattern as get_or_set_contents_file_id above.
950 ret = q.one().cafilepath_id
951 except NoResultFound:
952 cf = ContentFilepath()
953 cf.filepath = filepath
955 session.commit_or_flush()
956 ret = cf.cafilepath_id
# (return ret is missing from this view.)
960 __all__.append('get_or_set_contents_path_id')
962 ################################################################################
964 class ContentAssociation(object):
965 def __init__(self, *args, **kwargs):
969 return '<ContentAssociation %s>' % self.ca_id
971 __all__.append('ContentAssociation')
973 def insert_content_paths(binary_id, fullpaths, session=None):
975 Make sure given path is associated with given binary id
978 @param binary_id: the id of the binary
979 @type fullpaths: list
980 @param fullpaths: the list of paths of the file being associated with the binary
981 @type session: SQLAlchemy session
982 @param session: Optional SQLAlchemy session. If this is passed, the caller
983 is responsible for ensuring a transaction has begun and committing the
984 results or rolling back based on the result code. If not passed, a commit
985 will be performed at the end of the function, otherwise the caller is
986 responsible for committing.
988 @return: True upon success
# Manages its own session/transaction when none was supplied.
993 session = DBConn().session()
998 def generate_path_dicts():
999 for fullpath in fullpaths:
# Normalise tar-style './' prefixes before storing.
1000 if fullpath.startswith( './' ):
1001 fullpath = fullpath[2:]
1003 yield {'filename':fullpath, 'id': binary_id }
# Parameterised inserts, one per path.
1005 for d in generate_path_dicts():
1006 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
# (the d argument, commit, and the except branch around this loop are missing
# from this view.)
1015 traceback.print_exc()
1017 # Only rollback if we set up the session ourself
1024 __all__.append('insert_content_paths')
1026 ################################################################################
# Row class for dsc_files.
1028 class DSCFile(object):
1029 def __init__(self, *args, **kwargs):
1033 return '<DSCFile %s>' % self.dscfile_id
1035 __all__.append('DSCFile')
1038 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1040 Returns a list of DSCFiles which may be empty
1042 @type dscfile_id: int (optional)
1043 @param dscfile_id: the dscfile_id of the DSCFiles to find
1045 @type source_id: int (optional)
1046 @param source_id: the source id related to the DSCFiles to find
1048 @type poolfile_id: int (optional)
1049 @param poolfile_id: the poolfile id related to the DSCFiles to find
1052 @return: Possibly empty list of DSCFiles
1055 q = session.query(DSCFile)
# Each filter is optional and independent; unset ones are skipped.
1057 if dscfile_id is not None:
1058 q = q.filter_by(dscfile_id=dscfile_id)
1060 if source_id is not None:
1061 q = q.filter_by(source_id=source_id)
1063 if poolfile_id is not None:
1064 q = q.filter_by(poolfile_id=poolfile_id)
# (return q.all() is missing from this view.)
1068 __all__.append('get_dscfiles')
1070 ################################################################################
# Row class for files; fullpath joins location path + filename.
1072 class PoolFile(object):
1073 def __init__(self, *args, **kwargs):
1077 return '<PoolFile %s>' % self.filename
# (The '@property / def fullpath(self):' lines are missing from this view.)
1081 return os.path.join(self.location.path, self.filename)
1083 __all__.append('PoolFile')
1086 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1089 (ValidFileFound [boolean or None], PoolFile object or None)
1091 @type filename: string
1092 @param filename: the filename of the file to check against the DB
1095 @param filesize: the size of the file to check against the DB
1097 @type md5sum: string
1098 @param md5sum: the md5sum of the file to check against the DB
1100 @type location_id: int
1101 @param location_id: the id of the location to look in
1104 @return: Tuple of length 2.
1105 - If more than one file found with that name: (C{None}, C{None})
1106 - If valid pool file found: (C{True}, C{PoolFile object})
1107 - If valid pool file not found:
1108 - (C{False}, C{None}) if no file found
1109 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
1112 q = session.query(PoolFile).filter_by(filename=filename)
1113 q = q.join(Location).filter_by(location_id=location_id)
# (the try/q.one(), MultipleResultsFound handling and returns are missing from
# this view; the visible comparison below decides valid vs mismatch.)
1123 if obj.md5sum != md5sum or obj.filesize != int(filesize):
1131 __all__.append('check_poolfile')
1134 def get_poolfile_by_id(file_id, session=None):
1136 Returns a PoolFile objects or None for the given id
1139 @param file_id: the id of the file to look for
1141 @rtype: PoolFile or None
1142 @return: either the PoolFile object or None
1145 q = session.query(PoolFile).filter_by(file_id=file_id)
# (try: return q.one() / return None lines are missing from this view.)
1149 except NoResultFound:
1152 __all__.append('get_poolfile_by_id')
1156 def get_poolfile_by_name(filename, location_id=None, session=None):
1158 Returns an array of PoolFile objects for the given filename and
1159 (optionally) location_id
1161 @type filename: string
1162 @param filename: the filename of the file to check against the DB
1164 @type location_id: int
1165 @param location_id: the id of the location to look in (optional)
1168 @return: array of PoolFile objects
1171 q = session.query(PoolFile).filter_by(filename=filename)
1173 if location_id is not None:
1174 q = q.join(Location).filter_by(location_id=location_id)
# (return q.all() is missing from this view.)
1178 __all__.append('get_poolfile_by_name')
1181 def get_poolfile_like_name(filename, session=None):
1183 Returns an array of PoolFile objects which are like the given name
1185 @type filename: string
1186 @param filename: the filename of the file to check against the DB
1189 @return: array of PoolFile objects
1192 # TODO: There must be a way of properly using bind parameters with %FOO%
# NOTE(review): %-interpolation into a LIKE pattern -- '%' or '_' in filename
# would be treated as wildcards; confirm callers pass trusted basenames only.
1193 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1197 __all__.append('get_poolfile_like_name')
1200 def add_poolfile(filename, datadict, location_id, session=None):
1202 Add a new file to the pool
1204 @type filename: string
1205 @param filename: filename
1207 @type datadict: dict
1208 @param datadict: dict with needed data
1210 @type location_id: int
1211 @param location_id: database id of the location
1214 @return: the PoolFile object created
# datadict must supply size, md5sum, sha1sum and sha256sum (KeyError otherwise).
1216 poolfile = PoolFile()
1217 poolfile.filename = filename
1218 poolfile.filesize = datadict["size"]
1219 poolfile.md5sum = datadict["md5sum"]
1220 poolfile.sha1sum = datadict["sha1sum"]
1221 poolfile.sha256sum = datadict["sha256sum"]
1222 poolfile.location_id = location_id
1224 session.add(poolfile)
# (session.flush() and return poolfile are missing from this view.)
1225 # Flush to get a file id (NB: This is not a commit)
1230 __all__.append('add_poolfile')
1232 ################################################################################
# ORM row class for the fingerprint table (GPG key fingerprints of uploaders).
1234 class Fingerprint(object):
1235 def __init__(self, fingerprint = None):
1236 self.fingerprint = fingerprint
# (def __repr__(self): line is missing from this view.)
1239 return '<Fingerprint %s>' % self.fingerprint
1241 __all__.append('Fingerprint')
1244 def get_fingerprint(fpr, session=None):
1246 Returns Fingerprint object for given fpr.
1249 @param fpr: The fpr to find / add
1251 @type session: SQLAlchemy
1252 @param session: Optional SQL session object (a temporary one will be
1253 generated if not supplied).
1256 @return: the Fingerprint object for the given fpr or None
1259 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
# (try/q.one() and the return lines are missing from this view.)
1263 except NoResultFound:
1268 __all__.append('get_fingerprint')
1271 def get_or_set_fingerprint(fpr, session=None):
1273 Returns Fingerprint object for given fpr.
1275 If no matching fpr is found, a row is inserted.
1278 @param fpr: The fpr to find / add
1280 @type session: SQLAlchemy
1281 @param session: Optional SQL session object (a temporary one will be
1282 generated if not supplied). If not passed, a commit will be performed at
1283 the end of the function, otherwise the caller is responsible for committing.
1284 A flush will be performed either way.
1287 @return: the Fingerprint object for the given fpr
1290 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
# Get-or-insert pattern: on miss, create the row and flush/commit to persist it.
1294 except NoResultFound:
1295 fingerprint = Fingerprint()
1296 fingerprint.fingerprint = fpr
1297 session.add(fingerprint)
1298 session.commit_or_flush()
# (return of the fetched/created object is missing from this view.)
1303 __all__.append('get_or_set_fingerprint')
1305 ################################################################################
1307 # Helper routine for Keyring class
1308 def get_ldap_name(entry):
1310 for k in ["cn", "mn", "sn"]:
1312 if ret and ret[0] != "" and ret[0] != "-":
1314 return " ".join(name)
1316 ################################################################################
# ORM-mapped keyring plus helpers to parse `gpg --with-colons` output and to
# sync key owners from LDAP.  gpg_invocation is a format string: %s is the
# keyring path.
class Keyring(object):
gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
" --with-colons --fingerprint --fingerprint"
def __init__(self, *args, **kwargs):
# Body of __repr__ (header missing from this excerpt).
return '<Keyring %s>' % self.keyring_name
def de_escape_gpg_str(self, txt):
    """Decode gpg's ``\\xNN`` escape sequences in *txt*.

    gpg's --with-colons output escapes bytes as literal ``\\xNN``
    tokens; each such token is replaced by the character with that hex
    code while all other text passes through unchanged.
    """
    # The capture group makes re.split keep the \xNN tokens, which land
    # at the odd indices of the resulting list.
    pieces = re.split(r'(\\x..)', txt)
    decoded = [
        "%c" % int(piece[2:], 16) if idx % 2 else piece
        for idx, piece in enumerate(pieces)
    ]
    return "".join(decoded)
# Split a gpg uid string into (name, email) via email.Utils.parseaddr,
# stripping any parenthesised comment and de-escaping gpg \xNN sequences.
# NOTE(review): two lines between de_escape and the return are missing from
# this sampled excerpt.
def parse_address(self, uid):
"""parses uid and returns a tuple of real name and email address"""
(name, address) = email.Utils.parseaddr(uid)
# Drop a "(comment)" section from the real-name part.
name = re.sub(r"\s*[(].*[)]", "", name)
name = self.de_escape_gpg_str(name)
return (name, address)
# Populate self.keys / self.fpr_lookup by running gpg over the given keyring
# file and parsing its colon-delimited output line by line.
# NOTE(review): several lines (the key-id extraction on "pub" records and the
# dict initialisation) are missing from this sampled excerpt.
def load_keys(self, keyring):
if not self.keyring_id:
raise Exception('Must be initialized with database information')
# gpg_invocation is a format string; "r" = read gpg's stdout.
k = os.popen(self.gpg_invocation % keyring, "r")
for line in k.xreadlines():
field = line.split(":")
# "pub": start of a new key block.
if field[0] == "pub":
(name, addr) = self.parse_address(field[9])
self.keys[key]["email"] = addr
self.keys[key]["name"] = name
self.keys[key]["fingerprints"] = []
# "sub": subkey; remember whether it has the signing capability.
elif key and field[0] == "sub" and len(field) >= 12:
signingkey = ("s" in field[11])
# "uid": prefer the first uid that actually carries an email address.
elif key and field[0] == "uid":
(name, addr) = self.parse_address(field[9])
if "email" not in self.keys[key] and "@" in addr:
self.keys[key]["email"] = addr
self.keys[key]["name"] = name
# "fpr": record fingerprints of signing-capable keys only.
elif signingkey and field[0] == "fpr":
self.keys[key]["fingerprints"].append(field[9])
self.fpr_lookup[field[9]] = key
# Query the Debian LDAP for accounts with key fingerprints and attach their
# uid to the matching entries in self.keys; returns (byname, byuid) maps.
# NOTE(review): the import of ldap / byuid-byname initialisation and the loop
# header over the LDAP results are missing from this sampled excerpt.
def import_users_from_ldap(self, session):
LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
# Anonymous bind is sufficient for this read-only search.
l = ldap.open(LDAPServer)
l.simple_bind_s("","")
Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
"(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
["uid", "keyfingerprint", "cn", "mn", "sn"])
ldap_fin_uid_id = {}
uid = entry["uid"][0]
name = get_ldap_name(entry)
fingerprints = entry["keyFingerPrint"]
for f in fingerprints:
key = self.fpr_lookup.get(f, None)
# Skip fingerprints that are not in the loaded keyring.
if key not in self.keys:
self.keys[key]["uid"] = uid
keyid = get_or_set_uid(uid, session).uid_id
byuid[keyid] = (uid, name)
byname[uid] = (keyid, name)
return (byname, byuid)
# Derive database uids for every loaded key from its email address, using
# C{format} (a %-template, e.g. "%s") -- keys without a usable email get the
# shared "invalid-uid" placeholder.  Returns (byname, byuid) maps.
# NOTE(review): byuid/byname initialisation and the branch structure around
# the fallback path are missing from this sampled excerpt.
def generate_users_from_keyring(self, format, session):
for x in self.keys.keys():
if "email" not in self.keys[x]:
self.keys[x]["uid"] = format % "invalid-uid"
uid = format % self.keys[x]["email"]
keyid = get_or_set_uid(uid, session).uid_id
byuid[keyid] = (uid, self.keys[x]["name"])
byname[uid] = (keyid, self.keys[x]["name"])
self.keys[x]["uid"] = uid
# Fallback bookkeeping for the placeholder uid.
uid = format % "invalid-uid"
keyid = get_or_set_uid(uid, session).uid_id
byuid[keyid] = (uid, "ungeneratable user id")
byname[uid] = (keyid, "ungeneratable user id")
return (byname, byuid)
__all__.append('Keyring')
# Look up a Keyring row by name; read-only (returns None when absent, per the
# docstring).  NOTE(review): q.one() and the NoResultFound body are missing
# from this sampled excerpt.
def get_keyring(keyring, session=None):
If C{keyring} does not have an entry in the C{keyrings} table yet, return None
If C{keyring} already has an entry, simply return the existing Keyring
@type keyring: string
@param keyring: the keyring name
@return: the Keyring object for this keyring
q = session.query(Keyring).filter_by(keyring_name=keyring)
except NoResultFound:
__all__.append('get_keyring')
1462 ################################################################################
# ORM mapping of the keyring_acl_map table (keyring -> ACL association).
class KeyringACLMap(object):
def __init__(self, *args, **kwargs):
# Body of __repr__ (header missing from this excerpt).
return '<KeyringACLMap %s>' % self.keyring_acl_map_id
__all__.append('KeyringACLMap')
1473 ################################################################################
# ORM mapping of an uploaded .changes file.
class DBChange(object):
def __init__(self, *args, **kwargs):
# Body of __repr__ (header missing from this excerpt).
return '<DBChange %s>' % self.changesname
# Detach this changes entry from its policy queue and drop the file
# association rows.  NOTE(review): the delete statements referenced by the
# two comments below are missing from this sampled excerpt.
def clean_from_queue(self):
session = DBConn().session().object_session(self)
# Remove changes_pool_files entries
# Remove changes_pending_files references
# Clear out of queue
self.in_queue = None
self.approved_for_id = None
__all__.append('DBChange')
# Look up the DBChange row for a .changes file by name; returns None when
# absent (per docstring).  NOTE(review): q.one() and the NoResultFound body
# are missing from this sampled excerpt.
def get_dbchange(filename, session=None):
returns DBChange object for given C{filename}.
@type filename: string
@param filename: the name of the file
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
@return: DBChange object for the given filename (C{None} if not present)
q = session.query(DBChange).filter_by(changesname=filename)
except NoResultFound:
__all__.append('get_dbchange')
1522 ################################################################################
# ORM mapping of the location table (an on-disk archive path).
class Location(object):
def __init__(self, *args, **kwargs):
# Body of __repr__ (header missing from this excerpt).
return '<Location %s (%s)>' % (self.path, self.location_id)
__all__.append('Location')
# Look up a Location by path, optionally narrowing by archive and component
# via joins.  NOTE(review): q.one() and the NoResultFound body are missing
# from this sampled excerpt.
def get_location(location, component=None, archive=None, session=None):
Returns Location object for the given combination of location, component
@type location: string
@param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}
@type component: string
@param component: the component name (if None, no restriction applied)
@type archive: string
@param archive: the archive name (if None, no restriction applied)
@rtype: Location / None
@return: Either a Location object or None if one can't be found
q = session.query(Location).filter_by(path=location)
if archive is not None:
q = q.join(Archive).filter_by(archive_name=archive)
if component is not None:
q = q.join(Component).filter_by(component_name=component)
except NoResultFound:
__all__.append('get_location')
1567 ################################################################################
# ORM mapping of the maintainer table.
class Maintainer(object):
def __init__(self, *args, **kwargs):
# Body of __repr__ (header missing from this excerpt).
return '''<Maintainer '%s' (%s)>''' % (self.name, self.maintainer_id)
# Split self.name via daklib's fix_maintainer(); empty 4-tuple when unset.
def get_split_maintainer(self):
if not hasattr(self, 'name') or self.name is None:
return ('', '', '', '')
return fix_maintainer(self.name.strip())
__all__.append('Maintainer')
# Get-or-create a Maintainer row by name; mirrors get_or_set_fingerprint().
def get_or_set_maintainer(name, session=None):
Returns Maintainer object for given maintainer name.
If no matching maintainer name is found, a row is inserted.
@param name: The maintainer name to add
@type session: SQLAlchemy
@param session: Optional SQL session object (a temporary one will be
generated if not supplied). If not passed, a commit will be performed at
the end of the function, otherwise the caller is responsible for commiting.
A flush will be performed either way.
@return: the Maintainer object for the given maintainer
q = session.query(Maintainer).filter_by(name=name)
# Not found: create and persist a new row.
except NoResultFound:
maintainer = Maintainer()
maintainer.name = name
session.add(maintainer)
session.commit_or_flush()
__all__.append('get_or_set_maintainer')
# Primary-key lookup of a Maintainer; Query.get() returns None for an
# unknown id, which matches the docstring.
def get_maintainer(maintainer_id, session=None):
Return the name of the maintainer behind C{maintainer_id} or None if that
maintainer_id is invalid.
@type maintainer_id: int
@param maintainer_id: the id of the maintainer
@return: the Maintainer with this C{maintainer_id}
return session.query(Maintainer).get(maintainer_id)
__all__.append('get_maintainer')
1635 ################################################################################
# ORM mapping of a NEW-queue reviewer comment on a (package, version).
class NewComment(object):
def __init__(self, *args, **kwargs):
# Body of __repr__ (header missing from this excerpt).
return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
__all__.append('NewComment')
# Existence test for a NEW comment on (package, version).
# NOTE(review): bool(q.count() > 0) is redundant -- the > already yields a
# bool -- but the block is left byte-identical in this sampled excerpt.
def has_new_comment(package, version, session=None):
Returns true if the given combination of C{package}, C{version} has a comment.
@type package: string
@param package: name of the package
@type version: string
@param version: package version
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
q = session.query(NewComment)
q = q.filter_by(package=package)
q = q.filter_by(version=version)
return bool(q.count() > 0)
__all__.append('has_new_comment')
# Fetch NEW comments, with each filter applied only when its argument is
# given.  NOTE(review): the final `return q.all()` line is missing from this
# sampled excerpt.
def get_new_comments(package=None, version=None, comment_id=None, session=None):
Returns (possibly empty) list of NewComment objects for the given
@type package: string (optional)
@param package: name of the package
@type version: string (optional)
@param version: package version
@type comment_id: int (optional)
@param comment_id: An id of a comment
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
@return: A (possibly empty) list of NewComment objects will be returned
q = session.query(NewComment)
if package is not None: q = q.filter_by(package=package)
if version is not None: q = q.filter_by(version=version)
if comment_id is not None: q = q.filter_by(comment_id=comment_id)
__all__.append('get_new_comments')
1705 ################################################################################
# ORM mapping of the override table (per-suite package metadata override).
class Override(object):
def __init__(self, *args, **kwargs):
# Body of __repr__ (header missing from this excerpt).
return '<Override %s (%s)>' % (self.package, self.suite_id)
__all__.append('Override')
# Query overrides for a package; each of suite/component/overridetype accepts
# a single name or a list (normalised to a list before the IN filter).
# NOTE(review): the final `return q.all()` line is missing from this sampled
# excerpt.
def get_override(package, suite=None, component=None, overridetype=None, session=None):
Returns Override object for the given parameters
@type package: string
@param package: The name of the package
@type suite: string, list or None
@param suite: The name of the suite (or suites if a list) to limit to. If
None, don't limit. Defaults to None.
@type component: string, list or None
@param component: The name of the component (or components if a list) to
limit to. If None, don't limit. Defaults to None.
@type overridetype: string, list or None
@param overridetype: The name of the overridetype (or overridetypes if a list) to
limit to. If None, don't limit. Defaults to None.
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
@return: A (possibly empty) list of Override objects will be returned
q = session.query(Override)
q = q.filter_by(package=package)
if suite is not None:
if not isinstance(suite, list): suite = [suite]
q = q.join(Suite).filter(Suite.suite_name.in_(suite))
if component is not None:
if not isinstance(component, list): component = [component]
q = q.join(Component).filter(Component.component_name.in_(component))
if overridetype is not None:
if not isinstance(overridetype, list): overridetype = [overridetype]
q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
__all__.append('get_override')
1764 ################################################################################
# ORM mapping of the override_type table (deb/udeb/dsc).
class OverrideType(object):
def __init__(self, *args, **kwargs):
# Body of __repr__ (header missing from this excerpt).
return '<OverrideType %s>' % self.overridetype
__all__.append('OverrideType')
# Look up an OverrideType row by name.  NOTE(review): q.one() and the
# NoResultFound body are missing from this sampled excerpt; the @return text
# ("database id") appears stale relative to the function name/docstring head,
# which say an OverrideType object -- confirm against the full file.
def get_override_type(override_type, session=None):
Returns OverrideType object for given C{override type}.
@type override_type: string
@param override_type: The name of the override type
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
@return: the database id for the given override type
q = session.query(OverrideType).filter_by(overridetype=override_type)
except NoResultFound:
__all__.append('get_override_type')
1800 ################################################################################
# ORM mapping of deb package contents (one file path per row).
# NOTE(review): the repr string spells 'DebConetnts' -- typo in a runtime
# string, left untouched here; fix alongside UdebContents when editing the
# full file.
class DebContents(object):
def __init__(self, *args, **kwargs):
# Body of __repr__ (header missing from this excerpt).
return '<DebConetnts %s: %s>' % (self.package.package,self.file)
__all__.append('DebContents')
# ORM mapping of udeb package contents.
# NOTE(review): repr spells 'UdebConetnts' -- same typo as DebContents.
class UdebContents(object):
def __init__(self, *args, **kwargs):
# Body of __repr__ (header missing from this excerpt).
return '<UdebConetnts %s: %s>' % (self.package.package,self.file)
__all__.append('UdebContents')
# ORM mapping of not-yet-accepted binary package contents.
class PendingBinContents(object):
def __init__(self, *args, **kwargs):
# Body of __repr__ (header missing from this excerpt).
return '<PendingBinContents %s>' % self.contents_id
__all__.append('PendingBinContents')
# Record the file paths shipped by a not-yet-accepted binary package in the
# pending contents table, replacing any previous rows for the same
# (package, version, architecture).  Commits/rolls back only when it created
# the session itself (privatetrans).
# NOTE(review): this sampled excerpt is missing several lines, including the
# remaining parameters of the def, the session-creation branch, the
# q.delete()/session.add() calls and the final return statements.
def insert_pending_content_paths(package,
Make sure given paths are temporarily associated with given
@param package: the package to associate with should have been read in from the binary control file
@type fullpaths: list
@param fullpaths: the list of paths of the file being associated with the binary
@type session: SQLAlchemy session
@param session: Optional SQLAlchemy session. If this is passed, the caller
is responsible for ensuring a transaction has begun and committing the
results or rolling back based on the result code. If not passed, a commit
will be performed at the end of the function
@return: True upon success, False if there is a problem
privatetrans = False
session = DBConn().session()
arch = get_architecture(package['Architecture'], session)
arch_id = arch.arch_id
# Remove any already existing recorded files for this package
q = session.query(PendingBinContents)
q = q.filter_by(package=package['Package'])
q = q.filter_by(version=package['Version'])
q = q.filter_by(architecture=arch_id)
for fullpath in fullpaths:
# Normalise leading "./" so stored paths are relative without it.
if fullpath.startswith( "./" ):
fullpath = fullpath[2:]
pca = PendingBinContents()
pca.package = package['Package']
pca.version = package['Version']
pca.architecture = arch_id
# Magic type discriminators (see the authors' own comments).
pca.type = 8 # gross
pca.type = 7 # also gross
# Only commit if we set up the session ourself
except Exception, e:
traceback.print_exc()
# Only rollback if we set up the session ourself
__all__.append('insert_pending_content_paths')
1905 ################################################################################
# ORM mapping of a policy queue (e.g. NEW, byhand).
class PolicyQueue(object):
def __init__(self, *args, **kwargs):
# Body of __repr__ (header missing from this excerpt).
return '<PolicyQueue %s>' % self.queue_name
__all__.append('PolicyQueue')
# Look up a PolicyQueue by its name.  NOTE(review): q.one() and the
# NoResultFound body are missing from this sampled excerpt.
def get_policy_queue(queuename, session=None):
Returns PolicyQueue object for given C{queue name}
@type queuename: string
@param queuename: The name of the queue
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
@return: PolicyQueue object for the given queue
q = session.query(PolicyQueue).filter_by(queue_name=queuename)
except NoResultFound:
__all__.append('get_policy_queue')
# Look up a PolicyQueue by its filesystem path instead of its name.
def get_policy_queue_from_path(pathname, session=None):
Returns PolicyQueue object for given C{path name}
@type pathname: string
@param pathname: The path
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
@return: PolicyQueue object for the given queue
q = session.query(PolicyQueue).filter_by(path=pathname)
except NoResultFound:
__all__.append('get_policy_queue_from_path')
1966 ################################################################################
# ORM mapping of the priority table; compares equal to plain priority-name
# strings (see __eq__/__ne__ below).
class Priority(object):
def __init__(self, *args, **kwargs):
def __eq__(self, val):
    """Compare equal to a plain string naming this priority."""
    if not isinstance(val, str):
        # Signal Python to fall back to the default comparison machinery.
        return NotImplemented
    return self.priority == val
def __ne__(self, val):
    """Inverse of __eq__ for plain priority-name strings."""
    if not isinstance(val, str):
        # Signal Python to fall back to the default comparison machinery.
        return NotImplemented
    return self.priority != val
# Body of Priority.__repr__ (header missing from this excerpt).
return '<Priority %s (%s)>' % (self.priority, self.priority_id)
__all__.append('Priority')
# Look up a Priority row by name.  NOTE(review): q.one() and the
# NoResultFound body are missing from this sampled excerpt.
def get_priority(priority, session=None):
Returns Priority object for given C{priority name}.
@type priority: string
@param priority: The name of the priority
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
@return: Priority object for the given priority
q = session.query(Priority).filter_by(priority=priority)
except NoResultFound:
__all__.append('get_priority')
# Build a name -> id dict over all Priority rows.  NOTE(review): the ret
# initialisation, the loop header over q.all() and the return are missing
# from this sampled excerpt.
def get_priorities(session=None):
Returns dictionary of priority names -> id mappings
@type session: Session
@param session: Optional SQL session object (a temporary one will be
generated if not supplied)
@return: dictionary of priority names -> id mappings
q = session.query(Priority)
ret[x.priority] = x.priority_id
__all__.append('get_priorities')
2036 ################################################################################
# ORM mapping of the section table; compares equal to plain section-name
# strings (see __eq__/__ne__ below).
class Section(object):
def __init__(self, *args, **kwargs):
def __eq__(self, val):
    """Compare equal to a plain string naming this section."""
    if not isinstance(val, str):
        # Signal Python to fall back to the default comparison machinery.
        return NotImplemented
    return self.section == val
def __ne__(self, val):
    """Inverse of __eq__ for plain section-name strings."""
    if not isinstance(val, str):
        # Signal Python to fall back to the default comparison machinery.
        return NotImplemented
    return self.section != val
# Body of Section.__repr__ (header missing from this excerpt).
return '<Section %s>' % self.section
__all__.append('Section')
# Look up a Section row by name.  NOTE(review): q.one() and the
# NoResultFound body are missing from this sampled excerpt.
def get_section(section, session=None):
Returns Section object for given C{section name}.
@type section: string
@param section: The name of the section
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
@return: Section object for the given section name
q = session.query(Section).filter_by(section=section)
except NoResultFound:
__all__.append('get_section')
# Build a name -> id dict over all Section rows.  NOTE(review): the ret
# initialisation, the loop header over q.all() and the return are missing
# from this sampled excerpt.
def get_sections(session=None):
Returns dictionary of section names -> id mappings
@type session: Session
@param session: Optional SQL session object (a temporary one will be
generated if not supplied)
@return: dictionary of section names -> id mappings
q = session.query(Section)
ret[x.section] = x.section_id
__all__.append('get_sections')
2106 ################################################################################
# ORM mapping of a source package row.
class DBSource(object):
def __init__(self, *args, **kwargs):
# Body of __repr__ (header missing from this excerpt).
return '<DBSource %s (%s)>' % (self.source, self.version)
__all__.append('DBSource')
2118 def source_exists(source, source_version, suites = ["any"], session=None):
2120 Ensure that source exists somewhere in the archive for the binary
2121 upload being processed.
2122 1. exact match => 1.0-3
2123 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
2125 @type source: string
2126 @param source: source name
2128 @type source_version: string
2129 @param source_version: expected source version
2132 @param suites: list of suites to check in, default I{any}
2134 @type session: Session
2135 @param session: Optional SQLA session object (a temporary one will be
2136 generated if not supplied)
2139 @return: returns 1 if a source with expected version is found, otherwise 0
2146 for suite in suites:
2147 q = session.query(DBSource).filter_by(source=source)
2149 # source must exist in suite X, or in some other suite that's
2150 # mapped to X, recursively... silent-maps are counted too,
2151 # unreleased-maps aren't.
2152 maps = cnf.ValueList("SuiteMappings")[:]
2154 maps = [ m.split() for m in maps ]
2155 maps = [ (x[1], x[2]) for x in maps
2156 if x[0] == "map" or x[0] == "silent-map" ]
2159 if x[1] in s and x[0] not in s:
2162 q = q.join(SrcAssociation).join(Suite)
2163 q = q.filter(Suite.suite_name.in_(s))
2165 # Reduce the query results to a list of version numbers
2166 ql = [ j.version for j in q.all() ]
2169 if source_version in ql:
2173 from daklib.regexes import re_bin_only_nmu
2174 orig_source_version = re_bin_only_nmu.sub('', source_version)
2175 if orig_source_version in ql:
2178 # No source found so return not ok
2183 __all__.append('source_exists')
# List every Suite that carries the named source package (via the
# src_associations join).
def get_suites_source_in(source, session=None):
Returns list of Suite objects which given C{source} name is in
@param source: DBSource package name to search for
@return: list of Suite objects for the given source
return session.query(Suite).join(SrcAssociation).join(DBSource).filter_by(source=source).all()
__all__.append('get_suites_source_in')
# Fetch DBSource rows by name, optionally narrowed by exact version and by
# the dm_upload_allowed flag.  NOTE(review): the final `return q.all()` line
# is missing from this sampled excerpt.
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
Returns list of DBSource objects for given C{source} name and other parameters
@param source: DBSource package name to search for
@type version: str or None
@param version: DBSource version name to search for or None if not applicable
@type dm_upload_allowed: bool
@param dm_upload_allowed: If None, no effect. If True or False, only
return packages with that dm_upload_allowed setting
@type session: Session
@param session: Optional SQL session object (a temporary one will be
generated if not supplied)
@return: list of DBSource objects for the given name (may be empty)
q = session.query(DBSource).filter_by(source=source)
if version is not None:
q = q.filter_by(version=version)
if dm_upload_allowed is not None:
q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
__all__.append('get_sources_from_name')
# Resolve the single DBSource present in a given suite; q.one() raises into
# the NoResultFound handler when absent (handler body missing from this
# sampled excerpt).
def get_source_in_suite(source, suite, session=None):
Returns list of DBSource objects for a combination of C{source} and C{suite}.
- B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
- B{suite} - a suite name, eg. I{unstable}
@type source: string
@param source: source package name
@param suite: the suite name
@return: the version for I{source} in I{suite}
q = session.query(SrcAssociation)
q = q.join('source').filter_by(source=source)
q = q.join('suite').filter_by(suite_name=suite)
return q.one().source
except NoResultFound:
__all__.append('get_source_in_suite')
2266 ################################################################################
# Insert a freshly-accepted source upload (.dsc plus its constituent files)
# into the database: DBSource row, suite associations, pool files,
# dsc_files rows and src_uploaders.  Returns (source, dsc_component,
# dsc_location_id, pfs) where pfs collects every new PoolFile.
# NOTE(review): this sampled excerpt is missing many lines (DBSource()/
# DSCFile()/DscFile construction, session.add/flush calls, loop bodies);
# the visible lines are kept byte-identical.
def add_dsc_to_db(u, filename, session=None):
entry = u.pkg.files[filename]
source.source = u.pkg.dsc["source"]
source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
source.install_date = datetime.now().date()
dsc_component = entry["component"]
dsc_location_id = entry["location id"]
source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
# Set up a new poolfile if necessary
if not entry.has_key("files id") or not entry["files id"]:
filename = entry["pool name"] + filename
poolfile = add_poolfile(filename, entry, dsc_location_id, session)
pfs.append(poolfile)
entry["files id"] = poolfile.file_id
source.poolfile_id = entry["files id"]
# Associate the source with every suite listed in the changes file.
for suite_name in u.pkg.changes["distribution"].keys():
sa = SrcAssociation()
sa.source_id = source.source_id
sa.suite_id = get_suite(suite_name).suite_id
# Add the source files to the DB (files and dsc_files)
dscfile.source_id = source.source_id
dscfile.poolfile_id = entry["files id"]
session.add(dscfile)
for dsc_file, dentry in u.pkg.dsc_files.items():
df.source_id = source.source_id
# If the .orig tarball is already in the pool, its
# files id is stored in dsc_files by check_dsc().
files_id = dentry.get("files id", None)
# Find the entry in the files hash
# TODO: Bail out here properly
for f, e in u.pkg.files.items():
if files_id is None:
filename = dfentry["pool name"] + dsc_file
(found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
# FIXME: needs to check for -1/-2 and or handle exception
if found and obj is not None:
files_id = obj.file_id
# If still not found, add it
if files_id is None:
# HACK: Force sha1sum etc into dentry
dentry["sha1sum"] = dfentry["sha1sum"]
dentry["sha256sum"] = dfentry["sha256sum"]
poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
pfs.append(poolfile)
files_id = poolfile.file_id
poolfile = get_poolfile_by_id(files_id, session)
if poolfile is None:
utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
pfs.append(poolfile)
df.poolfile_id = files_id
# Add the src_uploaders to the DB
uploader_ids = [source.maintainer_id]
if u.pkg.dsc.has_key("uploaders"):
# Uploaders are comma-separated; ">, " is rewritten to a tab first so
# commas inside "Name, Title <addr>" style entries do not split.
for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
for up_id in uploader_ids:
if added_ids.has_key(up_id):
utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
su.maintainer_id = up_id
su.source_id = source.source_id
return source, dsc_component, dsc_location_id, pfs
__all__.append('add_dsc_to_db')
# Insert an accepted binary package (deb or udeb -- see docstring) into the
# database: DBBinary row, pool file and per-suite BinAssociations.
# NOTE(review): this sampled excerpt is missing lines (the DBBinary()
# construction, session.add/flush, else-branch headers); visible lines are
# kept byte-identical.
def add_deb_to_db(u, filename, session=None):
Contrary to what you might expect, this routine deals with both
debs and udebs. That info is in 'dbtype', whilst 'type' is
'deb' for both of them
entry = u.pkg.files[filename]
bin.package = entry["package"]
bin.version = entry["version"]
bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
bin.arch_id = get_architecture(entry["architecture"], session).arch_id
bin.binarytype = entry["dbtype"]
filename = entry["pool name"] + filename
fullpath = os.path.join(cnf["Dir::Pool"], filename)
if not entry.get("location id", None):
entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id
# Reuse an existing pool file when one is recorded, else create one.
if entry.get("files id", None):
poolfile = get_poolfile_by_id(bin.poolfile_id)
bin.poolfile_id = entry["files id"]
poolfile = add_poolfile(filename, entry, entry["location id"], session)
bin.poolfile_id = entry["files id"] = poolfile.file_id
# The binary must map to exactly one source package.
bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
if len(bin_sources) != 1:
raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
(bin.package, bin.version, entry["architecture"],
filename, bin.binarytype, u.pkg.changes["fingerprint"])
bin.source_id = bin_sources[0].source_id
# Add and flush object so it has an ID
# Add BinAssociations
for suite_name in u.pkg.changes["distribution"].keys():
ba = BinAssociation()
ba.binary_id = bin.binary_id
ba.suite_id = get_suite(suite_name).suite_id
# Deal with contents - disabled for now
#contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
# print "REJECT\nCould not determine contents of package %s" % bin.package
# session.rollback()
# raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
__all__.append('add_deb_to_db')
2447 ################################################################################
# ORM mapping of the source_acl table.
class SourceACL(object):
def __init__(self, *args, **kwargs):
# Body of __repr__ (header missing from this excerpt).
return '<SourceACL %s>' % self.source_acl_id
__all__.append('SourceACL')
2458 ################################################################################
# ORM mapping of src_associations (source <-> suite link table).
class SrcAssociation(object):
def __init__(self, *args, **kwargs):
# Body of __repr__ (header missing from this excerpt).
return '<SrcAssociation %s (%s, %s)>' % (self.sa_id, self.source, self.suite)
__all__.append('SrcAssociation')
2469 ################################################################################
# ORM mapping of the src_format table (e.g. "1.0", "3.0 (quilt)").
class SrcFormat(object):
def __init__(self, *args, **kwargs):
# Body of __repr__ (header missing from this excerpt).
return '<SrcFormat %s>' % (self.format_name)
__all__.append('SrcFormat')
2480 ################################################################################
# ORM mapping of src_uploaders (Uploaders: field entries per source).
class SrcUploader(object):
def __init__(self, *args, **kwargs):
# Body of __repr__ (header missing from this excerpt).
return '<SrcUploader %s>' % self.uploader_id
__all__.append('SrcUploader')
2491 ################################################################################
# (display label, Suite attribute) pairs consumed by Suite.details() below.
# NOTE(review): at least one pair between 'Origin' and 'Description' is
# missing from this sampled excerpt.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
('SuiteID', 'suite_id'),
('Version', 'version'),
('Origin', 'origin'),
('Description', 'description'),
('Untouchable', 'untouchable'),
('Announce', 'announce'),
('Codename', 'codename'),
('OverrideCodename', 'overridecodename'),
('ValidTime', 'validtime'),
('Priority', 'priority'),
('NotAutomatic', 'notautomatic'),
('CopyChanges', 'copychanges'),
('OverrideSuite', 'overridesuite')]
# ORM mapping of the suite table; compares equal to plain suite-name strings
# (see __eq__/__ne__ below).
class Suite(object):
def __init__(self, suite_name = None, version = None):
self.suite_name = suite_name
self.version = version
# Body of __repr__ (header missing from this excerpt).
return '<Suite %s>' % self.suite_name
def __eq__(self, val):
    """Compare equal to a plain string naming this suite."""
    if not isinstance(val, str):
        # Signal Python to fall back to the default comparison machinery.
        return NotImplemented
    return self.suite_name == val
def __ne__(self, val):
    """Inverse of __eq__ for plain suite-name strings."""
    if not isinstance(val, str):
        # Signal Python to fall back to the default comparison machinery.
        return NotImplemented
    return self.suite_name != val
# Body of Suite.details() (header and `ret` initialisation missing from this
# sampled excerpt): renders the SUITE_FIELDS attributes as "Label: value"
# lines joined by newlines.
for disp, field in SUITE_FIELDS:
val = getattr(self, field, None)
ret.append("%s: %s" % (disp, val))
return "\n".join(ret)
# List this suite's architectures, sorted by name, optionally excluding the
# pseudo-architectures 'source' and 'all'.  NOTE(review): the `if skipsrc:` /
# `if skipall:` guard lines are missing from this sampled excerpt.
def get_architectures(self, skipsrc=False, skipall=False):
Returns list of Architecture objects
@type skipsrc: boolean
@param skipsrc: Whether to skip returning the 'source' architecture entry
@type skipall: boolean
@param skipall: Whether to skip returning the 'all' architecture entry
@return: list of Architecture objects for the given name (may be empty)
q = object_session(self).query(Architecture). \
filter(Architecture.suites.contains(self))
q = q.filter(Architecture.arch_string != 'source')
q = q.filter(Architecture.arch_string != 'all')
return q.order_by(Architecture.arch_string).all()
__all__.append('Suite')
# Look up a Suite row by name.  NOTE(review): q.one() and the NoResultFound
# body are missing from this sampled excerpt.
def get_suite(suite, session=None):
Returns Suite object for given C{suite name}.
@param suite: The name of the suite
@type session: Session
@param session: Optional SQLA session object (a temporary one will be
generated if not supplied)
@return: Suite object for the requested suite name (None if not present)
q = session.query(Suite).filter_by(suite_name=suite)
except NoResultFound:
__all__.append('get_suite')
2589 ################################################################################
# TODO: should be removed because the implementation is too trivial
# Thin wrapper: delegates to Suite.get_architectures() after resolving the
# suite by name.
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
Returns list of Architecture objects for given C{suite} name
@param suite: Suite name to search for
@type skipsrc: boolean
@param skipsrc: Whether to skip returning the 'source' architecture entry
@type skipall: boolean
@param skipall: Whether to skip returning the 'all' architecture entry
@type session: Session
@param session: Optional SQL session object (a temporary one will be
generated if not supplied)
@return: list of Architecture objects for the given name (may be empty)
return get_suite(suite, session).get_architectures(skipsrc, skipall)
__all__.append('get_suite_architectures')
2620 ################################################################################
# Association object mapping a suite to one of its allowed source formats
# (see the suite_src_formats table mapping in DBConn.__setupmappers).
2622 class SuiteSrcFormat(object):
2623 def __init__(self, *args, **kwargs):
# NOTE(review): the "def __repr__(self):" line is elided from this
# listing; the return below is its body.
2627 return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2629 __all__.append('SuiteSrcFormat')
2632 def get_suite_src_formats(suite, session=None):
2634 Returns list of allowed SrcFormat for C{suite}.
2637 @param suite: Suite name to search for
2639 @type session: Session
2640 @param session: Optional SQL session object (a temporary one will be
2641 generated if not supplied)
2644 @return: the list of allowed source formats for I{suite}
# Join SrcFormat -> SuiteSrcFormat -> Suite and restrict to the named
# suite; ordering by format_name gives a stable result.
2647 q = session.query(SrcFormat)
2648 q = q.join(SuiteSrcFormat)
2649 q = q.join(Suite).filter_by(suite_name=suite)
2650 q = q.order_by('format_name')
# NOTE(review): the final "return q.all()" line is elided from this
# listing — confirm against the full file.
2654 __all__.append('get_suite_src_formats')
2656 ################################################################################
# Uid constructor; the assignments of self.uid / self.name are on the
# elided lines that follow in the full file.
2659 def __init__(self, uid = None, name = None):
2663 def __eq__(self, val):
2664 if isinstance(val, str):
2665 return (self.uid == val)
2666 # This signals to use the normal comparison operator
2667 return NotImplemented
2669 def __ne__(self, val):
2670 if isinstance(val, str):
2671 return (self.uid != val)
2672 # This signals to use the normal comparison operator
2673 return NotImplemented
# NOTE(review): the "def __repr__(self):" line (original 2675) is elided
# from this listing; the return below is its body.
2676 return '<Uid %s (%s)>' % (self.uid, self.name)
2678 __all__.append('Uid')
2681 def get_or_set_uid(uidname, session=None):
2683 Returns uid object for given uidname.
2685 If no matching uidname is found, a row is inserted.
2687 @type uidname: string
2688 @param uidname: The uid to add
2690 @type session: SQLAlchemy
2691 @param session: Optional SQL session object (a temporary one will be
2692 generated if not supplied). If not passed, a commit will be performed at
# docstring typo fixed: commiting -> committing
2693 the end of the function, otherwise the caller is responsible for committing.
2696 @return: the uid object for the given uidname
# Look for an existing row first.
2699 q = session.query(Uid).filter_by(uid=uidname)
# NOTE(review): the try/return-existing lines are elided; on
# NoResultFound a new Uid row is inserted and flushed/committed below.
2703 except NoResultFound:
# commit_or_flush commits when this function owns the session,
# otherwise only flushes (caller commits) — per the docstring above.
2707 session.commit_or_flush()
2712 __all__.append('get_or_set_uid')
# Return the Uid owning the given fingerprint string.
# NOTE(review): elided lines around this block likely hold a
# @session_wrapper decorator and the try/return / return-None paths
# (mirroring get_suite above) — confirm against the full file.
2715 def get_uid_from_fingerprint(fpr, session=None):
2716 q = session.query(Uid)
2717 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
2721 except NoResultFound:
2724 __all__.append('get_uid_from_fingerprint')
2726 ################################################################################
# ORM object for the upload_blocks table (see DBConn.__setupmappers).
2728 class UploadBlock(object):
2729 def __init__(self, *args, **kwargs):
# NOTE(review): the "def __repr__(self):" line is elided from this
# listing; the return below is its body.
2733 return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2735 __all__.append('UploadBlock')
2737 ################################################################################
# Borg-style singleton: all instances share one __dict__
# (self.__shared_state), so the engine/metadata/mappers are set up once.
2739 class DBConn(object):
2741 database module init.
2745 def __init__(self, *args, **kwargs):
2746 self.__dict__ = self.__shared_state
# Only the first instance performs initialisation.
2748 if not getattr(self, 'initialised', False):
2749 self.initialised = True
# NOTE(review): dict.has_key is Python 2 only; 'debug' in kwargs is the
# portable spelling. Left unchanged here because surrounding lines
# (including the __createconn() call) are elided from this listing.
2750 self.debug = kwargs.has_key('debug')
# Reflect database tables and views into SQLAlchemy Table objects,
# exposing them as self.tbl_<name> / self.view_<name> attributes.
2753 def __setuptables(self):
2754 tables_with_primary = (
2765 'changes_pending_binaries',
2766 'changes_pending_files',
2767 'changes_pending_source',
2777 'pending_bin_contents',
2789 # The following tables have primary keys but sqlalchemy
2790 # version 0.5 fails to reflect them correctly with database
2791 # versions before upgrade #41.
2793 #'build_queue_files',
2796 tables_no_primary = (
2798 'changes_pending_files_map',
2799 'changes_pending_source_files',
2800 'changes_pool_files',
2803 'suite_architectures',
2804 'suite_src_formats',
2805 'suite_build_queue_copy',
2807 # see the comment above
2809 'build_queue_files',
# NOTE(review): the tuple below appears to be the (elided) "views"
# sequence referenced in the reflection loop at the end of this method.
2813 'almost_obsolete_all_associations',
2814 'almost_obsolete_src_associations',
2815 'any_associations_source',
2816 'bin_assoc_by_arch',
2817 'bin_associations_binaries',
2818 'binaries_suite_arch',
2819 'binfiles_suite_component_arch',
2822 'newest_all_associations',
2823 'newest_any_associations',
2825 'newest_src_association',
2826 'obsolete_all_associations',
2827 'obsolete_any_associations',
2828 'obsolete_any_by_all_associations',
2829 'obsolete_src_associations',
2831 'src_associations_bin',
2832 'src_associations_src',
2833 'suite_arch_by_name',
2836 # Sqlalchemy version 0.5 fails to reflect the SERIAL type
2837 # correctly and that is why we have to use a workaround. It can
2838 # be removed as soon as we switch to version 0.6.
# Tables with a SERIAL primary key: declare the 'id' column explicitly
# (the workaround described above), then autoload the rest.
2839 for table_name in tables_with_primary:
2840 table = Table(table_name, self.db_meta, \
2841 Column('id', Integer, primary_key = True), \
2842 autoload=True, useexisting=True)
2843 setattr(self, 'tbl_%s' % table_name, table)
# Tables without a primary key: plain reflection.
2845 for table_name in tables_no_primary:
2846 table = Table(table_name, self.db_meta, autoload=True)
2847 setattr(self, 'tbl_%s' % table_name, table)
# Database views, reflected read-only as Table objects.
2849 for view_name in views:
2850 view = Table(view_name, self.db_meta, autoload=True)
2851 setattr(self, 'view_%s' % view_name, view)
# Bind every ORM class defined in this module to its reflected table via
# classical SQLAlchemy mappers, wiring up relations between them.
2853 def __setupmappers(self):
2854 mapper(Architecture, self.tbl_architecture,
2855 properties = dict(arch_id = self.tbl_architecture.c.id,
2856 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2857 order_by='suite_name',
2858 backref=backref('architectures', order_by='arch_string'))))
2860 mapper(Archive, self.tbl_archive,
2861 properties = dict(archive_id = self.tbl_archive.c.id,
2862 archive_name = self.tbl_archive.c.name))
2864 mapper(BinAssociation, self.tbl_bin_associations,
2865 properties = dict(ba_id = self.tbl_bin_associations.c.id,
2866 suite_id = self.tbl_bin_associations.c.suite,
2867 suite = relation(Suite),
2868 binary_id = self.tbl_bin_associations.c.bin,
2869 binary = relation(DBBinary)))
2871 mapper(PendingBinContents, self.tbl_pending_bin_contents,
2872 properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
2873 filename = self.tbl_pending_bin_contents.c.filename,
2874 package = self.tbl_pending_bin_contents.c.package,
2875 version = self.tbl_pending_bin_contents.c.version,
2876 arch = self.tbl_pending_bin_contents.c.arch,
2877 otype = self.tbl_pending_bin_contents.c.type))
2879 mapper(DebContents, self.tbl_deb_contents,
2880 properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
2881 package=self.tbl_deb_contents.c.package,
2882 suite=self.tbl_deb_contents.c.suite,
2883 arch=self.tbl_deb_contents.c.arch,
2884 section=self.tbl_deb_contents.c.section,
2885 filename=self.tbl_deb_contents.c.filename))
2887 mapper(UdebContents, self.tbl_udeb_contents,
2888 properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
2889 package=self.tbl_udeb_contents.c.package,
2890 suite=self.tbl_udeb_contents.c.suite,
2891 arch=self.tbl_udeb_contents.c.arch,
2892 section=self.tbl_udeb_contents.c.section,
2893 filename=self.tbl_udeb_contents.c.filename))
2895 mapper(BuildQueue, self.tbl_build_queue,
2896 properties = dict(queue_id = self.tbl_build_queue.c.id))
2898 mapper(BuildQueueFile, self.tbl_build_queue_files,
2899 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
2900 poolfile = relation(PoolFile, backref='buildqueueinstances')))
2902 mapper(DBBinary, self.tbl_binaries,
2903 properties = dict(binary_id = self.tbl_binaries.c.id,
2904 package = self.tbl_binaries.c.package,
2905 version = self.tbl_binaries.c.version,
2906 maintainer_id = self.tbl_binaries.c.maintainer,
2907 maintainer = relation(Maintainer),
2908 source_id = self.tbl_binaries.c.source,
2909 source = relation(DBSource),
2910 arch_id = self.tbl_binaries.c.architecture,
2911 architecture = relation(Architecture),
2912 poolfile_id = self.tbl_binaries.c.file,
2913 poolfile = relation(PoolFile),
2914 binarytype = self.tbl_binaries.c.type,
2915 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2916 fingerprint = relation(Fingerprint),
2917 install_date = self.tbl_binaries.c.install_date,
# Explicit primaryjoin needed: the FK column is named 'bin', so
# SQLAlchemy cannot infer the join condition automatically.
2918 binassociations = relation(BinAssociation,
2919 primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))
2921 mapper(BinaryACL, self.tbl_binary_acl,
2922 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
2924 mapper(BinaryACLMap, self.tbl_binary_acl_map,
2925 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
2926 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
2927 architecture = relation(Architecture)))
2929 mapper(Component, self.tbl_component,
2930 properties = dict(component_id = self.tbl_component.c.id,
2931 component_name = self.tbl_component.c.name))
2933 mapper(DBConfig, self.tbl_config,
2934 properties = dict(config_id = self.tbl_config.c.id))
2936 mapper(DSCFile, self.tbl_dsc_files,
2937 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2938 source_id = self.tbl_dsc_files.c.source,
2939 source = relation(DBSource),
2940 poolfile_id = self.tbl_dsc_files.c.file,
2941 poolfile = relation(PoolFile)))
2943 mapper(PoolFile, self.tbl_files,
2944 properties = dict(file_id = self.tbl_files.c.id,
2945 filesize = self.tbl_files.c.size,
2946 location_id = self.tbl_files.c.location,
2947 location = relation(Location)))
2949 mapper(Fingerprint, self.tbl_fingerprint,
2950 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2951 uid_id = self.tbl_fingerprint.c.uid,
2952 uid = relation(Uid),
2953 keyring_id = self.tbl_fingerprint.c.keyring,
2954 keyring = relation(Keyring),
2955 source_acl = relation(SourceACL),
2956 binary_acl = relation(BinaryACL)))
2958 mapper(Keyring, self.tbl_keyrings,
2959 properties = dict(keyring_name = self.tbl_keyrings.c.name,
2960 keyring_id = self.tbl_keyrings.c.id))
2962 mapper(DBChange, self.tbl_changes,
2963 properties = dict(change_id = self.tbl_changes.c.id,
2964 poolfiles = relation(PoolFile,
2965 secondary=self.tbl_changes_pool_files,
2966 backref="changeslinks"),
2967 seen = self.tbl_changes.c.seen,
2968 source = self.tbl_changes.c.source,
2969 binaries = self.tbl_changes.c.binaries,
2970 architecture = self.tbl_changes.c.architecture,
2971 distribution = self.tbl_changes.c.distribution,
2972 urgency = self.tbl_changes.c.urgency,
2973 maintainer = self.tbl_changes.c.maintainer,
2974 changedby = self.tbl_changes.c.changedby,
2975 date = self.tbl_changes.c.date,
2976 version = self.tbl_changes.c.version,
2977 files = relation(ChangePendingFile,
2978 secondary=self.tbl_changes_pending_files_map,
2979 backref="changesfile"),
2980 in_queue_id = self.tbl_changes.c.in_queue,
2981 in_queue = relation(PolicyQueue,
2982 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
2983 approved_for_id = self.tbl_changes.c.approved_for))
2985 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
2986 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
2988 mapper(ChangePendingFile, self.tbl_changes_pending_files,
2989 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
2990 filename = self.tbl_changes_pending_files.c.filename,
2991 size = self.tbl_changes_pending_files.c.size,
2992 md5sum = self.tbl_changes_pending_files.c.md5sum,
2993 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
2994 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
2996 mapper(ChangePendingSource, self.tbl_changes_pending_source,
2997 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
2998 change = relation(DBChange),
# maintainer and changedby both reference tbl_maintainer, so each
# relation needs its own explicit primaryjoin to disambiguate.
2999 maintainer = relation(Maintainer,
3000 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3001 changedby = relation(Maintainer,
3002 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3003 fingerprint = relation(Fingerprint),
3004 source_files = relation(ChangePendingFile,
3005 secondary=self.tbl_changes_pending_source_files,
3006 backref="pending_sources")))
3009 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3010 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3011 keyring = relation(Keyring, backref="keyring_acl_map"),
3012 architecture = relation(Architecture)))
3014 mapper(Location, self.tbl_location,
3015 properties = dict(location_id = self.tbl_location.c.id,
3016 component_id = self.tbl_location.c.component,
3017 component = relation(Component),
3018 archive_id = self.tbl_location.c.archive,
3019 archive = relation(Archive),
3020 archive_type = self.tbl_location.c.type))
3022 mapper(Maintainer, self.tbl_maintainer,
3023 properties = dict(maintainer_id = self.tbl_maintainer.c.id))
3025 mapper(NewComment, self.tbl_new_comments,
3026 properties = dict(comment_id = self.tbl_new_comments.c.id))
3028 mapper(Override, self.tbl_override,
3029 properties = dict(suite_id = self.tbl_override.c.suite,
3030 suite = relation(Suite),
3031 package = self.tbl_override.c.package,
3032 component_id = self.tbl_override.c.component,
3033 component = relation(Component),
3034 priority_id = self.tbl_override.c.priority,
3035 priority = relation(Priority),
3036 section_id = self.tbl_override.c.section,
3037 section = relation(Section),
3038 overridetype_id = self.tbl_override.c.type,
3039 overridetype = relation(OverrideType)))
3041 mapper(OverrideType, self.tbl_override_type,
3042 properties = dict(overridetype = self.tbl_override_type.c.type,
3043 overridetype_id = self.tbl_override_type.c.id))
3045 mapper(PolicyQueue, self.tbl_policy_queue,
3046 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3048 mapper(Priority, self.tbl_priority,
3049 properties = dict(priority_id = self.tbl_priority.c.id))
3051 mapper(Section, self.tbl_section,
3052 properties = dict(section_id = self.tbl_section.c.id,
3053 section=self.tbl_section.c.section))
3055 mapper(DBSource, self.tbl_source,
3056 properties = dict(source_id = self.tbl_source.c.id,
3057 version = self.tbl_source.c.version,
3058 maintainer_id = self.tbl_source.c.maintainer,
# Explicit primaryjoins again because maintainer/changedby both point
# at tbl_maintainer.
3059 maintainer = relation(Maintainer,
3060 primaryjoin=(self.tbl_source.c.maintainer==self.tbl_maintainer.c.id)),
3061 poolfile_id = self.tbl_source.c.file,
3062 poolfile = relation(PoolFile),
3063 fingerprint_id = self.tbl_source.c.sig_fpr,
3064 fingerprint = relation(Fingerprint),
3065 changedby_id = self.tbl_source.c.changedby,
3066 changedby = relation(Maintainer,
3067 primaryjoin=(self.tbl_source.c.changedby==self.tbl_maintainer.c.id)),
3068 srcfiles = relation(DSCFile,
3069 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3070 srcassociations = relation(SrcAssociation,
3071 primaryjoin=(self.tbl_source.c.id==self.tbl_src_associations.c.source)),
3072 srcuploaders = relation(SrcUploader)))
3074 mapper(SourceACL, self.tbl_source_acl,
3075 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3077 mapper(SrcAssociation, self.tbl_src_associations,
3078 properties = dict(sa_id = self.tbl_src_associations.c.id,
3079 suite_id = self.tbl_src_associations.c.suite,
3080 suite = relation(Suite),
3081 source_id = self.tbl_src_associations.c.source,
3082 source = relation(DBSource)))
3084 mapper(SrcFormat, self.tbl_src_format,
3085 properties = dict(src_format_id = self.tbl_src_format.c.id,
3086 format_name = self.tbl_src_format.c.format_name))
3088 mapper(SrcUploader, self.tbl_src_uploaders,
3089 properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3090 source_id = self.tbl_src_uploaders.c.source,
3091 source = relation(DBSource,
3092 primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3093 maintainer_id = self.tbl_src_uploaders.c.maintainer,
3094 maintainer = relation(Maintainer,
3095 primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3097 mapper(Suite, self.tbl_suite,
3098 properties = dict(suite_id = self.tbl_suite.c.id,
3099 policy_queue = relation(PolicyQueue),
3100 copy_queues = relation(BuildQueue, secondary=self.tbl_suite_build_queue_copy)))
3102 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3103 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3104 suite = relation(Suite, backref='suitesrcformats'),
3105 src_format_id = self.tbl_suite_src_formats.c.src_format,
3106 src_format = relation(SrcFormat)))
3108 mapper(Uid, self.tbl_uid,
3109 properties = dict(uid_id = self.tbl_uid.c.id,
3110 fingerprint = relation(Fingerprint)))
3112 mapper(UploadBlock, self.tbl_upload_blocks,
3113 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3114 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3115 uid = relation(Uid, backref="uploadblocks")))
3117 ## Connection functions
# Build the PostgreSQL connection string from DB::* config values,
# create the engine/metadata, then reflect tables and set up mappers.
3118 def __createconn(self):
3119 from config import Config
# TCP branch: host[:port]/dbname.
# NOTE(review): the enclosing "if cnf[\"DB::Host\"]:" line is elided
# from this listing — confirm against the full file.
3123 connstr = "postgres://%s" % cnf["DB::Host"]
3124 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3125 connstr += ":%s" % cnf["DB::Port"]
3126 connstr += "/%s" % cnf["DB::Name"]
# Unix-socket branch (no host configured); port passed as a query arg.
3129 connstr = "postgres:///%s" % cnf["DB::Name"]
3130 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3131 connstr += "?port=%s" % cnf["DB::Port"]
# echo=self.debug makes SQLAlchemy log all emitted SQL when debugging.
3133 self.db_pg = create_engine(connstr, echo=self.debug)
3134 self.db_meta = MetaData()
3135 self.db_meta.bind = self.db_pg
3136 self.db_smaker = sessionmaker(bind=self.db_pg,
3140 self.__setuptables()
3141 self.__setupmappers()
# NOTE(review): the "def session(self):" line (original ~3143) is elided;
# the return below is its body — a new session from the factory above.
3144 return self.db_smaker()
3146 __all__.append('DBConn')