5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
41 from datetime import datetime, timedelta
42 from errno import ENOENT
43 from tempfile import mkstemp, mkdtemp
45 from inspect import getargspec
48 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
49 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, backref
50 from sqlalchemy import types as sqltypes
52 # Don't remove this, we re-export the exceptions to scripts which import us
53 from sqlalchemy.exc import *
54 from sqlalchemy.orm.exc import NoResultFound
56 # Only import Config until Queue stuff is changed to store its config
58 from config import Config
59 from textutils import fix_maintainer
60 from dak_exceptions import NoSourceFieldError
62 # suppress some deprecation warnings in squeeze related to sqlalchemy
64 warnings.filterwarnings('ignore', \
65 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
67 # TODO: sqlalchemy needs some extra configuration to correctly reflect
68 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
69 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
72 ################################################################################
74 # Patch in support for the debversion field type so that it works during
78 # that is for sqlalchemy 0.6
79 UserDefinedType = sqltypes.UserDefinedType
81 # this one for sqlalchemy 0.5
82 UserDefinedType = sqltypes.TypeEngine
84 class DebVersion(UserDefinedType):
85 def get_col_spec(self):
88 def bind_processor(self, dialect):
91 # ' = None' is needed for sqlalchemy 0.5:
92 def result_processor(self, dialect, coltype = None):
95 sa_major_version = sqlalchemy.__version__[0:3]
96 if sa_major_version in ["0.5", "0.6"]:
97 from sqlalchemy.databases import postgres
98 postgres.ischema_names['debversion'] = DebVersion
100 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
102 ################################################################################
104 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
106 ################################################################################
108 def session_wrapper(fn):
110 Wrapper around common ".., session=None):" handling. If the wrapped
111 function is called without passing 'session', we create a local one
112 and destroy it when the function ends.
114 Also attaches a commit_or_flush method to the session; if we created a
115 local session, this is a synonym for session.commit(), otherwise it is a
116 synonym for session.flush().
119 def wrapped(*args, **kwargs):
120 private_transaction = False
122 # Find the session object
123 session = kwargs.get('session')
126 if len(args) <= len(getargspec(fn)[0]) - 1:
127 # No session specified as last argument or in kwargs
128 private_transaction = True
129 session = kwargs['session'] = DBConn().session()
131 # Session is last argument in args
135 session = args[-1] = DBConn().session()
136 private_transaction = True
138 if private_transaction:
139 session.commit_or_flush = session.commit
141 session.commit_or_flush = session.flush
144 return fn(*args, **kwargs)
146 if private_transaction:
147 # We created a session; close it.
150 wrapped.__doc__ = fn.__doc__
151 wrapped.func_name = fn.func_name
155 __all__.append('session_wrapper')
157 ################################################################################
159 class Architecture(object):
160 def __init__(self, arch_string = None, description = None):
161 self.arch_string = arch_string
162 self.description = description
164 def __eq__(self, val):
165 if isinstance(val, str):
166 return (self.arch_string== val)
167 # This signals to use the normal comparison operator
168 return NotImplemented
170 def __ne__(self, val):
171 if isinstance(val, str):
172 return (self.arch_string != val)
173 # This signals to use the normal comparison operator
174 return NotImplemented
177 return '<Architecture %s>' % self.arch_string
179 __all__.append('Architecture')
182 def get_architecture(architecture, session=None):
184 Returns database id for given C{architecture}.
186 @type architecture: string
187 @param architecture: The name of the architecture
189 @type session: Session
190 @param session: Optional SQLA session object (a temporary one will be
191 generated if not supplied)
194 @return: Architecture object for the given arch (None if not present)
197 q = session.query(Architecture).filter_by(arch_string=architecture)
201 except NoResultFound:
204 __all__.append('get_architecture')
206 # TODO: should be removed because the implementation is too trivial
208 def get_architecture_suites(architecture, session=None):
210 Returns list of Suite objects for given C{architecture} name
212 @type architecture: str
213 @param architecture: Architecture name to search for
215 @type session: Session
216 @param session: Optional SQL session object (a temporary one will be
217 generated if not supplied)
220 @return: list of Suite objects for the given name (may be empty)
223 return get_architecture(architecture, session).suites
225 __all__.append('get_architecture_suites')
227 ################################################################################
229 class Archive(object):
230 def __init__(self, *args, **kwargs):
234 return '<Archive %s>' % self.archive_name
236 __all__.append('Archive')
239 def get_archive(archive, session=None):
241 returns database id for given C{archive}.
243 @type archive: string
244 @param archive: the name of the archive
246 @type session: Session
247 @param session: Optional SQLA session object (a temporary one will be
248 generated if not supplied)
251 @return: Archive object for the given name (None if not present)
254 archive = archive.lower()
256 q = session.query(Archive).filter_by(archive_name=archive)
260 except NoResultFound:
263 __all__.append('get_archive')
265 ################################################################################
267 class BinAssociation(object):
268 def __init__(self, *args, **kwargs):
272 return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
274 __all__.append('BinAssociation')
276 ################################################################################
278 class BinContents(object):
279 def __init__(self, *args, **kwargs):
283 return '<BinContents (%s, %s)>' % (self.binary, self.filename)
285 __all__.append('BinContents')
287 ################################################################################
289 class DBBinary(object):
290 def __init__(self, *args, **kwargs):
294 return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)
296 __all__.append('DBBinary')
299 def get_suites_binary_in(package, session=None):
301 Returns list of Suite objects which given C{package} name is in
304 @param package: DBBinary package name to search for
307 @return: list of Suite objects for the given package
310 return session.query(Suite).join(BinAssociation).join(DBBinary).filter_by(package=package).all()
312 __all__.append('get_suites_binary_in')
315 def get_binary_from_id(binary_id, session=None):
317 Returns DBBinary object for given C{id}
320 @param binary_id: Id of the required binary
322 @type session: Session
323 @param session: Optional SQLA session object (a temporary one will be
324 generated if not supplied)
327 @return: DBBinary object for the given binary (None if not present)
330 q = session.query(DBBinary).filter_by(binary_id=binary_id)
334 except NoResultFound:
337 __all__.append('get_binary_from_id')
340 def get_binaries_from_name(package, version=None, architecture=None, session=None):
342 Returns list of DBBinary objects for given C{package} name
345 @param package: DBBinary package name to search for
347 @type version: str or None
348 @param version: Version to search for (or None)
350 @type architecture: str, list or None
351 @param architecture: Architectures to limit to (or None if no limit)
353 @type session: Session
354 @param session: Optional SQL session object (a temporary one will be
355 generated if not supplied)
358 @return: list of DBBinary objects for the given name (may be empty)
361 q = session.query(DBBinary).filter_by(package=package)
363 if version is not None:
364 q = q.filter_by(version=version)
366 if architecture is not None:
367 if not isinstance(architecture, list):
368 architecture = [architecture]
369 q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))
375 __all__.append('get_binaries_from_name')
378 def get_binaries_from_source_id(source_id, session=None):
380 Returns list of DBBinary objects for given C{source_id}
383 @param source_id: source_id to search for
385 @type session: Session
386 @param session: Optional SQL session object (a temporary one will be
387 generated if not supplied)
390 @return: list of DBBinary objects for the given name (may be empty)
393 return session.query(DBBinary).filter_by(source_id=source_id).all()
395 __all__.append('get_binaries_from_source_id')
398 def get_binary_from_name_suite(package, suitename, session=None):
399 ### For dak examine-package
400 ### XXX: Doesn't use object API yet
402 sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
403 FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
404 WHERE b.package='%(package)s'
406 AND fi.location = l.id
407 AND l.component = c.id
410 AND su.suite_name %(suitename)s
411 ORDER BY b.version DESC"""
413 return session.execute(sql % {'package': package, 'suitename': suitename})
415 __all__.append('get_binary_from_name_suite')
418 def get_binary_components(package, suitename, arch, session=None):
419 # Check for packages that have moved from one component to another
420 query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
421 WHERE b.package=:package AND s.suite_name=:suitename
422 AND (a.arch_string = :arch OR a.arch_string = 'all')
423 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
424 AND f.location = l.id
425 AND l.component = c.id
428 vals = {'package': package, 'suitename': suitename, 'arch': arch}
430 return session.execute(query, vals)
432 __all__.append('get_binary_components')
434 ################################################################################
436 class BinaryACL(object):
437 def __init__(self, *args, **kwargs):
441 return '<BinaryACL %s>' % self.binary_acl_id
443 __all__.append('BinaryACL')
445 ################################################################################
447 class BinaryACLMap(object):
448 def __init__(self, *args, **kwargs):
452 return '<BinaryACLMap %s>' % self.binary_acl_map_id
454 __all__.append('BinaryACLMap')
456 ################################################################################
461 ArchiveDir "%(archivepath)s";
462 OverrideDir "%(overridedir)s";
463 CacheDir "%(cachedir)s";
468 Packages::Compress ". bzip2 gzip";
469 Sources::Compress ". bzip2 gzip";
474 bindirectory "incoming"
479 BinOverride "override.sid.all3";
480 BinCacheDB "packages-accepted.db";
482 FileList "%(filelist)s";
485 Packages::Extensions ".deb .udeb";
488 bindirectory "incoming/"
491 BinOverride "override.sid.all3";
492 SrcOverride "override.sid.all3.src";
493 FileList "%(filelist)s";
497 class BuildQueue(object):
498 def __init__(self, *args, **kwargs):
502 return '<BuildQueue %s>' % self.queue_name
504 def write_metadata(self, starttime, force=False):
505 # Do we write out metafiles?
506 if not (force or self.generate_metadata):
509 session = DBConn().session().object_session(self)
511 fl_fd = fl_name = ac_fd = ac_name = None
513 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
514 startdir = os.getcwd()
517 # Grab files we want to include
518 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
519 # Write file list with newer files
520 (fl_fd, fl_name) = mkstemp()
522 os.write(fl_fd, '%s\n' % n.fullpath)
527 # Write minimal apt.conf
528 # TODO: Remove hardcoding from template
529 (ac_fd, ac_name) = mkstemp()
530 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
532 'cachedir': cnf["Dir::Cache"],
533 'overridedir': cnf["Dir::Override"],
537 # Run apt-ftparchive generate
538 os.chdir(os.path.dirname(ac_name))
539 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
541 # Run apt-ftparchive release
542 # TODO: Eww - fix this
543 bname = os.path.basename(self.path)
547 # We have to remove the Release file otherwise it'll be included in the
550 os.unlink(os.path.join(bname, 'Release'))
554 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
556 # Crude hack with open and append, but this whole section is and should be redone.
557 if self.notautomatic:
558 release=open("Release", "a")
559 release.write("NotAutomatic: yes")
564 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
565 if cnf.has_key("Dinstall::SigningPubKeyring"):
566 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
568 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
570 # Move the files if we got this far
571 os.rename('Release', os.path.join(bname, 'Release'))
573 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
575 # Clean up any left behind files
602 def clean_and_update(self, starttime, Logger, dryrun=False):
603 """WARNING: This routine commits for you"""
604 session = DBConn().session().object_session(self)
606 if self.generate_metadata and not dryrun:
607 self.write_metadata(starttime)
609 # Grab files older than our execution time
610 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
616 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
618 Logger.log(["I: Removing %s from the queue" % o.fullpath])
619 os.unlink(o.fullpath)
622 # If it wasn't there, don't worry
623 if e.errno == ENOENT:
626 # TODO: Replace with proper logging call
627 Logger.log(["E: Could not remove %s" % o.fullpath])
634 for f in os.listdir(self.path):
635 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
639 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
640 except NoResultFound:
641 fp = os.path.join(self.path, f)
643 Logger.log(["I: Would remove unused link %s" % fp])
645 Logger.log(["I: Removing unused link %s" % fp])
649 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
651 def add_file_from_pool(self, poolfile):
652 """Copies a file into the pool. Assumes that the PoolFile object is
653 attached to the same SQLAlchemy session as the Queue object is.
655 The caller is responsible for committing after calling this function."""
656 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
658 # Check if we have a file of this name or this ID already
659 for f in self.queuefiles:
660 if f.fileid is not None and f.fileid == poolfile.file_id or \
661 f.poolfile.filename == poolfile_basename:
662 # In this case, update the BuildQueueFile entry so we
663 # don't remove it too early
664 f.lastused = datetime.now()
665 DBConn().session().object_session(poolfile).add(f)
668 # Prepare BuildQueueFile object
669 qf = BuildQueueFile()
670 qf.build_queue_id = self.queue_id
671 qf.lastused = datetime.now()
672 qf.filename = poolfile_basename
674 targetpath = poolfile.fullpath
675 queuepath = os.path.join(self.path, poolfile_basename)
679 # We need to copy instead of symlink
681 utils.copy(targetpath, queuepath)
682 # NULL in the fileid field implies a copy
685 os.symlink(targetpath, queuepath)
686 qf.fileid = poolfile.file_id
690 # Get the same session as the PoolFile is using and add the qf to it
691 DBConn().session().object_session(poolfile).add(qf)
696 __all__.append('BuildQueue')
699 def get_build_queue(queuename, session=None):
701 Returns BuildQueue object for given C{queue name}, creating it if it does not
704 @type queuename: string
705 @param queuename: The name of the queue
707 @type session: Session
708 @param session: Optional SQLA session object (a temporary one will be
709 generated if not supplied)
712 @return: BuildQueue object for the given queue
715 q = session.query(BuildQueue).filter_by(queue_name=queuename)
719 except NoResultFound:
722 __all__.append('get_build_queue')
724 ################################################################################
726 class BuildQueueFile(object):
727 def __init__(self, *args, **kwargs):
731 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
735 return os.path.join(self.buildqueue.path, self.filename)
738 __all__.append('BuildQueueFile')
740 ################################################################################
742 class ChangePendingBinary(object):
743 def __init__(self, *args, **kwargs):
747 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
749 __all__.append('ChangePendingBinary')
751 ################################################################################
753 class ChangePendingFile(object):
754 def __init__(self, *args, **kwargs):
758 return '<ChangePendingFile %s>' % self.change_pending_file_id
760 __all__.append('ChangePendingFile')
762 ################################################################################
764 class ChangePendingSource(object):
765 def __init__(self, *args, **kwargs):
769 return '<ChangePendingSource %s>' % self.change_pending_source_id
771 __all__.append('ChangePendingSource')
773 ################################################################################
775 class Component(object):
776 def __init__(self, *args, **kwargs):
779 def __eq__(self, val):
780 if isinstance(val, str):
781 return (self.component_name == val)
782 # This signals to use the normal comparison operator
783 return NotImplemented
785 def __ne__(self, val):
786 if isinstance(val, str):
787 return (self.component_name != val)
788 # This signals to use the normal comparison operator
789 return NotImplemented
792 return '<Component %s>' % self.component_name
795 __all__.append('Component')
798 def get_component(component, session=None):
800 Returns database id for given C{component}.
802 @type component: string
803 @param component: The name of the override type
806 @return: the database id for the given component
809 component = component.lower()
811 q = session.query(Component).filter_by(component_name=component)
815 except NoResultFound:
818 __all__.append('get_component')
820 ################################################################################
822 class DBConfig(object):
823 def __init__(self, *args, **kwargs):
827 return '<DBConfig %s>' % self.name
829 __all__.append('DBConfig')
831 ################################################################################
834 def get_or_set_contents_file_id(filename, session=None):
836 Returns database id for given filename.
838 If no matching file is found, a row is inserted.
840 @type filename: string
841 @param filename: The filename
842 @type session: SQLAlchemy
843 @param session: Optional SQL session object (a temporary one will be
844 generated if not supplied). If not passed, a commit will be performed at
845 the end of the function, otherwise the caller is responsible for committing.
848 @return: the database id for the given component
851 q = session.query(ContentFilename).filter_by(filename=filename)
854 ret = q.one().cafilename_id
855 except NoResultFound:
856 cf = ContentFilename()
857 cf.filename = filename
859 session.commit_or_flush()
860 ret = cf.cafilename_id
864 __all__.append('get_or_set_contents_file_id')
867 def get_contents(suite, overridetype, section=None, session=None):
869 Returns contents for a suite / overridetype combination, limiting
870 to a section if not None.
873 @param suite: Suite object
875 @type overridetype: OverrideType
876 @param overridetype: OverrideType object
878 @type section: Section
879 @param section: Optional section object to limit results to
881 @type session: SQLAlchemy
882 @param session: Optional SQL session object (a temporary one will be
883 generated if not supplied)
886 @return: ResultsProxy object set up to return tuples of (filename, section,
890 # find me all of the contents for a given suite
891 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
895 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
896 JOIN content_file_names n ON (c.filename=n.id)
897 JOIN binaries b ON (b.id=c.binary_pkg)
898 JOIN override o ON (o.package=b.package)
899 JOIN section s ON (s.id=o.section)
900 WHERE o.suite = :suiteid AND o.type = :overridetypeid
901 AND b.type=:overridetypename"""
903 vals = {'suiteid': suite.suite_id,
904 'overridetypeid': overridetype.overridetype_id,
905 'overridetypename': overridetype.overridetype}
907 if section is not None:
908 contents_q += " AND s.id = :sectionid"
909 vals['sectionid'] = section.section_id
911 contents_q += " ORDER BY fn"
913 return session.execute(contents_q, vals)
915 __all__.append('get_contents')
917 ################################################################################
919 class ContentFilepath(object):
920 def __init__(self, *args, **kwargs):
924 return '<ContentFilepath %s>' % self.filepath
926 __all__.append('ContentFilepath')
929 def get_or_set_contents_path_id(filepath, session=None):
931 Returns database id for given path.
933 If no matching file is found, a row is inserted.
935 @type filepath: string
936 @param filepath: The filepath
938 @type session: SQLAlchemy
939 @param session: Optional SQL session object (a temporary one will be
940 generated if not supplied). If not passed, a commit will be performed at
941 the end of the function, otherwise the caller is responsible for committing.
944 @return: the database id for the given path
947 q = session.query(ContentFilepath).filter_by(filepath=filepath)
950 ret = q.one().cafilepath_id
951 except NoResultFound:
952 cf = ContentFilepath()
953 cf.filepath = filepath
955 session.commit_or_flush()
956 ret = cf.cafilepath_id
960 __all__.append('get_or_set_contents_path_id')
962 ################################################################################
964 class ContentAssociation(object):
965 def __init__(self, *args, **kwargs):
969 return '<ContentAssociation %s>' % self.ca_id
971 __all__.append('ContentAssociation')
973 def insert_content_paths(binary_id, fullpaths, session=None):
975 Make sure given path is associated with given binary id
978 @param binary_id: the id of the binary
979 @type fullpaths: list
980 @param fullpaths: the list of paths of the file being associated with the binary
981 @type session: SQLAlchemy session
982 @param session: Optional SQLAlchemy session. If this is passed, the caller
983 is responsible for ensuring a transaction has begun and committing the
984 results or rolling back based on the result code. If not passed, a commit
985 will be performed at the end of the function, otherwise the caller is
986 responsible for committing.
988 @return: True upon success
993 session = DBConn().session()
998 def generate_path_dicts():
999 for fullpath in fullpaths:
1000 if fullpath.startswith( './' ):
1001 fullpath = fullpath[2:]
1003 yield {'filename':fullpath, 'id': binary_id }
1005 for d in generate_path_dicts():
1006 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1015 traceback.print_exc()
1017 # Only rollback if we set up the session ourself
1024 __all__.append('insert_content_paths')
1026 ################################################################################
1028 class DSCFile(object):
1029 def __init__(self, *args, **kwargs):
1033 return '<DSCFile %s>' % self.dscfile_id
1035 __all__.append('DSCFile')
1038 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1040 Returns a list of DSCFiles which may be empty
1042 @type dscfile_id: int (optional)
1043 @param dscfile_id: the dscfile_id of the DSCFiles to find
1045 @type source_id: int (optional)
1046 @param source_id: the source id related to the DSCFiles to find
1048 @type poolfile_id: int (optional)
1049 @param poolfile_id: the poolfile id related to the DSCFiles to find
1052 @return: Possibly empty list of DSCFiles
1055 q = session.query(DSCFile)
1057 if dscfile_id is not None:
1058 q = q.filter_by(dscfile_id=dscfile_id)
1060 if source_id is not None:
1061 q = q.filter_by(source_id=source_id)
1063 if poolfile_id is not None:
1064 q = q.filter_by(poolfile_id=poolfile_id)
1068 __all__.append('get_dscfiles')
1070 ################################################################################
1072 class PoolFile(object):
1073 def __init__(self, filename = None, location = None, filesize = -1, \
1075 self.filename = filename
1076 self.location = location
1077 self.filesize = filesize
1078 self.md5sum = md5sum
1081 return '<PoolFile %s>' % self.filename
1085 return os.path.join(self.location.path, self.filename)
1087 __all__.append('PoolFile')
1090 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1093 (ValidFileFound [boolean or None], PoolFile object or None)
1095 @type filename: string
1096 @param filename: the filename of the file to check against the DB
1099 @param filesize: the size of the file to check against the DB
1101 @type md5sum: string
1102 @param md5sum: the md5sum of the file to check against the DB
1104 @type location_id: int
1105 @param location_id: the id of the location to look in
1108 @return: Tuple of length 2.
1109 - If more than one file found with that name: (C{None}, C{None})
1110 - If valid pool file found: (C{True}, C{PoolFile object})
1111 - If valid pool file not found:
1112 - (C{False}, C{None}) if no file found
1113 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
1116 q = session.query(PoolFile).filter_by(filename=filename)
1117 q = q.join(Location).filter_by(location_id=location_id)
1127 if obj.md5sum != md5sum or obj.filesize != int(filesize):
1135 __all__.append('check_poolfile')
1138 def get_poolfile_by_id(file_id, session=None):
1140 Returns a PoolFile objects or None for the given id
1143 @param file_id: the id of the file to look for
1145 @rtype: PoolFile or None
1146 @return: either the PoolFile object or None
1149 q = session.query(PoolFile).filter_by(file_id=file_id)
1153 except NoResultFound:
1156 __all__.append('get_poolfile_by_id')
1160 def get_poolfile_by_name(filename, location_id=None, session=None):
1162 Returns an array of PoolFile objects for the given filename and
1163 (optionally) location_id
1165 @type filename: string
1166 @param filename: the filename of the file to check against the DB
1168 @type location_id: int
1169 @param location_id: the id of the location to look in (optional)
1172 @return: array of PoolFile objects
1175 q = session.query(PoolFile).filter_by(filename=filename)
1177 if location_id is not None:
1178 q = q.join(Location).filter_by(location_id=location_id)
1182 __all__.append('get_poolfile_by_name')
1185 def get_poolfile_like_name(filename, session=None):
1187 Returns an array of PoolFile objects which are like the given name
1189 @type filename: string
1190 @param filename: the filename of the file to check against the DB
1193 @return: array of PoolFile objects
1196 # TODO: There must be a way of properly using bind parameters with %FOO%
1197 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1201 __all__.append('get_poolfile_like_name')
1204 def add_poolfile(filename, datadict, location_id, session=None):
1206 Add a new file to the pool
1208 @type filename: string
1209 @param filename: filename
1211 @type datadict: dict
1212 @param datadict: dict with needed data
1214 @type location_id: int
1215 @param location_id: database id of the location
1218 @return: the PoolFile object created
1220 poolfile = PoolFile()
1221 poolfile.filename = filename
1222 poolfile.filesize = datadict["size"]
1223 poolfile.md5sum = datadict["md5sum"]
1224 poolfile.sha1sum = datadict["sha1sum"]
1225 poolfile.sha256sum = datadict["sha256sum"]
1226 poolfile.location_id = location_id
1228 session.add(poolfile)
1229 # Flush to get a file id (NB: This is not a commit)
1234 __all__.append('add_poolfile')
1236 ################################################################################
1238 class Fingerprint(object):
    def __init__(self, fingerprint = None):
        # Hex fingerprint string of an OpenPGP key (or None until set).
        self.fingerprint = fingerprint
1243 return '<Fingerprint %s>' % self.fingerprint
1245 __all__.append('Fingerprint')
1248 def get_fingerprint(fpr, session=None):
1250 Returns Fingerprint object for given fpr.
1253 @param fpr: The fpr to find / add
1255 @type session: SQLAlchemy
1256 @param session: Optional SQL session object (a temporary one will be
1257 generated if not supplied).
1260 @return: the Fingerprint object for the given fpr or None
1263 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1267 except NoResultFound:
1272 __all__.append('get_fingerprint')
1275 def get_or_set_fingerprint(fpr, session=None):
1277 Returns Fingerprint object for given fpr.
1279 If no matching fpr is found, a row is inserted.
1282 @param fpr: The fpr to find / add
1284 @type session: SQLAlchemy
1285 @param session: Optional SQL session object (a temporary one will be
1286 generated if not supplied). If not passed, a commit will be performed at
1287 the end of the function, otherwise the caller is responsible for commiting.
1288 A flush will be performed either way.
1291 @return: the Fingerprint object for the given fpr
1294 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1298 except NoResultFound:
1299 fingerprint = Fingerprint()
1300 fingerprint.fingerprint = fpr
1301 session.add(fingerprint)
1302 session.commit_or_flush()
1307 __all__.append('get_or_set_fingerprint')
1309 ################################################################################
1311 # Helper routine for Keyring class
def get_ldap_name(entry):
    """Build a display name from an LDAP directory entry.

    Concatenates the first value of each of the cn/mn/sn attributes,
    skipping empty values and the '-' placeholder.
    """
    parts = []
    for k in ["cn", "mn", "sn"]:
        ret = entry.get(k)
        # Skip absent attributes, empty strings and '-' placeholders.
        if ret and ret[0] != "" and ret[0] != "-":
            parts.append(ret[0])
    return " ".join(parts)
1320 ################################################################################
class Keyring(object):
    # gpg command used by load_keys(); %s receives the keyring path.
    # --with-colons produces the machine-parseable records split on ':' below;
    # the doubled --fingerprint makes gpg emit fpr records for subkeys too.
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    def __init__(self, *args, **kwargs):
        # NOTE(review): constructor body not visible in this excerpt.

        # __repr__ (def line not visible here): identify the keyring by name.
        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        # gpg escapes non-ASCII bytes as \xNN sequences; undo that.
        # The capturing group in re.split leaves the \xNN tokens at the odd
        # indices of esclist, so only those elements are converted back.
        esclist = re.split(r'(\\x..)', txt)
        for x in range(1,len(esclist),2):
            esclist[x] = "%c" % (int(esclist[x][2:],16))
        return "".join(esclist)

    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        (name, address) = email.Utils.parseaddr(uid)
        # Strip any parenthesised comment from the real-name part.
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        # NOTE(review): one or more lines are elided here in this excerpt.
        return (name, address)

    def load_keys(self, keyring):
        # Parse the given keyring file with gpg and populate self.keys /
        # self.fpr_lookup. Requires database initialisation first.
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")

        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # Primary key record; field 9 carries the primary user id.
                # NOTE(review): the assignment of `key` (normally the keyid
                # from field[4]) is not visible in this excerpt.
                (name, addr) = self.parse_address(field[9])
                self.keys[key]["email"] = addr
                self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
            elif key and field[0] == "sub" and len(field) >= 12:
                # Subkey record: capability field 11 contains 's' when the
                # subkey is signing-capable.
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                # Remember the first uid that actually has an email address.
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                # Fingerprint of a signing-capable key: record it and index
                # it for reverse fingerprint -> key lookup.
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        # Cross-reference the loaded keys against the LDAP directory and
        # record the uid for each known fingerprint.
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        # Anonymous bind.
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        # NOTE(review): the loop over the LDAP results is partially elided in
        # this excerpt; the following lines are its visible body.
        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]

        for f in fingerprints:
            key = self.fpr_lookup.get(f, None)
            # Skip fingerprints that are not in the loaded keyring.
            if key not in self.keys:
            self.keys[key]["uid"] = uid

            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, name)
            byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        # Derive uids from the keyring itself (format % email) when no LDAP
        # data is available.
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # Key without a usable email: mark it with the invalid uid.
                self.keys[x]["uid"] = format % "invalid-uid"

                # NOTE(review): else-branch header elided in this excerpt.
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

        # Record a single shared placeholder entry for all invalid keys.
        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')
@session_wrapper
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """

    q = session.query(Keyring).filter_by(keyring_name=keyring)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_keyring')
1466 ################################################################################
class KeyringACLMap(object):
    """ORM stub mapping a keyring to an ACL; columns come from the mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<KeyringACLMap %s>' % (self.keyring_acl_map_id,)

__all__.append('KeyringACLMap')
1477 ################################################################################
class DBChange(object):
    """ORM stub for one uploaded .changes file."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBChange %s>' % (self.changesname,)

    def clean_from_queue(self):
        # NOTE(review): DBConn().session() constructs a session only to call
        # object_session(self) on it — looks redundant; confirm intent.
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries
        self.poolfiles = []

        # Remove changes_pending_files references
        self.files = []

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None

__all__.append('DBChange')
@session_wrapper
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """

    q = session.query(DBChange).filter_by(changesname=filename)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_dbchange')
1526 ################################################################################
class Location(object):
    """ORM stub for an archive location (a pool directory on disk)."""

    def __init__(self, path = None):
        self.path = path
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def __repr__(self):
        return '<Location %s (%s)>' % (self.path, self.location_id)
# Publish Location in this module's star-import API.
__all__.append('Location')
@session_wrapper
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive.

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """

    q = session.query(Location).filter_by(path=location)

    # Narrow the query only for the restrictions actually supplied.
    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_location')
1573 ################################################################################
class Maintainer(object):
    """ORM stub for one maintainer name/email string."""

    def __init__(self, name = None):
        self.name = name

    def __repr__(self):
        return '''<Maintainer '%s' (%s)>''' % (self.name, self.maintainer_id)

    def get_split_maintainer(self):
        """Split the stored name via fix_maintainer(); empty fields when unset."""
        name = getattr(self, 'name', None)
        if name is None:
            return ('', '', '', '')
        return fix_maintainer(name.strip())

__all__.append('Maintainer')
@session_wrapper
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """

    q = session.query(Maintainer).filter_by(name=name)

    try:
        ret = q.one()
    except NoResultFound:
        # Insert a new row; commit_or_flush ensures it gets an id.
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
        ret = maintainer

    return ret

__all__.append('get_or_set_maintainer')
@session_wrapper
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """

    # Primary-key lookup; returns None for an unknown id.
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1641 ################################################################################
class NewComment(object):
    """ORM stub for a reviewer comment on a package in the NEW queue."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        desc = (self.package, self.version, self.comment_id)
        return '''<NewComment for '%s %s' (%s)>''' % desc

__all__.append('NewComment')
@session_wrapper
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """

    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # `count() > 0` is already a bool — the redundant bool() wrapper
    # around the comparison was dropped.
    return q.count() > 0

__all__.append('has_new_comment')
@session_wrapper
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters.

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """

    q = session.query(NewComment)

    # Apply only the filters that were actually given.
    if package is not None:
        q = q.filter_by(package=package)
    if version is not None:
        q = q.filter_by(version=version)
    if comment_id is not None:
        q = q.filter_by(comment_id=comment_id)

    return q.all()

__all__.append('get_new_comments')
1711 ################################################################################
class Override(object):
    """ORM stub for one override entry (package, suite, component, type)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Override %s (%s)>' % (self.package, self.suite_id)

__all__.append('Override')
@session_wrapper
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to. If
                  None, don't limit. Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to. If None, don't limit. Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to. If None, don't limit. Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """

    q = session.query(Override).filter_by(package=package)

    # Each restriction accepts a scalar or a list; normalise to a list
    # and join against the corresponding table.
    if suite is not None:
        if not isinstance(suite, list):
            suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list):
            component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list):
            overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    return q.all()

__all__.append('get_override')
1770 ################################################################################
class OverrideType(object):
    """ORM stub for an override type (deb / udeb / dsc)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<OverrideType %s>' % (self.overridetype,)

__all__.append('OverrideType')
@session_wrapper
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the OverrideType object for the given override type
             (C{None} if not present)
    """

    q = session.query(OverrideType).filter_by(overridetype=override_type)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_override_type')
1806 ################################################################################
class DebContents(object):
    """ORM stub for one (binary package, file path) contents entry."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo in the repr label: "DebConetnts" -> "DebContents".
        return '<DebContents %s: %s>' % (self.package.package,self.file)
# Publish DebContents in this module's star-import API.
__all__.append('DebContents')
class UdebContents(object):
    """ORM stub for one (udeb package, file path) contents entry."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo in the repr label: "UdebConetnts" -> "UdebContents".
        return '<UdebContents %s: %s>' % (self.package.package,self.file)
# Publish UdebContents in this module's star-import API.
__all__.append('UdebContents')
class PendingBinContents(object):
    """ORM stub for contents recorded before a binary is accepted."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PendingBinContents %s>' % (self.contents_id,)

__all__.append('PendingBinContents')
def insert_pending_content_paths(package,
                                 # NOTE(review): the remaining parameters are
                                 # not visible in this excerpt; fullpaths and
                                 # the optional session are documented below.
    """
    Make sure given paths are temporarily associated with given
    package.

    @type package: dict
    @param package: the package to associate with should have been read in from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem
    """

    # Track whether we opened our own session (and therefore own the
    # transaction).
    privatetrans = False
        # No session supplied: open a private one.
        session = DBConn().session()

    # Resolve the package's architecture to its database id.
    arch = get_architecture(package['Architecture'], session)
    arch_id = arch.arch_id

    # Remove any already existing recorded files for this package
    q = session.query(PendingBinContents)
    q = q.filter_by(package=package['Package'])
    q = q.filter_by(version=package['Version'])
    q = q.filter_by(architecture=arch_id)

    for fullpath in fullpaths:
        # Normalise leading "./" so paths are stored relative.
        if fullpath.startswith( "./" ):
            fullpath = fullpath[2:]

        pca = PendingBinContents()
        pca.package = package['Package']
        pca.version = package['Version']
        pca.architecture = arch_id
            # NOTE(review): the udeb/deb branch headers are elided in this
            # excerpt; 8/7 are the magic contents-type codes.
            pca.type = 8 # gross
            pca.type = 7 # also gross

    # Only commit if we set up the session ourself
    except Exception, e:
        traceback.print_exc()
        # Only rollback if we set up the session ourself
1911 ################################################################################
class PolicyQueue(object):
    """ORM stub for a policy queue (e.g. NEW, byhand)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PolicyQueue %s>' % (self.queue_name,)

__all__.append('PolicyQueue')
@session_wrapper
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """

    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_policy_queue')
@session_wrapper
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """

    q = session.query(PolicyQueue).filter_by(path=pathname)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_policy_queue_from_path')
1972 ################################################################################
class Priority(object):
    """ORM stub for a package priority (required, important, ...)."""

    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        # Allow direct comparison against a plain priority string.
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.priority == val

    def __ne__(self, val):
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.priority != val

    def __repr__(self):
        return '<Priority %s (%s)>' % (self.priority, self.priority_id)
# Publish Priority in this module's star-import API.
__all__.append('Priority')
@session_wrapper
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """

    q = session.query(Priority).filter_by(priority=priority)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_priority')
@session_wrapper
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """

    q = session.query(Priority)
    # Build the name -> id mapping in one pass over the table.
    return dict((x.priority, x.priority_id) for x in q.all())

__all__.append('get_priorities')
2042 ################################################################################
class Section(object):
    """ORM stub for an archive section (admin, devel, libs, ...)."""

    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        # Allow direct comparison against a plain section string.
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.section == val

    def __ne__(self, val):
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.section != val

    def __repr__(self):
        return '<Section %s>' % (self.section,)
# Publish Section in this module's star-import API.
__all__.append('Section')
@session_wrapper
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """

    q = session.query(Section).filter_by(section=section)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_section')
@session_wrapper
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """

    q = session.query(Section)
    # Build the name -> id mapping in one pass over the table.
    return dict((x.section, x.section_id) for x in q.all())

__all__.append('get_sections')
2112 ################################################################################
class DBSource(object):
    """ORM stub for one source package version in the archive."""

    def __init__(self, maintainer = None, changedby = None):
        # Maintainer of record and uploader of this particular version.
        self.maintainer = maintainer
        self.changedby = changedby

    def __repr__(self):
        return '<DBSource %s (%s)>' % (self.source, self.version)
# Publish DBSource in this module's star-import API.
__all__.append('DBSource')
# NOTE(review): mutable default argument `suites = ["any"]` — safe only if
# the list is never mutated inside the function; TODO confirm.
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source)

        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = cnf.ValueList("SuiteMappings")[:]
        # Parse "map <from> <to>" / "silent-map <from> <to>" entries into
        # (from, to) pairs.
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]

                # NOTE(review): the fix-point loop expanding the suite set `s`
                # through the mappings is partially elided in this excerpt.
                if x[1] in s and x[0] not in s:

        q = q.join(SrcAssociation).join(Suite)
        q = q.filter(Suite.suite_name.in_(s))

        # Reduce the query results to a list of version numbers
        ql = [ j.version for j in q.all() ]

        # Try (1) exact match of the expected version.
        if source_version in ql:

            # Try (2) a binary-only NMU: strip the +bN suffix and compare
            # against the plain source version.
            from daklib.regexes import re_bin_only_nmu
            orig_source_version = re_bin_only_nmu.sub('', source_version)
            if orig_source_version in ql:

        # No source found so return not ok

__all__.append('source_exists')
@session_wrapper
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """

    q = session.query(Suite)
    q = q.join(SrcAssociation)
    q = q.join(DBSource).filter_by(source=source)
    return q.all()

__all__.append('get_suites_source_in')
@session_wrapper
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect. If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """

    q = session.query(DBSource).filter_by(source=source)

    # Narrow the query only for the restrictions actually supplied.
    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    return q.all()

__all__.append('get_sources_from_name')
@session_wrapper
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: DBSource
    @return: the DBSource object for I{source} in I{suite} (C{None} if not found)
    """

    q = session.query(SrcAssociation)
    q = q.join('source').filter_by(source=source)
    q = q.join('suite').filter_by(suite_name=suite)

    try:
        # Follow the association through to the source row itself.
        return q.one().source
    except NoResultFound:
        return None

__all__.append('get_source_in_suite')
2273 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    # Record an accepted .dsc upload: creates the DBSource row, its pool
    # files, suite associations, dsc_files entries and uploader records.
    # Returns (source, dsc_component, dsc_location_id, pfs).
    entry = u.pkg.files[filename]

    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        # Pool path = per-source pool directory + the .dsc's own name.
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    # Associate the new source with every suite named in the .changes.
    for suite_name in u.pkg.changes["distribution"].keys():
        sa = SrcAssociation()
        sa.source_id = source.source_id
        sa.suite_id = get_suite(suite_name).suite_id

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            # Look the file up by (name, size, md5, location).
            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

            # If still not found, add it
            if files_id is None:
                # HACK: Force sha1sum etc into dentry
                dentry["sha1sum"] = dfentry["sha1sum"]
                dentry["sha256sum"] = dfentry["sha256sum"]
                poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                pfs.append(poolfile)
                files_id = poolfile.file_id

            # NOTE(review): else-branch header elided in this excerpt.
            poolfile = get_poolfile_by_id(files_id, session)
            if poolfile is None:
                utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
            pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        # Split the Uploaders field on ", " between addresses.
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    for up_id in uploader_ids:
        # De-duplicate uploader entries.
        if added_ids.has_key(up_id):
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))

        su.maintainer_id = up_id
        su.source_id = source.source_id

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs.  That info is in 'dbtype', whilst 'type' is
    'deb' for both of them
    """
    entry = u.pkg.files[filename]

    # NOTE(review): construction of the binary ORM object (`bin`) is not
    # visible in this excerpt.
    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Resolve the pool file for this binary.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        # File already known: reuse its id.
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]

        # NOTE(review): else-branch header elided in this excerpt.
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # A binary must map to exactly one source package version.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID

    # Add BinAssociations
    for suite_name in u.pkg.changes["distribution"].keys():
        ba = BinAssociation()
        ba.binary_id = bin.binary_id
        ba.suite_id = get_suite(suite_name).suite_id

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

__all__.append('add_deb_to_db')
2454 ################################################################################
class SourceACL(object):
    """ORM stub for a source upload ACL."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SourceACL %s>' % (self.source_acl_id,)

__all__.append('SourceACL')
2465 ################################################################################
class SrcAssociation(object):
    """ORM stub linking a source package version to a suite."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        desc = (self.sa_id, self.source, self.suite)
        return '<SrcAssociation %s (%s, %s)>' % desc

__all__.append('SrcAssociation')
2476 ################################################################################
class SrcFormat(object):
    """ORM stub for a source package format (1.0, 3.0 (quilt), ...)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcFormat %s>' % (self.format_name,)

__all__.append('SrcFormat')
2487 ################################################################################
class SrcUploader(object):
    """ORM stub linking a source package to one of its uploaders."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcUploader %s>' % (self.uploader_id,)

__all__.append('SrcUploader')
2498 ################################################################################
# (display label, Suite attribute) pairs — presumably consumed when
# rendering a suite's settings as text; confirm against Suite's methods.
# NOTE(review): one or more entries may be elided in this excerpt.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
class Suite(object):
    """ORM stub for one suite (unstable, testing, ...)."""

    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def __repr__(self):
        return '<Suite %s>' % (self.suite_name,)

    def __eq__(self, val):
        # Allow direct comparison against a plain suite-name string.
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.suite_name == val

    def __ne__(self, val):
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.suite_name != val

    def details(self):
        """Render the suite's non-empty SUITE_FIELDS as 'Label: value' lines."""
        ret = []
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            if val is not None:
                ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry
        (Default False)

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry
        (Default False)

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """

        q = object_session(self).query(Architecture). \
            filter(Architecture.suites.contains(self))
        if skipsrc:
            q = q.filter(Architecture.arch_string != 'source')
        if skipall:
            q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()
# Publish Suite in this module's star-import API.
__all__.append('Suite')
@session_wrapper
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """

    q = session.query(Suite).filter_by(suite_name=suite)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_suite')
2596 ################################################################################
2598 # TODO: should be removed because the implementation is too trivial
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """
    # Thin wrapper: delegate to the Suite object's own accessor.
    return get_suite(suite, session).get_architectures(skipsrc, skipall)
# Export get_suite_architectures via __all__ for wildcard imports.
__all__.append('get_suite_architectures')

################################################################################
class SuiteSrcFormat(object):
    """
    ORM class for the suite_src_formats mapping table (which source
    formats a suite accepts).  Attributes (suite_id, src_format_id) are
    populated by the SQLAlchemy mapper in DBConn.__setupmappers.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2636 __all__.append('SuiteSrcFormat')
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """
    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    return q.all()
# Export get_suite_src_formats via __all__ for wildcard imports.
__all__.append('get_suite_src_formats')

################################################################################
2666 def __init__(self, uid = None, name = None):
2670 def __eq__(self, val):
2671 if isinstance(val, str):
2672 return (self.uid == val)
2673 # This signals to use the normal comparison operator
2674 return NotImplemented
2676 def __ne__(self, val):
2677 if isinstance(val, str):
2678 return (self.uid != val)
2679 # This signals to use the normal comparison operator
2680 return NotImplemented
2683 return '<Uid %s (%s)>' % (self.uid, self.name)
2685 __all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        # Not present yet: insert a new row and make it visible to the
        # caller's session before returning it.
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        ret = uid

    return ret
2719 __all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """
    Returns the Uid attached to the given fingerprint string C{fpr},
    or None if no fingerprint row matches.
    """
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None
# Export get_uid_from_fingerprint via __all__ for wildcard imports.
__all__.append('get_uid_from_fingerprint')

################################################################################
class UploadBlock(object):
    """
    ORM class for the upload_blocks table.  Attributes (source,
    upload_block_id, fingerprint, uid, ...) are populated by the
    SQLAlchemy mapper in DBConn.__setupmappers.
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
# Export the UploadBlock ORM class via __all__ for wildcard imports.
__all__.append('UploadBlock')

################################################################################
class DBConn(object):
    """
    database module init.

    Borg-style shared-state singleton: every instance shares the same
    __dict__, so the engine, metadata and session factory are only set
    up once per process.
    """
    # Shared across all instances (read by __init__ below).
    __shared_state = {}

    def __init__(self, *args, **kwargs):
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            # 'in' instead of the removed/deprecated dict.has_key();
            # identical semantics on Python 2.
            self.debug = 'debug' in kwargs
            self.__createconn()
    def __setuptables(self):
        """Reflect database tables/views into self.tbl_* and self.view_* attributes."""
        # NOTE(review): several lines of this method are not visible in this
        # chunk of the file (additional table names, the tuples' closing
        # parentheses and the opening of the views tuple).  The lists below
        # are incomplete as shown — confirm against the full source.
        tables_with_primary = (
            'changes_pending_binaries',
            'changes_pending_files',
            'changes_pending_source',
            'pending_bin_contents',
        # The following tables have primary keys but sqlalchemy
        # version 0.5 fails to reflect them correctly with database
        # versions before upgrade #41.
        #'build_queue_files',

        tables_no_primary = (
            'changes_pending_files_map',
            'changes_pending_source_files',
            'changes_pool_files',
            'suite_architectures',
            'suite_src_formats',
            'suite_build_queue_copy',
            # see the comment above
            'build_queue_files',

            # NOTE(review): the names below appear to belong to a separate
            # `views` tuple whose opening line is not visible here — TODO
            # confirm.
            'almost_obsolete_all_associations',
            'almost_obsolete_src_associations',
            'any_associations_source',
            'bin_assoc_by_arch',
            'bin_associations_binaries',
            'binaries_suite_arch',
            'binfiles_suite_component_arch',
            'newest_all_associations',
            'newest_any_associations',
            'newest_src_association',
            'obsolete_all_associations',
            'obsolete_any_associations',
            'obsolete_any_by_all_associations',
            'obsolete_src_associations',
            'src_associations_bin',
            'src_associations_src',
            'suite_arch_by_name',

        # Sqlalchemy version 0.5 fails to reflect the SERIAL type
        # correctly and that is why we have to use a workaround. It can
        # be removed as soon as we switch to version 0.6.
        for table_name in tables_with_primary:
            table = Table(table_name, self.db_meta, \
                Column('id', Integer, primary_key = True), \
                autoload=True, useexisting=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for table_name in tables_no_primary:
            table = Table(table_name, self.db_meta, autoload=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for view_name in views:
            view = Table(view_name, self.db_meta, autoload=True)
            setattr(self, 'view_%s' % view_name, view)
2860 def __setupmappers(self):
2861 mapper(Architecture, self.tbl_architecture,
2862 properties = dict(arch_id = self.tbl_architecture.c.id,
2863 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2864 order_by='suite_name',
2865 backref=backref('architectures', order_by='arch_string'))))
2867 mapper(Archive, self.tbl_archive,
2868 properties = dict(archive_id = self.tbl_archive.c.id,
2869 archive_name = self.tbl_archive.c.name))
2871 mapper(BinAssociation, self.tbl_bin_associations,
2872 properties = dict(ba_id = self.tbl_bin_associations.c.id,
2873 suite_id = self.tbl_bin_associations.c.suite,
2874 suite = relation(Suite),
2875 binary_id = self.tbl_bin_associations.c.bin,
2876 binary = relation(DBBinary)))
2878 mapper(PendingBinContents, self.tbl_pending_bin_contents,
2879 properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
2880 filename = self.tbl_pending_bin_contents.c.filename,
2881 package = self.tbl_pending_bin_contents.c.package,
2882 version = self.tbl_pending_bin_contents.c.version,
2883 arch = self.tbl_pending_bin_contents.c.arch,
2884 otype = self.tbl_pending_bin_contents.c.type))
2886 mapper(DebContents, self.tbl_deb_contents,
2887 properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
2888 package=self.tbl_deb_contents.c.package,
2889 suite=self.tbl_deb_contents.c.suite,
2890 arch=self.tbl_deb_contents.c.arch,
2891 section=self.tbl_deb_contents.c.section,
2892 filename=self.tbl_deb_contents.c.filename))
2894 mapper(UdebContents, self.tbl_udeb_contents,
2895 properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
2896 package=self.tbl_udeb_contents.c.package,
2897 suite=self.tbl_udeb_contents.c.suite,
2898 arch=self.tbl_udeb_contents.c.arch,
2899 section=self.tbl_udeb_contents.c.section,
2900 filename=self.tbl_udeb_contents.c.filename))
2902 mapper(BuildQueue, self.tbl_build_queue,
2903 properties = dict(queue_id = self.tbl_build_queue.c.id))
2905 mapper(BuildQueueFile, self.tbl_build_queue_files,
2906 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
2907 poolfile = relation(PoolFile, backref='buildqueueinstances')))
2909 mapper(DBBinary, self.tbl_binaries,
2910 properties = dict(binary_id = self.tbl_binaries.c.id,
2911 package = self.tbl_binaries.c.package,
2912 version = self.tbl_binaries.c.version,
2913 maintainer_id = self.tbl_binaries.c.maintainer,
2914 maintainer = relation(Maintainer),
2915 source_id = self.tbl_binaries.c.source,
2916 source = relation(DBSource),
2917 arch_id = self.tbl_binaries.c.architecture,
2918 architecture = relation(Architecture),
2919 poolfile_id = self.tbl_binaries.c.file,
2920 poolfile = relation(PoolFile),
2921 binarytype = self.tbl_binaries.c.type,
2922 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2923 fingerprint = relation(Fingerprint),
2924 install_date = self.tbl_binaries.c.install_date,
2925 binassociations = relation(BinAssociation,
2926 primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))
2928 mapper(BinaryACL, self.tbl_binary_acl,
2929 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
2931 mapper(BinaryACLMap, self.tbl_binary_acl_map,
2932 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
2933 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
2934 architecture = relation(Architecture)))
2936 mapper(Component, self.tbl_component,
2937 properties = dict(component_id = self.tbl_component.c.id,
2938 component_name = self.tbl_component.c.name))
2940 mapper(DBConfig, self.tbl_config,
2941 properties = dict(config_id = self.tbl_config.c.id))
2943 mapper(DSCFile, self.tbl_dsc_files,
2944 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2945 source_id = self.tbl_dsc_files.c.source,
2946 source = relation(DBSource),
2947 poolfile_id = self.tbl_dsc_files.c.file,
2948 poolfile = relation(PoolFile)))
2950 mapper(PoolFile, self.tbl_files,
2951 properties = dict(file_id = self.tbl_files.c.id,
2952 filesize = self.tbl_files.c.size,
2953 location_id = self.tbl_files.c.location,
2954 location = relation(Location,
2955 # using lazy='dynamic' in the back
2956 # reference because we have A LOT of
2957 # files in one location
2958 backref=backref('files', lazy='dynamic'))))
2960 mapper(Fingerprint, self.tbl_fingerprint,
2961 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2962 uid_id = self.tbl_fingerprint.c.uid,
2963 uid = relation(Uid),
2964 keyring_id = self.tbl_fingerprint.c.keyring,
2965 keyring = relation(Keyring),
2966 source_acl = relation(SourceACL),
2967 binary_acl = relation(BinaryACL)))
2969 mapper(Keyring, self.tbl_keyrings,
2970 properties = dict(keyring_name = self.tbl_keyrings.c.name,
2971 keyring_id = self.tbl_keyrings.c.id))
2973 mapper(DBChange, self.tbl_changes,
2974 properties = dict(change_id = self.tbl_changes.c.id,
2975 poolfiles = relation(PoolFile,
2976 secondary=self.tbl_changes_pool_files,
2977 backref="changeslinks"),
2978 seen = self.tbl_changes.c.seen,
2979 source = self.tbl_changes.c.source,
2980 binaries = self.tbl_changes.c.binaries,
2981 architecture = self.tbl_changes.c.architecture,
2982 distribution = self.tbl_changes.c.distribution,
2983 urgency = self.tbl_changes.c.urgency,
2984 maintainer = self.tbl_changes.c.maintainer,
2985 changedby = self.tbl_changes.c.changedby,
2986 date = self.tbl_changes.c.date,
2987 version = self.tbl_changes.c.version,
2988 files = relation(ChangePendingFile,
2989 secondary=self.tbl_changes_pending_files_map,
2990 backref="changesfile"),
2991 in_queue_id = self.tbl_changes.c.in_queue,
2992 in_queue = relation(PolicyQueue,
2993 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
2994 approved_for_id = self.tbl_changes.c.approved_for))
2996 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
2997 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
2999 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3000 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3001 filename = self.tbl_changes_pending_files.c.filename,
3002 size = self.tbl_changes_pending_files.c.size,
3003 md5sum = self.tbl_changes_pending_files.c.md5sum,
3004 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3005 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3007 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3008 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3009 change = relation(DBChange),
3010 maintainer = relation(Maintainer,
3011 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3012 changedby = relation(Maintainer,
3013 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3014 fingerprint = relation(Fingerprint),
3015 source_files = relation(ChangePendingFile,
3016 secondary=self.tbl_changes_pending_source_files,
3017 backref="pending_sources")))
3020 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3021 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3022 keyring = relation(Keyring, backref="keyring_acl_map"),
3023 architecture = relation(Architecture)))
3025 mapper(Location, self.tbl_location,
3026 properties = dict(location_id = self.tbl_location.c.id,
3027 component_id = self.tbl_location.c.component,
3028 component = relation(Component),
3029 archive_id = self.tbl_location.c.archive,
3030 archive = relation(Archive),
3031 # FIXME: the 'type' column is old cruft and
3032 # should be removed in the future.
3033 archive_type = self.tbl_location.c.type))
3035 mapper(Maintainer, self.tbl_maintainer,
3036 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3037 maintains_sources = relation(DBSource, backref='maintainer',
3038 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3039 changed_sources = relation(DBSource, backref='changedby',
3040 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))))
3042 mapper(NewComment, self.tbl_new_comments,
3043 properties = dict(comment_id = self.tbl_new_comments.c.id))
3045 mapper(Override, self.tbl_override,
3046 properties = dict(suite_id = self.tbl_override.c.suite,
3047 suite = relation(Suite),
3048 package = self.tbl_override.c.package,
3049 component_id = self.tbl_override.c.component,
3050 component = relation(Component),
3051 priority_id = self.tbl_override.c.priority,
3052 priority = relation(Priority),
3053 section_id = self.tbl_override.c.section,
3054 section = relation(Section),
3055 overridetype_id = self.tbl_override.c.type,
3056 overridetype = relation(OverrideType)))
3058 mapper(OverrideType, self.tbl_override_type,
3059 properties = dict(overridetype = self.tbl_override_type.c.type,
3060 overridetype_id = self.tbl_override_type.c.id))
3062 mapper(PolicyQueue, self.tbl_policy_queue,
3063 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3065 mapper(Priority, self.tbl_priority,
3066 properties = dict(priority_id = self.tbl_priority.c.id))
3068 mapper(Section, self.tbl_section,
3069 properties = dict(section_id = self.tbl_section.c.id,
3070 section=self.tbl_section.c.section))
3072 mapper(DBSource, self.tbl_source,
3073 properties = dict(source_id = self.tbl_source.c.id,
3074 version = self.tbl_source.c.version,
3075 maintainer_id = self.tbl_source.c.maintainer,
3076 poolfile_id = self.tbl_source.c.file,
3077 poolfile = relation(PoolFile),
3078 fingerprint_id = self.tbl_source.c.sig_fpr,
3079 fingerprint = relation(Fingerprint),
3080 changedby_id = self.tbl_source.c.changedby,
3081 srcfiles = relation(DSCFile,
3082 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3083 suites = relation(Suite, secondary=self.tbl_src_associations,
3085 srcuploaders = relation(SrcUploader)))
3087 mapper(SourceACL, self.tbl_source_acl,
3088 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3090 mapper(SrcAssociation, self.tbl_src_associations,
3091 properties = dict(sa_id = self.tbl_src_associations.c.id,
3092 suite_id = self.tbl_src_associations.c.suite,
3093 suite = relation(Suite),
3094 source_id = self.tbl_src_associations.c.source,
3095 source = relation(DBSource)))
3097 mapper(SrcFormat, self.tbl_src_format,
3098 properties = dict(src_format_id = self.tbl_src_format.c.id,
3099 format_name = self.tbl_src_format.c.format_name))
3101 mapper(SrcUploader, self.tbl_src_uploaders,
3102 properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3103 source_id = self.tbl_src_uploaders.c.source,
3104 source = relation(DBSource,
3105 primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3106 maintainer_id = self.tbl_src_uploaders.c.maintainer,
3107 maintainer = relation(Maintainer,
3108 primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3110 mapper(Suite, self.tbl_suite,
3111 properties = dict(suite_id = self.tbl_suite.c.id,
3112 policy_queue = relation(PolicyQueue),
3113 copy_queues = relation(BuildQueue, secondary=self.tbl_suite_build_queue_copy)))
3115 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3116 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3117 suite = relation(Suite, backref='suitesrcformats'),
3118 src_format_id = self.tbl_suite_src_formats.c.src_format,
3119 src_format = relation(SrcFormat)))
3121 mapper(Uid, self.tbl_uid,
3122 properties = dict(uid_id = self.tbl_uid.c.id,
3123 fingerprint = relation(Fingerprint)))
3125 mapper(UploadBlock, self.tbl_upload_blocks,
3126 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3127 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3128 uid = relation(Uid, backref="uploadblocks")))
3130 ## Connection functions
3131 def __createconn(self):
3132 from config import Config
3136 connstr = "postgres://%s" % cnf["DB::Host"]
3137 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3138 connstr += ":%s" % cnf["DB::Port"]
3139 connstr += "/%s" % cnf["DB::Name"]
3142 connstr = "postgres:///%s" % cnf["DB::Name"]
3143 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3144 connstr += "?port=%s" % cnf["DB::Port"]
3146 self.db_pg = create_engine(connstr, echo=self.debug)
3147 self.db_meta = MetaData()
3148 self.db_meta.bind = self.db_pg
3149 self.db_smaker = sessionmaker(bind=self.db_pg,
3153 self.__setuptables()
3154 self.__setupmappers()
3157 return self.db_smaker()
3159 __all__.append('DBConn')