5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
41 from datetime import datetime, timedelta
42 from errno import ENOENT
43 from tempfile import mkstemp, mkdtemp
45 from inspect import getargspec
48 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
49 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, backref
50 from sqlalchemy import types as sqltypes
52 # Don't remove this, we re-export the exceptions to scripts which import us
53 from sqlalchemy.exc import *
54 from sqlalchemy.orm.exc import NoResultFound
56 # Only import Config until Queue stuff is changed to store its config
58 from config import Config
59 from textutils import fix_maintainer
60 from dak_exceptions import NoSourceFieldError
62 # suppress some deprecation warnings in squeeze related to sqlalchemy
64 warnings.filterwarnings('ignore', \
65 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
67 # TODO: sqlalchemy needs some extra configuration to correctly reflect
68 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
69 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
72 ################################################################################
74 # Patch in support for the debversion field type so that it works during
# NOTE(review): this listing has gaps (original source lines are missing
# throughout); comments describe only what is visible here.
# Select the base class for the custom 'debversion' column type depending on
# the installed SQLAlchemy: 0.6 provides UserDefinedType, 0.5 falls back to
# TypeEngine. The full file presumably wraps these in try/except -- TODO
# confirm against the complete source.
78 # that is for sqlalchemy 0.6
79 UserDefinedType = sqltypes.UserDefinedType
81 # this one for sqlalchemy 0.5
82 UserDefinedType = sqltypes.TypeEngine
# Custom SQLAlchemy type for PostgreSQL's debversion column type (Debian
# version strings). Method bodies are not visible in this extraction.
84 class DebVersion(UserDefinedType):
85 def get_col_spec(self):
88 def bind_processor(self, dialect):
91 # ' = None' is needed for sqlalchemy 0.5:
92 def result_processor(self, dialect, coltype = None):
# Register the debversion type with the postgres dialect for the supported
# SQLAlchemy versions only; anything else is refused outright.
95 sa_major_version = sqlalchemy.__version__[0:3]
96 if sa_major_version in ["0.5", "0.6"]:
97 from sqlalchemy.databases import postgres
98 postgres.ischema_names['debversion'] = DebVersion
100 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
102 ################################################################################
# Public API of this module; extended with .append() after each definition.
104 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
106 ################################################################################
# Decorator: guarantees the wrapped function has a usable 'session', creating
# (and presumably closing -- the finally-block is not visible in this
# extraction, TODO confirm) a private one when the caller supplied none.
108 def session_wrapper(fn):
110 Wrapper around common ".., session=None):" handling. If the wrapped
111 function is called without passing 'session', we create a local one
112 and destroy it when the function ends.
114 Also attaches a commit_or_flush method to the session; if we created a
115 local session, this is a synonym for session.commit(), otherwise it is a
116 synonym for session.flush().
119 def wrapped(*args, **kwargs):
120 private_transaction = False
122 # Find the session object
123 session = kwargs.get('session')
# getargspec(fn)[0] is the wrapped function's positional parameter list;
# if fewer args than (params - 1) were given, 'session' was not passed.
126 if len(args) <= len(getargspec(fn)[0]) - 1:
127 # No session specified as last argument or in kwargs
128 private_transaction = True
129 session = kwargs['session'] = DBConn().session()
131 # Session is last argument in args
135 session = args[-1] = DBConn().session()
136 private_transaction = True
# commit_or_flush: commit for a private session, flush for a caller's one,
# so callees can persist work without stealing the caller's transaction.
138 if private_transaction:
139 session.commit_or_flush = session.commit
141 session.commit_or_flush = session.flush
144 return fn(*args, **kwargs)
146 if private_transaction:
147 # We created a session; close it.
# Preserve the wrapped function's metadata (pre-functools.wraps style).
150 wrapped.__doc__ = fn.__doc__
151 wrapped.func_name = fn.func_name
155 __all__.append('session_wrapper')
157 ################################################################################
# ORM class for the 'architecture' table (mapping defined elsewhere in the
# file). Supports direct equality comparison against plain strings.
159 class Architecture(object):
160 def __init__(self, arch_string = None, description = None):
161 self.arch_string = arch_string
162 self.description = description
# Allow e.g. arch == "amd64"; non-strings fall back to default comparison.
164 def __eq__(self, val):
165 if isinstance(val, str):
166 return (self.arch_string== val)
167 # This signals to use the normal comparison operator
168 return NotImplemented
170 def __ne__(self, val):
171 if isinstance(val, str):
172 return (self.arch_string != val)
173 # This signals to use the normal comparison operator
174 return NotImplemented
# __repr__ body (def line not visible in this extraction):
177 return '<Architecture %s>' % self.arch_string
179 __all__.append('Architecture')
# Presumably decorated with @session_wrapper (decorator line not visible in
# this extraction) -- TODO confirm.
182 def get_architecture(architecture, session=None):
184 Returns Architecture object for given C{architecture} name.
186 @type architecture: string
187 @param architecture: The name of the architecture
189 @type session: Session
190 @param session: Optional SQLA session object (a temporary one will be
191 generated if not supplied)
194 @return: Architecture object for the given arch (None if not present)
197 q = session.query(Architecture).filter_by(arch_string=architecture)
201 except NoResultFound:
204 __all__.append('get_architecture')
206 # TODO: should be removed because the implementation is too trivial
208 def get_architecture_suites(architecture, session=None):
210 Returns list of Suite objects for given C{architecture} name
212 @type architecture: str
213 @param architecture: Architecture name to search for
215 @type session: Session
216 @param session: Optional SQL session object (a temporary one will be
217 generated if not supplied)
220 @return: list of Suite objects for the given name (may be empty)
# NOTE(review): raises AttributeError if the architecture does not exist
# (get_architecture returns None in that case).
223 return get_architecture(architecture, session).suites
225 __all__.append('get_architecture_suites')
227 ################################################################################
# ORM class for the 'archive' table; attributes populated by the mapper.
229 class Archive(object):
230 def __init__(self, *args, **kwargs):
234 return '<Archive %s>' % self.archive_name
236 __all__.append('Archive')
239 def get_archive(archive, session=None):
241 returns database id for given C{archive}.
243 @type archive: string
244 @param archive: the name of the archive
246 @type session: Session
247 @param session: Optional SQLA session object (a temporary one will be
248 generated if not supplied)
251 @return: Archive object for the given name (None if not present)
# Archive names are matched case-insensitively by lowering the input.
254 archive = archive.lower()
256 q = session.query(Archive).filter_by(archive_name=archive)
260 except NoResultFound:
263 __all__.append('get_archive')
265 ################################################################################
# ORM class for the 'bin_associations' table (binary <-> suite links).
267 class BinAssociation(object):
268 def __init__(self, *args, **kwargs):
272 return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
274 __all__.append('BinAssociation')
276 ################################################################################
# ORM class for the 'bin_contents' table (files shipped in a binary).
278 class BinContents(object):
279 def __init__(self, *args, **kwargs):
283 return '<BinContents (%s, %s)>' % (self.binary, self.filename)
285 __all__.append('BinContents')
287 ################################################################################
# ORM class for the 'binaries' table. Named DBBinary to avoid clashing with
# other Binary classes used elsewhere in dak.
289 class DBBinary(object):
290 def __init__(self, *args, **kwargs):
294 return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)
296 __all__.append('DBBinary')
299 def get_suites_binary_in(package, session=None):
301 Returns list of Suite objects which given C{package} name is in
304 @param package: DBBinary package name to search for
307 @return: list of Suite objects for the given package
310 return session.query(Suite).join(BinAssociation).join(DBBinary).filter_by(package=package).all()
312 __all__.append('get_suites_binary_in')
315 def get_binary_from_id(binary_id, session=None):
317 Returns DBBinary object for given C{id}
320 @param binary_id: Id of the required binary
322 @type session: Session
323 @param session: Optional SQLA session object (a temporary one will be
324 generated if not supplied)
327 @return: DBBinary object for the given binary (None if not present)
330 q = session.query(DBBinary).filter_by(binary_id=binary_id)
331 # try/return lines are not visible in this extraction.
334 except NoResultFound:
337 __all__.append('get_binary_from_id')
340 def get_binaries_from_name(package, version=None, architecture=None, session=None):
342 Returns list of DBBinary objects for given C{package} name
345 @param package: DBBinary package name to search for
347 @type version: str or None
348 @param version: Version to search for (or None)
350 @type architecture: str, list or None
351 @param architecture: Architectures to limit to (or None if no limit)
353 @type session: Session
354 @param session: Optional SQL session object (a temporary one will be
355 generated if not supplied)
358 @return: list of DBBinary objects for the given name (may be empty)
# Build the query incrementally: exact package name, then optional version
# and architecture filters.
361 q = session.query(DBBinary).filter_by(package=package)
363 if version is not None:
364 q = q.filter_by(version=version)
366 if architecture is not None:
# Accept a single arch string or a list of them uniformly.
367 if not isinstance(architecture, list):
368 architecture = [architecture]
369 q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))
375 __all__.append('get_binaries_from_name')
378 def get_binaries_from_source_id(source_id, session=None):
380 Returns list of DBBinary objects for given C{source_id}
383 @param source_id: source_id to search for
385 @type session: Session
386 @param session: Optional SQL session object (a temporary one will be
387 generated if not supplied)
390 @return: list of DBBinary objects for the given source id (may be empty)
393 return session.query(DBBinary).filter_by(source_id=source_id).all()
395 __all__.append('get_binaries_from_source_id')
# Raw-SQL lookup of a binary by package name within a suite expression.
398 def get_binary_from_name_suite(package, suitename, session=None):
399 ### For dak examine-package
400 ### XXX: Doesn't use object API yet
# SECURITY(review): the SQL below is built with % string interpolation, not
# bind parameters -- 'package' and 'suitename' are injectable if they ever
# come from untrusted input. Note 'suitename' is spliced in as a raw SQL
# operator/expression, not a quoted value. Should use bound parameters as
# get_binary_components() below does.
402 sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
403 FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
404 WHERE b.package='%(package)s'
406 AND fi.location = l.id
407 AND l.component = c.id
410 AND su.suite_name %(suitename)s
411 ORDER BY b.version DESC"""
413 return session.execute(sql % {'package': package, 'suitename': suitename})
415 __all__.append('get_binary_from_name_suite')
# Returns the component name(s) a binary package lives in for a given suite
# and architecture (arch-specific or 'all'). Uses bound parameters, so it is
# safe against SQL injection (unlike get_binary_from_name_suite above).
418 def get_binary_components(package, suitename, arch, session=None):
419 # Check for packages that have moved from one component to another
420 query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
421 WHERE b.package=:package AND s.suite_name=:suitename
422 AND (a.arch_string = :arch OR a.arch_string = 'all')
423 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
424 AND f.location = l.id
425 AND l.component = c.id
428 vals = {'package': package, 'suitename': suitename, 'arch': arch}
430 return session.execute(query, vals)
432 __all__.append('get_binary_components')
434 ################################################################################
# ORM class for the 'binary_acl' table.
436 class BinaryACL(object):
437 def __init__(self, *args, **kwargs):
441 return '<BinaryACL %s>' % self.binary_acl_id
443 __all__.append('BinaryACL')
445 ################################################################################
# ORM class for the 'binary_acl_map' table.
447 class BinaryACLMap(object):
448 def __init__(self, *args, **kwargs):
452 return '<BinaryACLMap %s>' % self.binary_acl_map_id
454 __all__.append('BinaryACLMap')
456 ################################################################################
461 ArchiveDir "%(archivepath)s";
462 OverrideDir "%(overridedir)s";
463 CacheDir "%(cachedir)s";
468 Packages::Compress ". bzip2 gzip";
469 Sources::Compress ". bzip2 gzip";
474 bindirectory "incoming"
479 BinOverride "override.sid.all3";
480 BinCacheDB "packages-accepted.db";
482 FileList "%(filelist)s";
485 Packages::Extensions ".deb .udeb";
488 bindirectory "incoming/"
491 BinOverride "override.sid.all3";
492 SrcOverride "override.sid.all3.src";
493 FileList "%(filelist)s";
# ORM class for the 'build_queue' table. Beyond the mapped columns it knows
# how to materialise an apt repository on disk for the queue (Packages/
# Sources/Release files) and how to expire old queue entries.
# NOTE(review): this extraction is missing many lines (try/except/finally
# scaffolding, cnf = Config(), loop headers); comments describe only what is
# visible.
497 class BuildQueue(object):
498 def __init__(self, *args, **kwargs):
502 return '<BuildQueue %s>' % self.queue_name
# Regenerate the queue's apt metadata by shelling out to apt-ftparchive,
# then optionally sign the Release file with gpg.
504 def write_metadata(self, starttime, force=False):
505 # Do we write out metafiles?
506 if not (force or self.generate_metadata):
509 session = DBConn().session().object_session(self)
511 fl_fd = fl_name = ac_fd = ac_name = None
# All architectures except the pseudo-arch 'source'.
513 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
514 startdir = os.getcwd()
517 # Grab files we want to include
# "newer" = queue files still within their stay_of_execution window.
518 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
519 # Write file list with newer files
520 (fl_fd, fl_name) = mkstemp()
522 os.write(fl_fd, '%s\n' % n.fullpath)
527 # Write minimal apt.conf
528 # TODO: Remove hardcoding from template
529 (ac_fd, ac_name) = mkstemp()
530 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
532 'cachedir': cnf["Dir::Cache"],
533 'overridedir': cnf["Dir::Override"],
537 # Run apt-ftparchive generate
538 os.chdir(os.path.dirname(ac_name))
539 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
541 # Run apt-ftparchive release
542 # TODO: Eww - fix this
543 bname = os.path.basename(self.path)
547 # We have to remove the Release file otherwise it'll be included in the
550 os.unlink(os.path.join(bname, 'Release'))
554 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
556 # Crude hack with open and append, but this whole section is and should be redone.
# NOTE(review): file handle is never closed here (close() not visible in
# this extraction) and the written line lacks a trailing newline -- TODO
# verify against the full file.
557 if self.notautomatic:
558 release=open("Release", "a")
559 release.write("NotAutomatic: yes")
# Sign the Release file when a signing key is configured.
564 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
565 if cnf.has_key("Dinstall::SigningPubKeyring"):
566 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
568 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
570 # Move the files if we got this far
571 os.rename('Release', os.path.join(bname, 'Release'))
573 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
575 # Clean up any left behind files
# Expire queue files older than stay_of_execution and unlink stray apt
# metadata files that no longer correspond to a BuildQueueFile row.
602 def clean_and_update(self, starttime, Logger, dryrun=False):
603 """WARNING: This routine commits for you"""
604 session = DBConn().session().object_session(self)
606 if self.generate_metadata and not dryrun:
607 self.write_metadata(starttime)
609 # Grab files older than our execution time
610 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
616 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
618 Logger.log(["I: Removing %s from the queue" % o.fullpath])
619 os.unlink(o.fullpath)
622 # If it wasn't there, don't worry
623 if e.errno == ENOENT:
626 # TODO: Replace with proper logging call
627 Logger.log(["E: Could not remove %s" % o.fullpath])
# Second pass: anything on disk that looks like queue metadata but has no
# database row is an unused link and gets removed.
634 for f in os.listdir(self.path):
635 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
639 r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
640 except NoResultFound:
641 fp = os.path.join(self.path, f)
643 Logger.log(["I: Would remove unused link %s" % fp])
645 Logger.log(["I: Removing unused link %s" % fp])
649 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
651 def add_file_from_pool(self, poolfile):
652 """Copies a file into the pool. Assumes that the PoolFile object is
653 attached to the same SQLAlchemy session as the Queue object is.
655 The caller is responsible for committing after calling this function."""
656 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
658 # Check if we have a file of this name or this ID already
659 for f in self.queuefiles:
660 if f.fileid is not None and f.fileid == poolfile.file_id or \
661 f.poolfile.filename == poolfile_basename:
662 # In this case, update the BuildQueueFile entry so we
663 # don't remove it too early
664 f.lastused = datetime.now()
665 DBConn().session().object_session(poolfile).add(f)
668 # Prepare BuildQueueFile object
669 qf = BuildQueueFile()
670 qf.build_queue_id = self.queue_id
671 qf.lastused = datetime.now()
672 qf.filename = poolfile_basename
674 targetpath = poolfile.fullpath
675 queuepath = os.path.join(self.path, poolfile_basename)
679 # We need to copy instead of symlink
681 utils.copy(targetpath, queuepath)
682 # NULL in the fileid field implies a copy
685 os.symlink(targetpath, queuepath)
686 qf.fileid = poolfile.file_id
690 # Get the same session as the PoolFile is using and add the qf to it
691 DBConn().session().object_session(poolfile).add(qf)
696 __all__.append('BuildQueue')
699 def get_build_queue(queuename, session=None):
701 Returns BuildQueue object for given C{queue name}, creating it if it does not
704 @type queuename: string
705 @param queuename: The name of the queue
707 @type session: Session
708 @param session: Optional SQLA session object (a temporary one will be
709 generated if not supplied)
712 @return: BuildQueue object for the given queue
715 q = session.query(BuildQueue).filter_by(queue_name=queuename)
716 # try/return lines are not visible in this extraction.
719 except NoResultFound:
722 __all__.append('get_build_queue')
724 ################################################################################
# ORM class for the 'build_queue_files' table; one row per file currently
# present in a build queue directory.
726 class BuildQueueFile(object):
727 def __init__(self, *args, **kwargs):
731 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
# fullpath property body (decorator/def lines not visible here): absolute
# on-disk path of the queue file.
735 return os.path.join(self.buildqueue.path, self.filename)
738 __all__.append('BuildQueueFile')
740 ################################################################################
# ORM class for the 'changes_pending_binaries' table.
742 class ChangePendingBinary(object):
743 def __init__(self, *args, **kwargs):
747 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
749 __all__.append('ChangePendingBinary')
751 ################################################################################
# ORM class for the 'changes_pending_files' table.
753 class ChangePendingFile(object):
754 def __init__(self, *args, **kwargs):
758 return '<ChangePendingFile %s>' % self.change_pending_file_id
760 __all__.append('ChangePendingFile')
762 ################################################################################
# ORM class for the 'changes_pending_source' table.
764 class ChangePendingSource(object):
765 def __init__(self, *args, **kwargs):
769 return '<ChangePendingSource %s>' % self.change_pending_source_id
771 __all__.append('ChangePendingSource')
773 ################################################################################
# ORM class for the 'component' table. Like Architecture, compares equal to
# plain strings naming the component.
775 class Component(object):
776 def __init__(self, *args, **kwargs):
779 def __eq__(self, val):
780 if isinstance(val, str):
781 return (self.component_name == val)
782 # This signals to use the normal comparison operator
783 return NotImplemented
785 def __ne__(self, val):
786 if isinstance(val, str):
787 return (self.component_name != val)
788 # This signals to use the normal comparison operator
789 return NotImplemented
792 return '<Component %s>' % self.component_name
795 __all__.append('Component')
798 def get_component(component, session=None):
800 Returns Component object for given C{component} name.
802 @type component: string
803 @param component: The name of the component
806 @return: the Component object for the given component (None if not present)
# Component names are matched case-insensitively by lowering the input.
809 component = component.lower()
811 q = session.query(Component).filter_by(component_name=component)
815 except NoResultFound:
818 __all__.append('get_component')
820 ################################################################################
# ORM class for the 'config' table (key/value configuration in the DB).
822 class DBConfig(object):
823 def __init__(self, *args, **kwargs):
827 return '<DBConfig %s>' % self.name
829 __all__.append('DBConfig')
831 ################################################################################
834 def get_or_set_contents_file_id(filename, session=None):
836 Returns database id for given filename.
838 If no matching file is found, a row is inserted.
840 @type filename: string
841 @param filename: The filename
842 @type session: SQLAlchemy
843 @param session: Optional SQL session object (a temporary one will be
844 generated if not supplied). If not passed, a commit will be performed at
845 the end of the function, otherwise the caller is responsible for committing.
848 @return: the database id for the given filename
# EAFP: try the lookup first, insert on NoResultFound.
851 q = session.query(ContentFilename).filter_by(filename=filename)
854 ret = q.one().cafilename_id
855 except NoResultFound:
856 cf = ContentFilename()
857 cf.filename = filename
# commit_or_flush is attached by the session_wrapper decorator.
859 session.commit_or_flush()
860 ret = cf.cafilename_id
864 __all__.append('get_or_set_contents_file_id')
867 def get_contents(suite, overridetype, section=None, session=None):
869 Returns contents for a suite / overridetype combination, limiting
870 to a section if not None.
873 @param suite: Suite object
875 @type overridetype: OverrideType
876 @param overridetype: OverrideType object
878 @type section: Section
879 @param section: Optional section object to limit results to
881 @type session: SQLAlchemy
882 @param session: Optional SQL session object (a temporary one will be
883 generated if not supplied)
886 @return: ResultsProxy object set up to return tuples of (filename, section,
890 # find me all of the contents for a given suite
# Raw SQL with bound parameters; the optional section filter is appended
# before the ORDER BY clause below.
891 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
895 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
896 JOIN content_file_names n ON (c.filename=n.id)
897 JOIN binaries b ON (b.id=c.binary_pkg)
898 JOIN override o ON (o.package=b.package)
899 JOIN section s ON (s.id=o.section)
900 WHERE o.suite = :suiteid AND o.type = :overridetypeid
901 AND b.type=:overridetypename"""
903 vals = {'suiteid': suite.suite_id,
904 'overridetypeid': overridetype.overridetype_id,
905 'overridetypename': overridetype.overridetype}
907 if section is not None:
908 contents_q += " AND s.id = :sectionid"
909 vals['sectionid'] = section.section_id
911 contents_q += " ORDER BY fn"
913 return session.execute(contents_q, vals)
915 __all__.append('get_contents')
917 ################################################################################
# ORM class for the 'content_file_paths' table.
919 class ContentFilepath(object):
920 def __init__(self, *args, **kwargs):
924 return '<ContentFilepath %s>' % self.filepath
926 __all__.append('ContentFilepath')
929 def get_or_set_contents_path_id(filepath, session=None):
931 Returns database id for given path.
933 If no matching file is found, a row is inserted.
935 @type filepath: string
936 @param filepath: The filepath
938 @type session: SQLAlchemy
939 @param session: Optional SQL session object (a temporary one will be
940 generated if not supplied). If not passed, a commit will be performed at
941 the end of the function, otherwise the caller is responsible for committing.
944 @return: the database id for the given path
# Same get-or-insert pattern as get_or_set_contents_file_id above.
947 q = session.query(ContentFilepath).filter_by(filepath=filepath)
950 ret = q.one().cafilepath_id
951 except NoResultFound:
952 cf = ContentFilepath()
953 cf.filepath = filepath
955 session.commit_or_flush()
956 ret = cf.cafilepath_id
960 __all__.append('get_or_set_contents_path_id')
962 ################################################################################
# ORM class for the 'content_associations' table.
964 class ContentAssociation(object):
965 def __init__(self, *args, **kwargs):
969 return '<ContentAssociation %s>' % self.ca_id
971 __all__.append('ContentAssociation')
973 def insert_content_paths(binary_id, fullpaths, session=None):
975 Make sure given path is associated with given binary id
978 @param binary_id: the id of the binary
979 @type fullpaths: list
980 @param fullpaths: the list of paths of the file being associated with the binary
981 @type session: SQLAlchemy session
982 @param session: Optional SQLAlchemy session. If this is passed, the caller
983 is responsible for ensuring a transaction has begun and committing the
984 results or rolling back based on the result code. If not passed, a commit
985 will be performed at the end of the function, otherwise the caller is
986 responsible for committing.
988 @return: True upon success
# A private session is created only when the caller did not supply one.
993 session = DBConn().session()
# Normalise paths: strip a leading './' before insertion.
998 def generate_path_dicts():
999 for fullpath in fullpaths:
1000 if fullpath.startswith( './' ):
1001 fullpath = fullpath[2:]
1003 yield {'filename':fullpath, 'id': binary_id }
# One INSERT per path, with bound parameters.
1005 for d in generate_path_dicts():
1006 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
1015 traceback.print_exc()
1017 # Only rollback if we set up the session ourself
1024 __all__.append('insert_content_paths')
1026 ################################################################################
# ORM class for the 'dsc_files' table.
1028 class DSCFile(object):
1029 def __init__(self, *args, **kwargs):
1033 return '<DSCFile %s>' % self.dscfile_id
1035 __all__.append('DSCFile')
1038 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
1040 Returns a list of DSCFiles which may be empty
1042 @type dscfile_id: int (optional)
1043 @param dscfile_id: the dscfile_id of the DSCFiles to find
1045 @type source_id: int (optional)
1046 @param source_id: the source id related to the DSCFiles to find
1048 @type poolfile_id: int (optional)
1049 @param poolfile_id: the poolfile id related to the DSCFiles to find
1052 @return: Possibly empty list of DSCFiles
# Each filter is applied only when its id was supplied; with no arguments
# this matches every DSCFile row.
1055 q = session.query(DSCFile)
1057 if dscfile_id is not None:
1058 q = q.filter_by(dscfile_id=dscfile_id)
1060 if source_id is not None:
1061 q = q.filter_by(source_id=source_id)
1063 if poolfile_id is not None:
1064 q = q.filter_by(poolfile_id=poolfile_id)
1068 __all__.append('get_dscfiles')
1070 ################################################################################
# ORM class for the 'files' table (files in the archive pool).
# NOTE(review): the __init__ signature is split across a line continuation
# whose second line (presumably '... md5sum = None):') is missing from this
# extraction; __repr__ and the fullpath property def lines are also missing.
1072 class PoolFile(object):
1073 def __init__(self, filename = None, location = None, filesize = -1, \
1075 self.filename = filename
1076 self.location = location
1077 self.filesize = filesize
1078 self.md5sum = md5sum
1081 return '<PoolFile %s>' % self.filename
# fullpath property body: absolute path = location path + pool filename.
1085 return os.path.join(self.location.path, self.filename)
# Compare recorded size and checksum against supplied values.
1087 def is_valid(self, filesize = -1, md5sum = None):\
1088 return self.filesize == filesize and self.md5sum == md5sum
1090 __all__.append('PoolFile')
1093 def check_poolfile(filename, filesize, md5sum, location_id, session=None):
1096 (ValidFileFound [boolean], PoolFile object or None)
1098 @type filename: string
1099 @param filename: the filename of the file to check against the DB
1102 @param filesize: the size of the file to check against the DB
1104 @type md5sum: string
1105 @param md5sum: the md5sum of the file to check against the DB
1107 @type location_id: int
1108 @param location_id: the id of the location to look in
1111 @return: Tuple of length 2.
1112 - If valid pool file found: (C{True}, C{PoolFile object})
1113 - If valid pool file not found:
1114 - (C{False}, C{None}) if no file found
1115 - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
# Look up the file through its Location's files collection.
1118 poolfile = session.query(Location).get(location_id). \
1119 files.filter_by(filename=filename).first()
1121 if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
1124 return (valid, poolfile)
1126 __all__.append('check_poolfile')
1128 # TODO: the implementation can trivially be inlined at the place where the
1129 # function is called
1131 def get_poolfile_by_id(file_id, session=None):
1133 Returns a PoolFile object or None for the given id
1136 @param file_id: the id of the file to look for
1138 @rtype: PoolFile or None
1139 @return: either the PoolFile object or None
1142 return session.query(PoolFile).get(file_id)
1144 __all__.append('get_poolfile_by_id')
1147 def get_poolfile_like_name(filename, session=None):
1149 Returns an array of PoolFile objects which are like the given name
1151 @type filename: string
1152 @param filename: the filename of the file to check against the DB
1155 @return: array of PoolFile objects
1158 # TODO: There must be a way of properly using bind parameters with %FOO%
# Matches any pool path ending in '/<filename>' ('%%' escapes the literal
# '%' for the subsequent string formatting).
1159 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1163 __all__.append('get_poolfile_like_name')
1166 def add_poolfile(filename, datadict, location_id, session=None):
1168 Add a new file to the pool
1170 @type filename: string
1171 @param filename: filename
1173 @type datadict: dict
1174 @param datadict: dict with needed data; must contain the keys "size",
"md5sum", "sha1sum" and "sha256sum" (KeyError otherwise)
1176 @type location_id: int
1177 @param location_id: database id of the location
1180 @return: the PoolFile object created
1182 poolfile = PoolFile()
1183 poolfile.filename = filename
1184 poolfile.filesize = datadict["size"]
1185 poolfile.md5sum = datadict["md5sum"]
1186 poolfile.sha1sum = datadict["sha1sum"]
1187 poolfile.sha256sum = datadict["sha256sum"]
1188 poolfile.location_id = location_id
1190 session.add(poolfile)
1191 # Flush to get a file id (NB: This is not a commit)
1196 __all__.append('add_poolfile')
1198 ################################################################################
# ORM class for the 'fingerprint' table (OpenPGP key fingerprints).
1200 class Fingerprint(object):
1201 def __init__(self, fingerprint = None):
1202 self.fingerprint = fingerprint
1205 return '<Fingerprint %s>' % self.fingerprint
1207 __all__.append('Fingerprint')
1210 def get_fingerprint(fpr, session=None):
1212 Returns Fingerprint object for given fpr.
1215 @param fpr: The fpr to find / add
1217 @type session: SQLAlchemy
1218 @param session: Optional SQL session object (a temporary one will be
1219 generated if not supplied).
1222 @return: the Fingerprint object for the given fpr or None
1225 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1229 except NoResultFound:
1234 __all__.append('get_fingerprint')
1237 def get_or_set_fingerprint(fpr, session=None):
1239 Returns Fingerprint object for given fpr.
1241 If no matching fpr is found, a row is inserted.
1244 @param fpr: The fpr to find / add
1246 @type session: SQLAlchemy
1247 @param session: Optional SQL session object (a temporary one will be
1248 generated if not supplied). If not passed, a commit will be performed at
1249 the end of the function, otherwise the caller is responsible for committing.
1250 A flush will be performed either way.
1253 @return: the Fingerprint object for the given fpr
# Get-or-insert: look up first, create the row on NoResultFound.
1256 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1260 except NoResultFound:
1261 fingerprint = Fingerprint()
1262 fingerprint.fingerprint = fpr
1263 session.add(fingerprint)
# commit_or_flush is attached by the session_wrapper decorator.
1264 session.commit_or_flush()
1269 __all__.append('get_or_set_fingerprint')
1271 ################################################################################
1273 # Helper routine for Keyring class
# Assemble a display name from an LDAP entry's cn/mn/sn attributes, skipping
# empty values and the "-" placeholder. Interior lines (list setup, the
# per-attribute lookup and append) are missing from this extraction.
1274 def get_ldap_name(entry):
1276 for k in ["cn", "mn", "sn"]:
1278 if ret and ret[0] != "" and ret[0] != "-":
1280 return " ".join(name)
1282 ################################################################################
# ORM class for the 'keyrings' table, plus helpers for parsing gpg keyring
# listings and syncing key owners from LDAP.
1284 class Keyring(object):
# Command template for listing a keyring in machine-readable (colon) format;
# '%s' is filled with the keyring path in load_keys().
1285 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1286 " --with-colons --fingerprint --fingerprint"
1291 def __init__(self, *args, **kwargs):
1295 return '<Keyring %s>' % self.keyring_name
# Decode gpg's \xNN escape sequences in a colon-format field back to the
# literal characters they encode. re.split with a capturing group leaves the
# escapes at the odd indices of the resulting list.
1297 def de_escape_gpg_str(self, txt):
1298 esclist = re.split(r'(\\x..)', txt)
1299 for x in range(1,len(esclist),2):
# '\xNN' -> chr(int('NN', 16))
1300 esclist[x] = "%c" % (int(esclist[x][2:],16))
1301 return "".join(esclist)
    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        (name, address) = email.Utils.parseaddr(uid)
        # Drop any parenthesised comment from the name part, then decode
        # gpg \xNN escapes.  NOTE(review): some lines between the next
        # statement and the return are elided from this view.
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        return (name, address)
    def load_keys(self, keyring):
        """Parse gpg --with-colons output for *keyring* into self.keys and
        self.fpr_lookup.  NOTE(review): 'key' is assigned on lines elided
        from this view."""
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")

        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # New primary key record; column 9 carries the uid text.
                (name, addr) = self.parse_address(field[9])
                self.keys[key]["email"] = addr
                self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
            elif key and field[0] == "sub" and len(field) >= 12:
                # Capabilities column: 's' marks a signing-capable subkey.
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                # Only take the first usable address for a key.
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key
    def import_users_from_ldap(self, session):
        """Match loaded key fingerprints against the Debian LDAP directory.

        Returns a (byname, byuid) pair of dictionaries keyed on uid name and
        uid id respectively.
        """
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        # Anonymous bind; the queried attributes are public.
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        # NOTE(review): the loop header over the LDAP search results is
        # elided from this view; the following lines run once per entry.
            uid = entry["uid"][0]
            name = get_ldap_name(entry)
            fingerprints = entry["keyFingerPrint"]
            for f in fingerprints:
                # Only act on fingerprints we actually loaded from a keyring.
                key = self.fpr_lookup.get(f, None)
                if key not in self.keys:
                self.keys[key]["uid"] = uid

                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, name)
                byname[uid] = (keyid, name)

        return (byname, byuid)
    def generate_users_from_keyring(self, format, session):
        """Create or fetch Uid rows for every key in this keyring.

        C{format} is a pattern containing one %s slot, filled with the key's
        email address (or "invalid-uid" for keys without one).  Returns a
        (byname, byuid) pair of dictionaries.
        """
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # Key carries no usable address; flag it.
                self.keys[x]["uid"] = format % "invalid-uid"
                # NOTE(review): the 'else:' header of this branch is elided
                # from this view.
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

        # Shared placeholder uid covering all keys without a usable address.
        # NOTE(review): its guard condition is elided from this view.
        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)

    # (try/return lines elided from this view)
    except NoResultFound:

__all__.append('get_keyring')
################################################################################

class KeyringACLMap(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided from this view.
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id

__all__.append('KeyringACLMap')
################################################################################

class DBChange(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided from this view.
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        """Detach this .changes entry from its policy queue."""
        # NOTE(review): object_session is invoked on a Session *instance*
        # here; normally the module-level sqlalchemy.orm.object_session(self)
        # is used directly.  Confirm before relying on this.
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries

        # Remove changes_pending_files references

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None

__all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    # (try/return lines elided from this view)
    except NoResultFound:

__all__.append('get_dbchange')
################################################################################

class Location(object):
    def __init__(self, path = None):
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

        # NOTE(review): the __repr__ def line is elided from this view.
        return '<Location %s (%s)>' % (self.path, self.location_id)

__all__.append('Location')
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive.

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """
    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    # (try/return lines elided from this view)
    except NoResultFound:

__all__.append('get_location')
################################################################################

class Maintainer(object):
    def __init__(self, name = None):
        # NOTE(review): the __repr__ def line is elided from this view.
        return '''<Maintainer '%s' (%s)>''' % (self.name, self.maintainer_id)

    def get_split_maintainer(self):
        """Split self.name via fix_maintainer(); returns four empty strings
        when no name is set."""
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)

    except NoResultFound:
        # Not yet known: insert and make visible (commit for a private
        # session, flush otherwise).
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    # Query.get returns None for an unknown primary key.
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
################################################################################

class NewComment(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided from this view.
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # count() > 0 already yields a bool; the bool() wrapper is redundant
    # but harmless.
    return bool(q.count() > 0)

__all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters.

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment)
    # Apply only the filters the caller actually supplied.
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

__all__.append('get_new_comments')
################################################################################

class Override(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided from this view.
        return '<Override %s (%s)>' % (self.package, self.suite_id)

__all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
    None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
    limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
    limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each optional restriction accepts either a scalar or a list.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

__all__.append('get_override')
################################################################################

class OverrideType(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided from this view.
        return '<OverrideType %s>' % self.overridetype

__all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the database id for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)

    # (try/return lines elided from this view)
    except NoResultFound:

__all__.append('get_override_type')
1768 ################################################################################
1770 class DebContents(object):
1771 def __init__(self, *args, **kwargs):
1775 return '<DebConetnts %s: %s>' % (self.package.package,self.file)
1777 __all__.append('DebContents')
1780 class UdebContents(object):
1781 def __init__(self, *args, **kwargs):
1785 return '<UdebConetnts %s: %s>' % (self.package.package,self.file)
1787 __all__.append('UdebContents')
class PendingBinContents(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided from this view.
        return '<PendingBinContents %s>' % self.contents_id

__all__.append('PendingBinContents')
def insert_pending_content_paths(package,
    """
    Make sure given paths are temporarily associated with given
    package

    @type package: dict
    @param package: the package to associate with should have been read in from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session.  If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code.  If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem
    """

    privatetrans = False
        # Temporary session: we commit/rollback ourselves further down.
        session = DBConn().session()

        arch = get_architecture(package['Architecture'], session)
        arch_id = arch.arch_id

        # Remove any already existing recorded files for this package
        q = session.query(PendingBinContents)
        q = q.filter_by(package=package['Package'])
        q = q.filter_by(version=package['Version'])
        q = q.filter_by(architecture=arch_id)

        for fullpath in fullpaths:

            # Normalise leading "./" away before storing the path.
            if fullpath.startswith( "./" ):
                fullpath = fullpath[2:]

            pca = PendingBinContents()
            pca.package = package['Package']
            pca.version = package['Version']
            pca.architecture = arch_id

            pca.type = 8 # gross
            pca.type = 7 # also gross

        # Only commit if we set up the session ourself

    except Exception, e:  # Python 2 except syntax
        traceback.print_exc()

        # Only rollback if we set up the session ourself

__all__.append('insert_pending_content_paths')
################################################################################

class PolicyQueue(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided from this view.
        return '<PolicyQueue %s>' % self.queue_name

__all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    # (try/return lines elided from this view)
    except NoResultFound:

__all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(path=pathname)

    # (try/return lines elided from this view)
    except NoResultFound:

__all__.append('get_policy_queue_from_path')
################################################################################

class Priority(object):
    def __init__(self, *args, **kwargs):

    def __eq__(self, val):
        # Allow direct comparison against a priority name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

        # NOTE(review): the __repr__ def line is elided from this view.
        return '<Priority %s (%s)>' % (self.priority, self.priority_id)

__all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)

    # (try/return lines elided from this view)
    except NoResultFound:

__all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    q = session.query(Priority)

    # NOTE(review): the loop header over the query results is elided from
    # this view; each row contributes one name -> id entry.
        ret[x.priority] = x.priority_id

__all__.append('get_priorities')
################################################################################

class Section(object):
    def __init__(self, *args, **kwargs):

    def __eq__(self, val):
        # Allow direct comparison against a section name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

        # NOTE(review): the __repr__ def line is elided from this view.
        return '<Section %s>' % self.section

__all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)

    # (try/return lines elided from this view)
    except NoResultFound:

__all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    q = session.query(Section)

    # NOTE(review): the loop header over the query results is elided from
    # this view; each row contributes one name -> id entry.
        ret[x.section] = x.section_id

__all__.append('get_sections')
################################################################################

class DBSource(object):
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

        # NOTE(review): the __repr__ def line is elided from this view.
        return '<DBSource %s (%s)>' % (self.source, self.version)

__all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # NOTE(review): mutable default argument 'suites'; safe only as long as
    # it is never mutated in the function body.

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source)

        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]
        # NOTE(review): the loop that expands the suite set 's' through the
        # mappings is elided from this view.
            if x[1] in s and x[0] not in s:

        q = q.join(SrcAssociation).join(Suite)
        q = q.filter(Suite.suite_name.in_(s))

        # Reduce the query results to a list of version numbers
        ql = [ j.version for j in q.all() ]

        # Case 1: exact version match.
        if source_version in ql:

        # Case 2: strip a bin-only-NMU suffix and retry.
        from daklib.regexes import re_bin_only_nmu
        orig_source_version = re_bin_only_nmu.sub('', source_version)
        if orig_source_version in ql:

    # No source found so return not ok

__all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """

    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    # Optional narrowing filters.
    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

__all__.append('get_sources_from_name')
# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: string
    @return: the version for I{source} in I{suite}
    """

    q = get_suite(suite, session).get_sources(source)

    # (try/return lines elided from this view)
    except NoResultFound:

__all__.append('get_source_in_suite')
################################################################################

def add_dsc_to_db(u, filename, session=None):
    """Record the .dsc of upload C{u} in the database: the DBSource row, its
    pool files, dsc_files entries and src_uploaders.

    Returns (source, dsc_component, dsc_location_id, pfs).

    NOTE(review): several locals (source, pfs, dscfile, df, su, added_ids)
    are created on lines elided from this view.
    """
    entry = u.pkg.files[filename]

    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    # Associate the source with every suite listed in the .changes.
    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, its
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

            # If still not found, add it
            if files_id is None:
                # HACK: Force sha1sum etc into dentry
                dentry["sha1sum"] = dfentry["sha1sum"]
                dentry["sha256sum"] = dfentry["sha256sum"]
                poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                pfs.append(poolfile)
                files_id = poolfile.file_id

        poolfile = get_poolfile_by_id(files_id, session)
        if poolfile is None:
            utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
        pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        # Uploaders are comma-separated; the ">, " trick protects commas
        # inside "Name <addr>" tokens.
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    for up_id in uploader_ids:
        if added_ids.has_key(up_id):
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))

        su.maintainer_id = up_id
        su.source_id = source.source_id

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs.  That info is in 'dbtype', whilst 'type' is
    'deb' for both of them
    """
    entry = u.pkg.files[filename]

    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Resolve the pool location, creating it lazily.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        # NOTE(review): bin.poolfile_id is read here before it is assigned on
        # the next line, and no session is passed — looks suspect; confirm.
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
        # NOTE(review): the 'else:' header of this branch is elided from
        # this view.
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # The binary must map to exactly one source package.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID

    # Add BinAssociations
    for suite_name in u.pkg.changes["distribution"].keys():
        ba = BinAssociation()
        ba.binary_id = bin.binary_id
        ba.suite_id = get_suite(suite_name).suite_id

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

__all__.append('add_deb_to_db')
################################################################################

class SourceACL(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided from this view.
        return '<SourceACL %s>' % self.source_acl_id

__all__.append('SourceACL')
################################################################################

class SrcAssociation(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided from this view.
        return '<SrcAssociation %s (%s, %s)>' % (self.sa_id, self.source, self.suite)

__all__.append('SrcAssociation')
################################################################################

class SrcFormat(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided from this view.
        return '<SrcFormat %s>' % (self.format_name)

__all__.append('SrcFormat')
################################################################################

class SrcUploader(object):
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __repr__ def line is elided from this view.
        return '<SrcUploader %s>' % self.uploader_id

__all__.append('SrcUploader')
################################################################################

# (Config display name, Suite attribute) pairs, used below to render a
# suite's settings one "Name: value" line at a time.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
# Why the heck don't we have any UNIQUE constraints in table suite?
# TODO: Add UNIQUE constraints for appropriate columns.
class Suite(object):
    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

        # NOTE(review): the __repr__ def line is elided from this view.
        return '<Suite %s>' % self.suite_name

    def __eq__(self, val):
        # Allow direct comparison against a suite name string.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

        # NOTE(review): the enclosing method's def line is elided from this
        # view; this loop renders one "Name: value" line per SUITE_FIELDS
        # entry and joins them with newlines.
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture). \
            filter(Architecture.suites.contains(self))
        # NOTE(review): in the full source these two filters are guarded by
        # skipsrc / skipall respectively; the guard lines are elided here.
        q = q.filter(Architecture.arch_string != 'source')
        q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """

        session = object_session(self)
        return session.query(DBSource).filter_by(source = source). \
            filter(DBSource.suites.contains(self))

__all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)

    # (try/return lines elided from this view)
    except NoResultFound:

__all__.append('get_suite')
################################################################################

# TODO: should be removed because the implementation is too trivial
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """
    # Delegates straight to Suite.get_architectures.
    return get_suite(suite, session).get_architectures(skipsrc, skipall)

__all__.append('get_suite_architectures')
2606 ################################################################################
class SuiteSrcFormat(object):
    """
    Association between a suite and an allowed source format; columns are
    mapped onto the suite_src_formats table in DBConn.__setupmappers().
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # suite_id / src_format_id are attached by the ORM mapping.
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
# Export SuiteSrcFormat as part of this module's public interface.
__all__.append('SuiteSrcFormat')
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """

    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    # Without this return the built query was silently discarded.
    return q.all()

__all__.append('get_suite_src_formats')
2642 ################################################################################
2645 def __init__(self, uid = None, name = None):
2649 def __eq__(self, val):
2650 if isinstance(val, str):
2651 return (self.uid == val)
2652 # This signals to use the normal comparison operator
2653 return NotImplemented
2655 def __ne__(self, val):
2656 if isinstance(val, str):
2657 return (self.uid != val)
2658 # This signals to use the normal comparison operator
2659 return NotImplemented
2662 return '<Uid %s (%s)>' % (self.uid, self.name)
2664 __all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """

    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        # Not present yet: insert a new row and hand back the fresh object.
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        # Commits when this function owns the session, flushes otherwise
        # (see the session note in the docstring).
        session.commit_or_flush()
        ret = uid

    return ret

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """
    Returns the Uid that owns the given fingerprint (None if not found).

    @type fpr: string
    @param fpr: The fingerprint to look up

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)
    """
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_uid_from_fingerprint')
2712 ################################################################################
class UploadBlock(object):
    """
    An upload block entry; columns are mapped onto the upload_blocks table
    in DBConn.__setupmappers().
    """
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # source / upload_block_id are attached by the ORM mapping.
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
# Export UploadBlock as part of this module's public interface.
__all__.append('UploadBlock')
2723 ################################################################################
class DBConn(object):
    """
    database module init.

    Borg-style shared-state singleton: every instance shares __shared_state,
    so the engine, metadata and session factory are set up only once per
    process.
    """
    # Shared across all instances (Borg pattern); required by the
    # self.__dict__ assignment in __init__.
    __shared_state = {}

    def __init__(self, *args, **kwargs):
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            # 'in' instead of the deprecated dict.has_key()
            self.debug = 'debug' in kwargs
            self.__createconn()

    def __setuptables(self):
        # Tables with a plain integer primary key; see the SERIAL-type
        # workaround comment above the reflection loop below.
        tables_with_primary = (
            'changes_pending_binaries',
            'changes_pending_files',
            'changes_pending_source',
            'pending_bin_contents',
            # The following tables have primary keys but sqlalchemy
            # version 0.5 fails to reflect them correctly with database
            # versions before upgrade #41.
            #'build_queue_files',
        )

        tables_no_primary = (
            'changes_pending_files_map',
            'changes_pending_source_files',
            'changes_pool_files',
            'suite_architectures',
            'suite_src_formats',
            'suite_build_queue_copy',
            # see the comment above
            'build_queue_files',
        )

        # Database views, reflected read-only as view_* attributes.
        views = (
            'almost_obsolete_all_associations',
            'almost_obsolete_src_associations',
            'any_associations_source',
            'bin_assoc_by_arch',
            'bin_associations_binaries',
            'binaries_suite_arch',
            'binfiles_suite_component_arch',
            'newest_all_associations',
            'newest_any_associations',
            'newest_src_association',
            'obsolete_all_associations',
            'obsolete_any_associations',
            'obsolete_any_by_all_associations',
            'obsolete_src_associations',
            'src_associations_bin',
            'src_associations_src',
            'suite_arch_by_name',
        )

        # Sqlalchemy version 0.5 fails to reflect the SERIAL type
        # correctly and that is why we have to use a workaround. It can
        # be removed as soon as we switch to version 0.6.
        for table_name in tables_with_primary:
            table = Table(table_name, self.db_meta, \
                Column('id', Integer, primary_key = True), \
                autoload=True, useexisting=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for table_name in tables_no_primary:
            table = Table(table_name, self.db_meta, autoload=True)
            setattr(self, 'tbl_%s' % table_name, table)

        for view_name in views:
            view = Table(view_name, self.db_meta, autoload=True)
            setattr(self, 'view_%s' % view_name, view)

    def __setupmappers(self):
        # Classic (non-declarative) mappings: one mapper() call per ORM
        # class, wiring class attributes onto the reflected table columns.
        mapper(Architecture, self.tbl_architecture,
               properties = dict(arch_id = self.tbl_architecture.c.id,
                                 suites = relation(Suite, secondary=self.tbl_suite_architectures,
                                                   order_by='suite_name',
                                                   backref=backref('architectures', order_by='arch_string'))))

        mapper(Archive, self.tbl_archive,
               properties = dict(archive_id = self.tbl_archive.c.id,
                                 archive_name = self.tbl_archive.c.name))

        mapper(BinAssociation, self.tbl_bin_associations,
               properties = dict(ba_id = self.tbl_bin_associations.c.id,
                                 suite_id = self.tbl_bin_associations.c.suite,
                                 suite = relation(Suite),
                                 binary_id = self.tbl_bin_associations.c.bin,
                                 binary = relation(DBBinary)))

        mapper(PendingBinContents, self.tbl_pending_bin_contents,
               properties = dict(contents_id = self.tbl_pending_bin_contents.c.id,
                                 filename = self.tbl_pending_bin_contents.c.filename,
                                 package = self.tbl_pending_bin_contents.c.package,
                                 version = self.tbl_pending_bin_contents.c.version,
                                 arch = self.tbl_pending_bin_contents.c.arch,
                                 otype = self.tbl_pending_bin_contents.c.type))

        mapper(DebContents, self.tbl_deb_contents,
               properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
                                 package=self.tbl_deb_contents.c.package,
                                 suite=self.tbl_deb_contents.c.suite,
                                 arch=self.tbl_deb_contents.c.arch,
                                 section=self.tbl_deb_contents.c.section,
                                 filename=self.tbl_deb_contents.c.filename))

        mapper(UdebContents, self.tbl_udeb_contents,
               properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
                                 package=self.tbl_udeb_contents.c.package,
                                 suite=self.tbl_udeb_contents.c.suite,
                                 arch=self.tbl_udeb_contents.c.arch,
                                 section=self.tbl_udeb_contents.c.section,
                                 filename=self.tbl_udeb_contents.c.filename))

        mapper(BuildQueue, self.tbl_build_queue,
               properties = dict(queue_id = self.tbl_build_queue.c.id))

        mapper(BuildQueueFile, self.tbl_build_queue_files,
               properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                                 poolfile = relation(PoolFile, backref='buildqueueinstances')))

        mapper(DBBinary, self.tbl_binaries,
               properties = dict(binary_id = self.tbl_binaries.c.id,
                                 package = self.tbl_binaries.c.package,
                                 version = self.tbl_binaries.c.version,
                                 maintainer_id = self.tbl_binaries.c.maintainer,
                                 maintainer = relation(Maintainer),
                                 source_id = self.tbl_binaries.c.source,
                                 source = relation(DBSource),
                                 arch_id = self.tbl_binaries.c.architecture,
                                 architecture = relation(Architecture),
                                 poolfile_id = self.tbl_binaries.c.file,
                                 poolfile = relation(PoolFile),
                                 binarytype = self.tbl_binaries.c.type,
                                 fingerprint_id = self.tbl_binaries.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 install_date = self.tbl_binaries.c.install_date,
                                 binassociations = relation(BinAssociation,
                                                            primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))

        mapper(BinaryACL, self.tbl_binary_acl,
               properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))

        mapper(BinaryACLMap, self.tbl_binary_acl_map,
               properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
                                 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Component, self.tbl_component,
               properties = dict(component_id = self.tbl_component.c.id,
                                 component_name = self.tbl_component.c.name))

        mapper(DBConfig, self.tbl_config,
               properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
               properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                                 source_id = self.tbl_dsc_files.c.source,
                                 source = relation(DBSource),
                                 poolfile_id = self.tbl_dsc_files.c.file,
                                 poolfile = relation(PoolFile)))

        mapper(PoolFile, self.tbl_files,
               properties = dict(file_id = self.tbl_files.c.id,
                                 filesize = self.tbl_files.c.size,
                                 location_id = self.tbl_files.c.location,
                                 location = relation(Location,
                                                     # using lazy='dynamic' in the back
                                                     # reference because we have A LOT of
                                                     # files in one location
                                                     backref=backref('files', lazy='dynamic'))))

        mapper(Fingerprint, self.tbl_fingerprint,
               properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                                 uid_id = self.tbl_fingerprint.c.uid,
                                 uid = relation(Uid),
                                 keyring_id = self.tbl_fingerprint.c.keyring,
                                 keyring = relation(Keyring),
                                 source_acl = relation(SourceACL),
                                 binary_acl = relation(BinaryACL)))

        mapper(Keyring, self.tbl_keyrings,
               properties = dict(keyring_name = self.tbl_keyrings.c.name,
                                 keyring_id = self.tbl_keyrings.c.id))

        mapper(DBChange, self.tbl_changes,
               properties = dict(change_id = self.tbl_changes.c.id,
                                 poolfiles = relation(PoolFile,
                                                      secondary=self.tbl_changes_pool_files,
                                                      backref="changeslinks"),
                                 seen = self.tbl_changes.c.seen,
                                 source = self.tbl_changes.c.source,
                                 binaries = self.tbl_changes.c.binaries,
                                 architecture = self.tbl_changes.c.architecture,
                                 distribution = self.tbl_changes.c.distribution,
                                 urgency = self.tbl_changes.c.urgency,
                                 maintainer = self.tbl_changes.c.maintainer,
                                 changedby = self.tbl_changes.c.changedby,
                                 date = self.tbl_changes.c.date,
                                 version = self.tbl_changes.c.version,
                                 files = relation(ChangePendingFile,
                                                  secondary=self.tbl_changes_pending_files_map,
                                                  backref="changesfile"),
                                 in_queue_id = self.tbl_changes.c.in_queue,
                                 in_queue = relation(PolicyQueue,
                                                     primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                                 approved_for_id = self.tbl_changes.c.approved_for))

        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
               properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))

        mapper(ChangePendingFile, self.tbl_changes_pending_files,
               properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
                                 filename = self.tbl_changes_pending_files.c.filename,
                                 size = self.tbl_changes_pending_files.c.size,
                                 md5sum = self.tbl_changes_pending_files.c.md5sum,
                                 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
                                 sha256sum = self.tbl_changes_pending_files.c.sha256sum))

        mapper(ChangePendingSource, self.tbl_changes_pending_source,
               properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
                                 change = relation(DBChange),
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                                 changedby = relation(Maintainer,
                                                      primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
                                 fingerprint = relation(Fingerprint),
                                 source_files = relation(ChangePendingFile,
                                                         secondary=self.tbl_changes_pending_source_files,
                                                         backref="pending_sources")))

        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
               properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                                 keyring = relation(Keyring, backref="keyring_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Location, self.tbl_location,
               properties = dict(location_id = self.tbl_location.c.id,
                                 component_id = self.tbl_location.c.component,
                                 component = relation(Component),
                                 archive_id = self.tbl_location.c.archive,
                                 archive = relation(Archive),
                                 # FIXME: the 'type' column is old cruft and
                                 # should be removed in the future.
                                 archive_type = self.tbl_location.c.type))

        mapper(Maintainer, self.tbl_maintainer,
               properties = dict(maintainer_id = self.tbl_maintainer.c.id,
                                 maintains_sources = relation(DBSource, backref='maintainer',
                                                              primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
                                 changed_sources = relation(DBSource, backref='changedby',
                                                            primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))))

        mapper(NewComment, self.tbl_new_comments,
               properties = dict(comment_id = self.tbl_new_comments.c.id))

        mapper(Override, self.tbl_override,
               properties = dict(suite_id = self.tbl_override.c.suite,
                                 suite = relation(Suite),
                                 package = self.tbl_override.c.package,
                                 component_id = self.tbl_override.c.component,
                                 component = relation(Component),
                                 priority_id = self.tbl_override.c.priority,
                                 priority = relation(Priority),
                                 section_id = self.tbl_override.c.section,
                                 section = relation(Section),
                                 overridetype_id = self.tbl_override.c.type,
                                 overridetype = relation(OverrideType)))

        mapper(OverrideType, self.tbl_override_type,
               properties = dict(overridetype = self.tbl_override_type.c.type,
                                 overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
               properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))

        mapper(Priority, self.tbl_priority,
               properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
               properties = dict(section_id = self.tbl_section.c.id,
                                 section=self.tbl_section.c.section))

        mapper(DBSource, self.tbl_source,
               properties = dict(source_id = self.tbl_source.c.id,
                                 version = self.tbl_source.c.version,
                                 maintainer_id = self.tbl_source.c.maintainer,
                                 poolfile_id = self.tbl_source.c.file,
                                 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
                                 fingerprint_id = self.tbl_source.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 changedby_id = self.tbl_source.c.changedby,
                                 srcfiles = relation(DSCFile,
                                                     primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                 # NOTE(review): backref name reconstructed --
                                 # confirm 'sources' matches the rest of the code.
                                 suites = relation(Suite, secondary=self.tbl_src_associations,
                                                   backref='sources'),
                                 srcuploaders = relation(SrcUploader)))

        mapper(SourceACL, self.tbl_source_acl,
               properties = dict(source_acl_id = self.tbl_source_acl.c.id))

        mapper(SrcAssociation, self.tbl_src_associations,
               properties = dict(sa_id = self.tbl_src_associations.c.id,
                                 suite_id = self.tbl_src_associations.c.suite,
                                 suite = relation(Suite),
                                 source_id = self.tbl_src_associations.c.source,
                                 source = relation(DBSource)))

        mapper(SrcFormat, self.tbl_src_format,
               properties = dict(src_format_id = self.tbl_src_format.c.id,
                                 format_name = self.tbl_src_format.c.format_name))

        mapper(SrcUploader, self.tbl_src_uploaders,
               properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
                                 source_id = self.tbl_src_uploaders.c.source,
                                 source = relation(DBSource,
                                                   primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
                                 maintainer_id = self.tbl_src_uploaders.c.maintainer,
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))

        mapper(Suite, self.tbl_suite,
               properties = dict(suite_id = self.tbl_suite.c.id,
                                 policy_queue = relation(PolicyQueue),
                                 copy_queues = relation(BuildQueue, secondary=self.tbl_suite_build_queue_copy)))

        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
               properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
                                 suite = relation(Suite, backref='suitesrcformats'),
                                 src_format_id = self.tbl_suite_src_formats.c.src_format,
                                 src_format = relation(SrcFormat)))

        mapper(Uid, self.tbl_uid,
               properties = dict(uid_id = self.tbl_uid.c.id,
                                 fingerprint = relation(Fingerprint)))

        mapper(UploadBlock, self.tbl_upload_blocks,
               properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                                 fingerprint = relation(Fingerprint, backref="uploadblocks"),
                                 uid = relation(Uid, backref="uploadblocks")))

    ## Connection functions
    def __createconn(self):
        from config import Config
        cnf = Config()
        if cnf["DB::Host"]:
            # TCP/IP connection to a (possibly remote) host.
            connstr = "postgres://%s" % cnf["DB::Host"]
            if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
                connstr += ":%s" % cnf["DB::Port"]
            connstr += "/%s" % cnf["DB::Name"]
        else:
            # Unix-socket connection; the port travels as a query parameter.
            connstr = "postgres:///%s" % cnf["DB::Name"]
            if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
                connstr += "?port=%s" % cnf["DB::Port"]

        self.db_pg = create_engine(connstr, echo=self.debug)
        self.db_meta = MetaData()
        self.db_meta.bind = self.db_pg
        # NOTE(review): autoflush/autocommit values reconstructed -- confirm.
        self.db_smaker = sessionmaker(bind=self.db_pg,
                                      autoflush=True,
                                      autocommit=False)

        self.__setuptables()
        self.__setupmappers()

    def session(self):
        # Hand out a fresh ORM session bound to the shared engine.
        return self.db_smaker()
# Export DBConn as part of this module's public interface.
__all__.append('DBConn')