5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
40 from datetime import datetime, timedelta
41 from errno import ENOENT
42 from tempfile import mkstemp, mkdtemp
44 from inspect import getargspec
47 from sqlalchemy import create_engine, Table, MetaData
48 from sqlalchemy.orm import sessionmaker, mapper, relation
49 from sqlalchemy import types as sqltypes
51 # Don't remove this, we re-export the exceptions to scripts which import us
52 from sqlalchemy.exc import *
53 from sqlalchemy.orm.exc import NoResultFound
55 # Only import Config until Queue stuff is changed to store its config
57 from config import Config
58 from textutils import fix_maintainer
60 ################################################################################
62 # Patch in support for the debversion field type so that it works during
class DebVersion(sqltypes.Text):
    """
    Support the debversion type
    """

    def get_col_spec(self):
        # Column type name emitted into DDL / used for reflection.
        return "DEBVERSION"

sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version == "0.5":
    # Register the PostgreSQL 'debversion' type with SQLAlchemy's
    # reflection machinery so reflected tables map it onto DebVersion.
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
else:
    raise Exception("dak isn't ported to SQLA versions != 0.5 yet. See daklib/dbconn.py")
80 ################################################################################
82 __all__ = ['IntegrityError', 'SQLAlchemyError']
84 ################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """

    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        if session is None:
            if len(args) <= len(getargspec(fn)[0]) - 1:
                # No session specified as last argument or in kwargs
                private_transaction = True
                session = kwargs['session'] = DBConn().session()
            else:
                # Session is last argument in args
                session = args[-1]
                if session is None:
                    # Caller explicitly passed None positionally; replace it
                    # with a private session.
                    args = list(args)
                    session = args[-1] = DBConn().session()
                    private_transaction = True

        if private_transaction:
            session.commit_or_flush = session.commit
        else:
            session.commit_or_flush = session.flush

        try:
            return fn(*args, **kwargs)
        finally:
            if private_transaction:
                # We created a session; close it.
                session.close()

    wrapped.__doc__ = fn.__doc__
    wrapped.func_name = fn.func_name

    return wrapped
133 __all__.append('session_wrapper')
135 ################################################################################
class Architecture(object):
    """
    Mapped row of the architecture table; attributes (e.g. arch_string)
    are populated by the SQLAlchemy mapper.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        # Allow direct comparison against a plain architecture name string.
        if isinstance(val, str):
            return (self.arch_string == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.arch_string != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Architecture %s>' % self.arch_string
156 __all__.append('Architecture')
@session_wrapper
def get_architecture(architecture, session=None):
    """
    Returns database id for given C{architecture}.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Architecture
    @return: Architecture object for the given arch (None if not present)
    """

    q = session.query(Architecture).filter_by(arch_string=architecture)

    try:
        return q.one()
    except NoResultFound:
        return None
181 __all__.append('get_architecture')
@session_wrapper
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)
    """

    q = session.query(Suite)
    q = q.join(SuiteArchitecture)
    q = q.join(Architecture).filter_by(arch_string=architecture).order_by('suite_name')

    return q.all()
207 __all__.append('get_architecture_suites')
209 ################################################################################
class Archive(object):
    """
    Mapped row of the archive table; attributes (e.g. archive_name)
    are populated by the SQLAlchemy mapper.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Archive %s>' % self.archive_name
218 __all__.append('Archive')
@session_wrapper
def get_archive(archive, session=None):
    """
    returns database id for given C{archive}.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Archive
    @return: Archive object for the given name (None if not present)
    """

    # Archive names are stored lower-case.
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)

    try:
        return q.one()
    except NoResultFound:
        return None
245 __all__.append('get_archive')
247 ################################################################################
class BinAssociation(object):
    """
    Mapped row of the bin_associations table (binary package <-> suite);
    attributes are populated by the SQLAlchemy mapper.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
256 __all__.append('BinAssociation')
258 ################################################################################
class BinContents(object):
    """
    Mapped row of the bin_contents table: one (binary, filename) pair;
    attributes are populated by the SQLAlchemy mapper.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinContents (%s, %s)>' % (self.binary, self.filename)
267 __all__.append('BinContents')
269 ################################################################################
class DBBinary(object):
    """
    Mapped row of the binaries table; attributes (package, version,
    architecture, ...) are populated by the SQLAlchemy mapper.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)
278 __all__.append('DBBinary')
@session_wrapper
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @type package: str
    @param package: DBBinary package name to search for

    @rtype: list
    @return: list of Suite objects for the given package
    """

    return session.query(Suite).join(BinAssociation).join(DBBinary).filter_by(package=package).all()
294 __all__.append('get_suites_binary_in')
@session_wrapper
def get_binary_from_id(binary_id, session=None):
    """
    Returns DBBinary object for given C{id}

    @type binary_id: int
    @param binary_id: Id of the required binary

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBBinary
    @return: DBBinary object for the given binary (None if not present)
    """

    q = session.query(DBBinary).filter_by(binary_id=binary_id)

    try:
        return q.one()
    except NoResultFound:
        return None
319 __all__.append('get_binary_from_id')
@session_wrapper
def get_binaries_from_name(package, version=None, architecture=None, session=None):
    """
    Returns list of DBBinary objects for given C{package} name

    @type package: str
    @param package: DBBinary package name to search for

    @type version: str or None
    @param version: Version to search for (or None)

    @type architecture: str, list or None
    @param architecture: Architectures to limit to (or None if no limit)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBBinary objects for the given name (may be empty)
    """

    q = session.query(DBBinary).filter_by(package=package)

    if version is not None:
        q = q.filter_by(version=version)

    if architecture is not None:
        # Accept either a single architecture name or a list of them.
        if not isinstance(architecture, list):
            architecture = [architecture]
        q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))

    return q.all()
357 __all__.append('get_binaries_from_name')
@session_wrapper
def get_binaries_from_source_id(source_id, session=None):
    """
    Returns list of DBBinary objects for given C{source_id}

    @type source_id: int
    @param source_id: source_id to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBBinary objects for the given name (may be empty)
    """

    return session.query(DBBinary).filter_by(source_id=source_id).all()
377 __all__.append('get_binaries_from_source_id')
@session_wrapper
def get_binary_from_name_suite(package, suitename, session=None):
    ### For dak examine-package
    ### XXX: Doesn't use object API yet
    # NOTE(review): this builds SQL via %-interpolation rather than bind
    # parameters; 'suitename' is in fact a SQL expression fragment supplied by
    # the caller (e.g. "= 'unstable'"), so it cannot simply be parameterized.
    # Callers must never pass untrusted input here.

    sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
             FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
             WHERE b.package='%(package)s'
               AND b.file = fi.id
               AND fi.location = l.id
               AND l.component = c.id
               AND ba.bin = b.id
               AND ba.suite = su.id
               AND su.suite_name %(suitename)s
             ORDER BY b.version DESC"""

    return session.execute(sql % {'package': package, 'suitename': suitename})
397 __all__.append('get_binary_from_name_suite')
@session_wrapper
def get_binary_components(package, suitename, arch, session=None):
    """
    Returns the component names that C{package} is in for the given suite and
    architecture (binary-specific arch or 'all').

    @rtype: ResultProxy
    @return: iterable of (component name,) rows
    """
    # Check for packages that have moved from one component to another
    query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
    WHERE b.package=:package AND s.suite_name=:suitename
      AND (a.arch_string = :arch OR a.arch_string = 'all')
      AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
      AND f.location = l.id
      AND l.component = c.id
      AND b.file = f.id"""

    vals = {'package': package, 'suitename': suitename, 'arch': arch}

    return session.execute(query, vals)
414 __all__.append('get_binary_components')
416 ################################################################################
class BinaryACL(object):
    """
    Mapped row of the binary_acl table; attributes are populated by the
    SQLAlchemy mapper.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACL %s>' % self.binary_acl_id
425 __all__.append('BinaryACL')
427 ################################################################################
class BinaryACLMap(object):
    """
    Mapped row of the binary_acl_map table; attributes are populated by the
    SQLAlchemy mapper.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACLMap %s>' % self.binary_acl_map_id
436 __all__.append('BinaryACLMap')
438 ################################################################################
443 ArchiveDir "%(archivepath)s";
444 OverrideDir "/srv/ftp-master.debian.org/scripts/override/";
445 CacheDir "/srv/ftp-master.debian.org/database/";
450 Packages::Compress ". bzip2 gzip";
451 Sources::Compress ". bzip2 gzip";
456 bindirectory "incoming"
461 BinOverride "override.sid.all3";
462 BinCacheDB "packages-accepted.db";
464 FileList "%(filelist)s";
467 Packages::Extensions ".deb .udeb";
470 bindirectory "incoming/"
473 BinOverride "override.sid.all3";
474 SrcOverride "override.sid.all3.src";
475 FileList "%(filelist)s";
class BuildQueue(object):
    """
    A build queue (e.g. buildd 'incoming'): writes apt index metadata for its
    directory and expires files past their stay of execution.

    NOTE(review): many interior lines of this class are missing from this copy
    of the file; gaps are marked "# [...missing lines...]" below and must be
    restored from VCS before this code is runnable.
    """
    def __init__(self, *args, **kwargs):
        # [...missing lines...]

        # (body of __repr__; its 'def __repr__(self):' line is missing here)
        return '<BuildQueue %s>' % self.queue_name

    def write_metadata(self, starttime, force=False):
        """
        Regenerate Packages/Sources/Release files for this queue directory
        using apt-ftparchive, unless metadata generation is disabled and
        force is False.
        """
        # Do we write out metafiles?
        if not (force or self.generate_metadata):
            # [...missing lines...] (early return presumably elided here)

        session = DBConn().session().object_session(self)

        fl_fd = fl_name = ac_fd = ac_name = None
        # All architectures except the pseudo-architecture 'source'
        arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
        startdir = os.getcwd()

        # Grab files we want to include
        newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
        # Write file list with newer files
        (fl_fd, fl_name) = mkstemp()
        # [...missing lines...] (loop over 'newer' binding 'n' elided here)
        os.write(fl_fd, '%s\n' % n.fullpath)

        # Write minimal apt.conf
        # TODO: Remove hardcoding from template
        (ac_fd, ac_name) = mkstemp()
        os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
                                            'filelist': fl_name})

        # Run apt-ftparchive generate
        os.chdir(os.path.dirname(ac_name))
        os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))

        # Run apt-ftparchive release
        # TODO: Eww - fix this
        bname = os.path.basename(self.path)
        # [...missing lines...]

        # We have to remove the Release file otherwise it'll be included in the
        # [...missing lines...] (rest of comment and guard elided)
        os.unlink(os.path.join(bname, 'Release'))
        # [...missing lines...]
        os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))

        # Crude hack with open and append, but this whole section is and should be redone.
        if self.notautomatic:
            release=open("Release", "a")
            release.write("NotAutomatic: yes")
        # [...missing lines...] (signing guard elided; following lines appear
        # to run only when a signing key is configured — TODO confirm)
            keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
            if cnf.has_key("Dinstall::SigningPubKeyring"):
                keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]

            os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))

        # Move the files if we got this far
        os.rename('Release', os.path.join(bname, 'Release'))
        # [...missing lines...]
        os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))

        # Clean up any left behind files
        # [...missing lines...] (temp-file cleanup and chdir(startdir) elided)

    def clean_and_update(self, starttime, Logger, dryrun=False):
        """WARNING: This routine commits for you"""
        session = DBConn().session().object_session(self)

        if self.generate_metadata and not dryrun:
            self.write_metadata(starttime)

        # Grab files older than our execution time
        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
        # [...missing lines...] (loop over 'older' / dryrun branch elided)
                Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
        # [...missing lines...]
                Logger.log(["I: Removing %s from the queue" % o.fullpath])
                os.unlink(o.fullpath)
        # [...missing lines...] (except OSError handler binding 'e' elided)
                # If it wasn't there, don't worry
                if e.errno == ENOENT:
        # [...missing lines...]
                # TODO: Replace with proper logging call
                Logger.log(["E: Could not remove %s" % o.fullpath])
        # [...missing lines...]

        # Remove index files we no longer reference
        for f in os.listdir(self.path):
            if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release'):
        # [...missing lines...] (continue / try elided)
                r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
            except NoResultFound:
                fp = os.path.join(self.path, f)
        # [...missing lines...]
                    Logger.log(["I: Would remove unused link %s" % fp])
        # [...missing lines...]
                    Logger.log(["I: Removing unused link %s" % fp])
        # [...missing lines...]
                        Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])

    def add_file_from_pool(self, poolfile):
        """Copies a file into the pool. Assumes that the PoolFile object is
        attached to the same SQLAlchemy session as the Queue object is.

        The caller is responsible for committing after calling this function."""
        poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]

        # Check if we have a file of this name or this ID already
        for f in self.queuefiles:
            if f.fileid is not None and f.fileid == poolfile.file_id or \
               f.poolfile.filename == poolfile_basename:
                # In this case, update the BuildQueueFile entry so we
                # don't remove it too early
                f.lastused = datetime.now()
                DBConn().session().object_session(poolfile).add(f)
                # [...missing lines...] (return of existing entry elided)

        # Prepare BuildQueueFile object
        qf = BuildQueueFile()
        qf.build_queue_id = self.queue_id
        qf.lastused = datetime.now()
        qf.filename = poolfile_basename

        targetpath = poolfile.fullpath
        queuepath = os.path.join(self.path, poolfile_basename)

        # [...missing lines...] (try / copy-vs-symlink decision elided)
            # We need to copy instead of symlink
            # [...missing lines...]
                utils.copy(targetpath, queuepath)
                # NULL in the fileid field implies a copy
            # [...missing lines...]
                os.symlink(targetpath, queuepath)
                qf.fileid = poolfile.file_id
        # [...missing lines...] (except OSError -> return None elided)

        # Get the same session as the PoolFile is using and add the qf to it
        DBConn().session().object_session(poolfile).add(qf)
        # [...missing lines...] (return qf elided)
674 __all__.append('BuildQueue')
@session_wrapper
def get_build_queue(queuename, session=None):
    """
    Returns BuildQueue object for given C{queue name}.

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: BuildQueue
    @return: BuildQueue object for the given queue (None if not present)
    """

    q = session.query(BuildQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None
700 __all__.append('get_build_queue')
702 ################################################################################
class BuildQueueFile(object):
    """
    Mapped row of the build_queue_files table: one file present in a build
    queue directory; attributes are populated by the SQLAlchemy mapper.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)

    @property
    def fullpath(self):
        # Absolute path of this file within its queue's directory.
        return os.path.join(self.buildqueue.path, self.filename)
716 __all__.append('BuildQueueFile')
718 ################################################################################
class ChangePendingBinary(object):
    """
    Mapped row of the changes_pending_binaries table; attributes are
    populated by the SQLAlchemy mapper.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingBinary %s>' % self.change_pending_binary_id
727 __all__.append('ChangePendingBinary')
729 ################################################################################
class ChangePendingFile(object):
    """
    Mapped row of the changes_pending_files table; attributes are
    populated by the SQLAlchemy mapper.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingFile %s>' % self.change_pending_file_id
738 __all__.append('ChangePendingFile')
740 ################################################################################
class ChangePendingSource(object):
    """
    Mapped row of the changes_pending_source table; attributes are
    populated by the SQLAlchemy mapper.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingSource %s>' % self.change_pending_source_id
749 __all__.append('ChangePendingSource')
751 ################################################################################
class Component(object):
    """
    Mapped row of the component table; attributes (e.g. component_name)
    are populated by the SQLAlchemy mapper.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        # Allow direct comparison against a plain component name string.
        if isinstance(val, str):
            return (self.component_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.component_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Component %s>' % self.component_name
773 __all__.append('Component')
@session_wrapper
def get_component(component, session=None):
    """
    Returns database id for given C{component}.

    @type component: string
    @param component: The name of the override type

    @rtype: int
    @return: the database id for the given component
    """

    # Component names are stored lower-case.
    component = component.lower()

    q = session.query(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None
796 __all__.append('get_component')
798 ################################################################################
class DBConfig(object):
    """
    Mapped row of the config table; attributes are populated by the
    SQLAlchemy mapper.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBConfig %s>' % self.name
807 __all__.append('DBConfig')
809 ################################################################################
@session_wrapper
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: int
    @return: the database id for the given component
    """

    q = session.query(ContentFilename).filter_by(filename=filename)

    try:
        ret = q.one().cafilename_id
    except NoResultFound:
        # Not there yet; insert and flush/commit so the id is assigned.
        cf = ContentFilename()
        cf.filename = filename
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilename_id

    return ret
842 __all__.append('get_or_set_contents_file_id')
@session_wrapper
def get_contents(suite, overridetype, section=None, session=None):
    """
    Returns contents for a suite / overridetype combination, limiting
    to a section if not None.

    @type suite: Suite
    @param suite: Suite object

    @type overridetype: OverrideType
    @param overridetype: OverrideType object

    @type section: Section
    @param section: Optional section object to limit results to

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: ResultsProxy
    @return: ResultsProxy object set up to return tuples of (filename, section,
    package, arch_id)
    """

    # find me all of the contents for a given suite
    contents_q = """SELECT (p.path||'/'||n.file) AS fn,
                            s.section,
                            b.package,
                            b.architecture
                   FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
                   JOIN content_file_names n ON (c.filename=n.id)
                   JOIN binaries b ON (b.id=c.binary_pkg)
                   JOIN override o ON (o.package=b.package)
                   JOIN section s ON (s.id=o.section)
                   WHERE o.suite = :suiteid AND o.type = :overridetypeid
                   AND b.type=:overridetypename"""

    vals = {'suiteid': suite.suite_id,
            'overridetypeid': overridetype.overridetype_id,
            'overridetypename': overridetype.overridetype}

    if section is not None:
        contents_q += " AND s.id = :sectionid"
        vals['sectionid'] = section.section_id

    contents_q += " ORDER BY fn"

    return session.execute(contents_q, vals)
893 __all__.append('get_contents')
895 ################################################################################
class ContentFilepath(object):
    """
    Mapped row of the content_file_paths table; attributes are populated
    by the SQLAlchemy mapper.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentFilepath %s>' % self.filepath
904 __all__.append('ContentFilepath')
@session_wrapper
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: int
    @return: the database id for the given path
    """

    q = session.query(ContentFilepath).filter_by(filepath=filepath)

    try:
        ret = q.one().cafilepath_id
    except NoResultFound:
        # Not there yet; insert and flush/commit so the id is assigned.
        cf = ContentFilepath()
        cf.filepath = filepath
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilepath_id

    return ret
938 __all__.append('get_or_set_contents_path_id')
940 ################################################################################
class ContentAssociation(object):
    """
    Mapped row of the content_associations table; attributes are populated
    by the SQLAlchemy mapper.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentAssociation %s>' % self.ca_id
949 __all__.append('ContentAssociation')
def insert_content_paths(binary_id, fullpaths, session=None):
    """
    Make sure given path is associated with given binary id

    @type binary_id: int
    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for commiting.

    @return: True upon success
    """

    privatetrans = False
    if session is None:
        session = DBConn().session()
        privatetrans = True

    try:
        # Insert paths
        def generate_path_dicts():
            for fullpath in fullpaths:
                # Normalise away a leading './' so paths are stored relative.
                if fullpath.startswith( './' ):
                    fullpath = fullpath[2:]

                yield {'filename':fullpath, 'id': binary_id }

        for d in generate_path_dicts():
            session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
                             d )

        session.commit()
        if privatetrans:
            session.close()
        return True

    except:
        traceback.print_exc()

        # Only rollback if we set up the session ourself
        if privatetrans:
            session.rollback()
            session.close()

        return False
1002 __all__.append('insert_content_paths')
1004 ################################################################################
class DSCFile(object):
    """
    Mapped row of the dsc_files table; attributes are populated by the
    SQLAlchemy mapper.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DSCFile %s>' % self.dscfile_id
1013 __all__.append('DSCFile')
@session_wrapper
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @rtype: list
    @return: Possibly empty list of DSCFiles
    """

    q = session.query(DSCFile)

    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

    return q.all()
1046 __all__.append('get_dscfiles')
1048 ################################################################################
class PoolFile(object):
    """
    Mapped row of the files table: one file in the archive pool;
    attributes are populated by the SQLAlchemy mapper.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PoolFile %s>' % self.filename

    @property
    def fullpath(self):
        # Absolute path: location path joined with the pool-relative filename.
        return os.path.join(self.location.path, self.filename)
1061 __all__.append('PoolFile')
@session_wrapper
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Returns a tuple:
    (ValidFileFound [boolean or None], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type filesize: int
    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @rtype: tuple
    @return: Tuple of length 2.
        - If more than one file found with that name: (C{None}, C{None})
        - If valid pool file found: (C{True}, C{PoolFile object})
        - If valid pool file not found:
            - (C{False}, C{None}) if no file found
            - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """

    q = session.query(PoolFile).filter_by(filename=filename)
    q = q.join(Location).filter_by(location_id=location_id)

    ret = None
    if q.count() > 1:
        # Ambiguous: multiple pool entries with the same name at this location
        ret = (None, None)
    elif q.count() < 1:
        ret = (False, None)
    else:
        obj = q.one()
        if obj.md5sum != md5sum or obj.filesize != int(filesize):
            ret = (False, obj)

    if ret is None:
        ret = (True, obj)

    return ret
1109 __all__.append('check_poolfile')
@session_wrapper
def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile objects or None for the given id

    @type file_id: int
    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """

    q = session.query(PoolFile).filter_by(file_id=file_id)

    try:
        return q.one()
    except NoResultFound:
        return None
1130 __all__.append('get_poolfile_by_id')
@session_wrapper
def get_poolfile_by_name(filename, location_id=None, session=None):
    """
    Returns an array of PoolFile objects for the given filename and
    (optionally) location_id

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in (optional)

    @rtype: array
    @return: array of PoolFile objects
    """

    q = session.query(PoolFile).filter_by(filename=filename)

    if location_id is not None:
        q = q.join(Location).filter_by(location_id=location_id)

    return q.all()
1156 __all__.append('get_poolfile_by_name')
@session_wrapper
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @rtype: array
    @return: array of PoolFile objects
    """

    # TODO: There must be a way of properly using bind parameters with %FOO%
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))

    return q.all()
1175 __all__.append('get_poolfile_like_name')
@session_wrapper
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data (keys: size, md5sum, sha1sum,
    sha256sum)

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """

    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    session.flush()

    return poolfile
1208 __all__.append('add_poolfile')
1210 ################################################################################
class Fingerprint(object):
    """
    Mapped row of the fingerprint table; attributes are populated by the
    SQLAlchemy mapper.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Fingerprint %s>' % self.fingerprint
1219 __all__.append('Fingerprint')
@session_wrapper
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """

    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        ret = None

    return ret
1246 __all__.append('get_fingerprint')
@session_wrapper
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """

    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        # Not there yet; insert and flush/commit so the row exists.
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()
        ret = fingerprint

    return ret
1281 __all__.append('get_or_set_fingerprint')
1283 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """
    Extract a display name from an LDAP directory entry by joining the
    cn / mn / sn attributes, skipping empty and placeholder ("-") values.

    @type entry: dict
    @param entry: LDAP entry mapping attribute names to lists of values

    @rtype: string
    @return: space-joined name components (may be empty)
    """
    name = []
    for k in ["cn", "mn", "sn"]:
        ret = entry.get(k)
        if ret and ret[0] != "" and ret[0] != "-":
            name.append(ret[0])
    return " ".join(name)
1294 ################################################################################
class Keyring(object):
    """
    Mapped row of the keyrings table plus helpers that parse a GPG keyring
    (via 'gpg --with-colons') and sync its keys/uids with the database.

    NOTE(review): many interior lines of this class are missing from this copy
    of the file; gaps are marked "# [...missing lines...]" below and must be
    restored from VCS before this code is runnable.
    """
    # Command template used to list a keyring's keys in machine-readable
    # colon-delimited format.
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
                     " --with-colons --fingerprint --fingerprint"

    # [...missing lines...] (keys / fpr_lookup attribute initialisation elided)

    def __init__(self, *args, **kwargs):
        # [...missing lines...]

        # (body of __repr__; its 'def __repr__(self):' line is missing here)
        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, txt):
        # GPG's colon output escapes bytes as \xNN; decode them back to chars.
        esclist = re.split(r'(\\x..)', txt)
        for x in range(1,len(esclist),2):
            esclist[x] = "%c" % (int(esclist[x][2:],16))
        return "".join(esclist)

    def load_keys(self, keyring):
        """
        Parse the given keyring file with gpg and populate self.keys /
        self.fpr_lookup with key metadata (email, name, aliases,
        fingerprints).
        """
        # [...missing lines...]
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")
        # [...missing lines...] (key/signingkey loop state initialisation elided)
        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # [...missing lines...] (assignment of 'key' from this record elided)
                (name, addr) = email.Utils.parseaddr(field[9])
                # Strip a trailing "(comment)" from the uid's name part.
                name = re.sub(r"\s*[(].*[)]", "", name)
                if name == "" or addr == "" or "@" not in addr:
                    # [...missing lines...]
                    addr = "invalid-uid"
                name = self.de_escape_gpg_str(name)
                self.keys[key] = {"email": addr}
                # [...missing lines...]
                self.keys[key]["name"] = name
                self.keys[key]["aliases"] = [name]
                self.keys[key]["fingerprints"] = []
                # [...missing lines...]
            elif key and field[0] == "sub" and len(field) >= 12:
                # Capability field: 's' means this subkey can sign.
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = email.Utils.parseaddr(field[9])
                if name and name not in self.keys[key]["aliases"]:
                    self.keys[key]["aliases"].append(name)
            elif signingkey and field[0] == "fpr":
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        """
        Look up key owners in the Debian LDAP directory and attach uids to
        the loaded keys; returns (byname, byuid) lookup dicts.
        """
        # [...missing lines...] (ldap import / cnf setup elided)
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        # Anonymous bind; the directory is world-readable for these attrs.
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}
        # [...missing lines...] (byuid/byname init and entry loop header elided)
            uid = entry["uid"][0]
            name = get_ldap_name(entry)
            fingerprints = entry["keyFingerPrint"]
            # [...missing lines...]
            for f in fingerprints:
                key = self.fpr_lookup.get(f, None)
                if key not in self.keys:
                    # [...missing lines...] (continue elided)
                self.keys[key]["uid"] = uid
                # [...missing lines...]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, name)
                byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        """
        Generate database uids from the keyring's email addresses using the
        given format string; returns (byname, byuid) lookup dicts.
        """
        # [...missing lines...] (byuid/byname init and any_invalid flag elided)
        for x in self.keys.keys():
            if self.keys[x]["email"] == "invalid-uid":
                # [...missing lines...]
                self.keys[x]["uid"] = format % "invalid-uid"
            # [...missing lines...] (else branch header elided)
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

        # [...missing lines...] (guard: only when invalid uids were seen — TODO confirm)
        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)
1412 __all__.append('Keyring')
# Look up a Keyring row by name; returns None when no such keyring exists.
# NOTE(review): the @session_wrapper decorator and the try/return lines are
# not visible in this extract (numbering jumps 1427 -> 1431).
1415 def get_keyring(keyring, session=None):
1417 If C{keyring} does not have an entry in the C{keyrings} table yet, return None
1418 If C{keyring} already has an entry, simply return the existing Keyring
1420 @type keyring: string
1421 @param keyring: the keyring name
1424 @return: the Keyring object for this keyring
1427 q = session.query(Keyring).filter_by(keyring_name=keyring)
1431 except NoResultFound:
1434 __all__.append('get_keyring')
1436 ################################################################################
# ORM-mapped row of the keyring_acl_map table (mapping configured below
# in the file, outside this extract).
1438 class KeyringACLMap(object):
1439 def __init__(self, *args, **kwargs):
1443 return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1445 __all__.append('KeyringACLMap')
1447 ################################################################################
# ORM-mapped row of the changes table (an uploaded .changes file).
1449 class DBChange(object):
1450 def __init__(self, *args, **kwargs):
1454 return '<DBChange %s>' % self.changesname
# Detach this upload from its policy queue: delete the linking rows and
# clear the queue/approval columns.  The deletion statements themselves
# fall in lines missing from this extract (1458-1464).
1456 def clean_from_queue(self):
1457 session = DBConn().session().object_session(self)
1459 # Remove changes_pool_files entries
1462 # Remove changes_pending_files references
1465 # Clear out of queue
1466 self.in_queue = None
1467 self.approved_for_id = None
1469 __all__.append('DBChange')
# Look up a DBChange row by .changes file name; None when not found.
1472 def get_dbchange(filename, session=None):
1474 returns DBChange object for given C{filename}.
1476 @type filename: string
1477 @param filename: the name of the file
1479 @type session: Session
1480 @param session: Optional SQLA session object (a temporary one will be
1481 generated if not supplied)
1484 @return: DBChange object for the given filename (C{None} if not present)
1487 q = session.query(DBChange).filter_by(changesname=filename)
1491 except NoResultFound:
1494 __all__.append('get_dbchange')
1496 ################################################################################
# ORM-mapped row of the location table (an on-disk pool path).
1498 class Location(object):
1499 def __init__(self, *args, **kwargs):
1503 return '<Location %s (%s)>' % (self.path, self.location_id)
1505 __all__.append('Location')
# Look up a Location by path, optionally narrowed by component and/or
# archive name.  Returns None when no row matches (NoResultFound branch;
# the try/return lines are missing from this extract).
1508 def get_location(location, component=None, archive=None, session=None):
1510 Returns Location object for the given combination of location, component
1513 @type location: string
1514 @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}
1516 @type component: string
1517 @param component: the component name (if None, no restriction applied)
1519 @type archive: string
1520 @param archive: the archive name (if None, no restriction applied)
1522 @rtype: Location / None
1523 @return: Either a Location object or None if one can't be found
1526 q = session.query(Location).filter_by(path=location)
1528 if archive is not None:
1529 q = q.join(Archive).filter_by(archive_name=archive)
1531 if component is not None:
1532 q = q.join(Component).filter_by(component_name=component)
1536 except NoResultFound:
1539 __all__.append('get_location')
1541 ################################################################################
# ORM-mapped row of the maintainer table (an RFC822 "Name <email>" string).
1543 class Maintainer(object):
1544 def __init__(self, *args, **kwargs):
1548 return '''<Maintainer '%s' (%s)>''' % (self.name, self.maintainer_id)
# Split the stored maintainer string via textutils.fix_maintainer; returns
# an all-empty 4-tuple when no name is set.
1550 def get_split_maintainer(self):
1551 if not hasattr(self, 'name') or self.name is None:
1552 return ('', '', '', '')
1554 return fix_maintainer(self.name.strip())
1556 __all__.append('Maintainer')
# Fetch the Maintainer row for 'name', inserting it first if absent
# (classic get-or-create via the NoResultFound branch).
1559 def get_or_set_maintainer(name, session=None):
1561 Returns Maintainer object for given maintainer name.
1563 If no matching maintainer name is found, a row is inserted.
1566 @param name: The maintainer name to add
1568 @type session: SQLAlchemy
1569 @param session: Optional SQL session object (a temporary one will be
1570 generated if not supplied). If not passed, a commit will be performed at
1571 the end of the function, otherwise the caller is responsible for committing.
1572 A flush will be performed either way.
1575 @return: the Maintainer object for the given maintainer
1578 q = session.query(Maintainer).filter_by(name=name)
1581 except NoResultFound:
1582 maintainer = Maintainer()
1583 maintainer.name = name
1584 session.add(maintainer)
# commit_or_flush: commits for private sessions, flushes otherwise.
1585 session.commit_or_flush()
1590 __all__.append('get_or_set_maintainer')
# Primary-key lookup; Query.get returns None for an unknown id.
1593 def get_maintainer(maintainer_id, session=None):
1595 Return the name of the maintainer behind C{maintainer_id} or None if that
1596 maintainer_id is invalid.
1598 @type maintainer_id: int
1599 @param maintainer_id: the id of the maintainer
1602 @return: the Maintainer with this C{maintainer_id}
1605 return session.query(Maintainer).get(maintainer_id)
1607 __all__.append('get_maintainer')
1609 ################################################################################
# ORM-mapped row of the new_comments table (ftp-team NEW-queue notes).
1611 class NewComment(object):
1612 def __init__(self, *args, **kwargs):
1616 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1618 __all__.append('NewComment')
# True iff at least one NEW-queue comment exists for (package, version).
1621 def has_new_comment(package, version, session=None):
1623 Returns true if the given combination of C{package}, C{version} has a comment.
1625 @type package: string
1626 @param package: name of the package
1628 @type version: string
1629 @param version: package version
1631 @type session: Session
1632 @param session: Optional SQLA session object (a temporary one will be
1633 generated if not supplied)
1639 q = session.query(NewComment)
1640 q = q.filter_by(package=package)
1641 q = q.filter_by(version=version)
1643 return bool(q.count() > 0)
1645 __all__.append('has_new_comment')
# Fetch NewComment rows, each filter applied only when its argument is
# given.  The final 'return q.all()' falls in a line missing from this
# extract (numbering jumps 1673 -> 1677).
1648 def get_new_comments(package=None, version=None, comment_id=None, session=None):
1650 Returns (possibly empty) list of NewComment objects for the given
1653 @type package: string (optional)
1654 @param package: name of the package
1656 @type version: string (optional)
1657 @param version: package version
1659 @type comment_id: int (optional)
1660 @param comment_id: An id of a comment
1662 @type session: Session
1663 @param session: Optional SQLA session object (a temporary one will be
1664 generated if not supplied)
1667 @return: A (possibly empty) list of NewComment objects will be returned
1670 q = session.query(NewComment)
1671 if package is not None: q = q.filter_by(package=package)
1672 if version is not None: q = q.filter_by(version=version)
1673 if comment_id is not None: q = q.filter_by(comment_id=comment_id)
1677 __all__.append('get_new_comments')
1679 ################################################################################
# ORM-mapped row of the override table (per-suite section/priority data).
1681 class Override(object):
1682 def __init__(self, *args, **kwargs):
1686 return '<Override %s (%s)>' % (self.package, self.suite_id)
1688 __all__.append('Override')
# Fetch Override rows for a package; suite/component/overridetype each
# accept a single name or a list and are normalized to lists before the
# IN-filter join.  Despite the singular name, this returns a list (see
# @return below); the 'return q.all()' line is missing from this extract.
1691 def get_override(package, suite=None, component=None, overridetype=None, session=None):
1693 Returns Override object for the given parameters
1695 @type package: string
1696 @param package: The name of the package
1698 @type suite: string, list or None
1699 @param suite: The name of the suite (or suites if a list) to limit to. If
1700 None, don't limit. Defaults to None.
1702 @type component: string, list or None
1703 @param component: The name of the component (or components if a list) to
1704 limit to. If None, don't limit. Defaults to None.
1706 @type overridetype: string, list or None
1707 @param overridetype: The name of the overridetype (or overridetypes if a list) to
1708 limit to. If None, don't limit. Defaults to None.
1710 @type session: Session
1711 @param session: Optional SQLA session object (a temporary one will be
1712 generated if not supplied)
1715 @return: A (possibly empty) list of Override objects will be returned
1718 q = session.query(Override)
1719 q = q.filter_by(package=package)
1721 if suite is not None:
1722 if not isinstance(suite, list): suite = [suite]
1723 q = q.join(Suite).filter(Suite.suite_name.in_(suite))
1725 if component is not None:
1726 if not isinstance(component, list): component = [component]
1727 q = q.join(Component).filter(Component.component_name.in_(component))
1729 if overridetype is not None:
1730 if not isinstance(overridetype, list): overridetype = [overridetype]
1731 q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
1735 __all__.append('get_override')
1738 ################################################################################
# ORM-mapped row of the override_type table (e.g. deb / udeb / dsc).
1740 class OverrideType(object):
1741 def __init__(self, *args, **kwargs):
1745 return '<OverrideType %s>' % self.overridetype
1747 __all__.append('OverrideType')
# Look up an OverrideType row by name; None when not found.
# NOTE(review): the @return text says "database id" but the query selects
# the full OverrideType object -- likely a stale docstring upstream.
1750 def get_override_type(override_type, session=None):
1752 Returns OverrideType object for given C{override type}.
1754 @type override_type: string
1755 @param override_type: The name of the override type
1757 @type session: Session
1758 @param session: Optional SQLA session object (a temporary one will be
1759 generated if not supplied)
1762 @return: the database id for the given override type
1765 q = session.query(OverrideType).filter_by(overridetype=override_type)
1769 except NoResultFound:
1772 __all__.append('get_override_type')
1774 ################################################################################
class DebContents(object):
    """ORM-mapped row associating a binary (deb) package with one file path
    it ships.  Attributes (``package``, ``file``) are populated by the
    SQLAlchemy mapper configured elsewhere in this module.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Bug fix: repr previously read '<DebConetnts ...>' (typo).
        return '<DebContents %s: %s>' % (self.package.package, self.file)
# Export DebContents via the module's explicit public API list.
1783 __all__.append('DebContents')
class UdebContents(object):
    """ORM-mapped row associating a udeb (installer) package with one file
    path it ships.  Mirrors L{DebContents} for the udeb contents table.
    """

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Bug fix: repr previously read '<UdebConetnts ...>' (typo).
        return '<UdebContents %s: %s>' % (self.package.package, self.file)
# Export UdebContents via the module's explicit public API list.
1793 __all__.append('UdebContents')
# ORM-mapped row of the pending binary-contents table (paths recorded
# before a package is accepted).
1795 class PendingBinContents(object):
1796 def __init__(self, *args, **kwargs):
1800 return '<PendingBinContents %s>' % self.contents_id
1802 __all__.append('PendingBinContents')
# Record the file paths shipped by a not-yet-accepted binary package,
# replacing any previously recorded rows for the same
# (package, version, architecture).  Python 2 code ('except Exception, e').
# NOTE(review): extract is incomplete -- the signature's remaining
# parameters, the session.add() calls, and the commit/rollback/return
# statements fall in lines not shown here.
1804 def insert_pending_content_paths(package,
1809 Make sure given paths are temporarily associated with given
1813 @param package: the package to associate with should have been read in from the binary control file
1814 @type fullpaths: list
1815 @param fullpaths: the list of paths of the file being associated with the binary
1816 @type session: SQLAlchemy session
1817 @param session: Optional SQLAlchemy session. If this is passed, the caller
1818 is responsible for ensuring a transaction has begun and committing the
1819 results or rolling back based on the result code. If not passed, a commit
1820 will be performed at the end of the function
1822 @return: True upon success, False if there is a problem
# privatetrans tracks whether this function owns the session/transaction.
1825 privatetrans = False
1828 session = DBConn().session()
1832 arch = get_architecture(package['Architecture'], session)
1833 arch_id = arch.arch_id
1835 # Remove any already existing recorded files for this package
1836 q = session.query(PendingBinContents)
1837 q = q.filter_by(package=package['Package'])
1838 q = q.filter_by(version=package['Version'])
1839 q = q.filter_by(architecture=arch_id)
1842 for fullpath in fullpaths:
# Normalize tar-style "./" prefixes off recorded paths.
1844 if fullpath.startswith( "./" ):
1845 fullpath = fullpath[2:]
1847 pca = PendingBinContents()
1848 pca.package = package['Package']
1849 pca.version = package['Version']
1851 pca.architecture = arch_id
# Magic type codes; the branch condition selecting 8 vs 7 (udeb vs deb,
# presumably) is on a line missing from this extract -- TODO confirm.
1854 pca.type = 8 # gross
1856 pca.type = 7 # also gross
1859 # Only commit if we set up the session ourself
1867 except Exception, e:
1868 traceback.print_exc()
1870 # Only rollback if we set up the session ourself
1877 __all__.append('insert_pending_content_paths')
1879 ################################################################################
# ORM-mapped row of the policy_queue table (NEW, byhand, ... queues).
1881 class PolicyQueue(object):
1882 def __init__(self, *args, **kwargs):
1886 return '<PolicyQueue %s>' % self.queue_name
1888 __all__.append('PolicyQueue')
# Look up a PolicyQueue row by queue name; None when not found.
1891 def get_policy_queue(queuename, session=None):
1893 Returns PolicyQueue object for given C{queue name}
1895 @type queuename: string
1896 @param queuename: The name of the queue
1898 @type session: Session
1899 @param session: Optional SQLA session object (a temporary one will be
1900 generated if not supplied)
1903 @return: PolicyQueue object for the given queue
1906 q = session.query(PolicyQueue).filter_by(queue_name=queuename)
1910 except NoResultFound:
1913 __all__.append('get_policy_queue')
1915 ################################################################################
# ORM-mapped row of the priority table.  __eq__/__ne__ allow comparing a
# Priority instance directly against a plain priority-name string; any
# other operand falls back to default comparison via NotImplemented.
1917 class Priority(object):
1918 def __init__(self, *args, **kwargs):
1921 def __eq__(self, val):
1922 if isinstance(val, str):
1923 return (self.priority == val)
1924 # This signals to use the normal comparison operator
1925 return NotImplemented
1927 def __ne__(self, val):
1928 if isinstance(val, str):
1929 return (self.priority != val)
1930 # This signals to use the normal comparison operator
1931 return NotImplemented
1934 return '<Priority %s (%s)>' % (self.priority, self.priority_id)
1936 __all__.append('Priority')
# Look up a Priority row by name; None when not found.
1939 def get_priority(priority, session=None):
1941 Returns Priority object for given C{priority name}.
1943 @type priority: string
1944 @param priority: The name of the priority
1946 @type session: Session
1947 @param session: Optional SQLA session object (a temporary one will be
1948 generated if not supplied)
1951 @return: Priority object for the given priority
1954 q = session.query(Priority).filter_by(priority=priority)
1958 except NoResultFound:
1961 __all__.append('get_priority')
# Build a {priority name: priority_id} dict over all Priority rows.
# The surrounding loop/return lines are missing from this extract.
1964 def get_priorities(session=None):
1966 Returns dictionary of priority names -> id mappings
1968 @type session: Session
1969 @param session: Optional SQL session object (a temporary one will be
1970 generated if not supplied)
1973 @return: dictionary of priority names -> id mappings
1977 q = session.query(Priority)
1979 ret[x.priority] = x.priority_id
1983 __all__.append('get_priorities')
1985 ################################################################################
# ORM-mapped row of the section table.  Like Priority, string operands
# compare against the section name; others return NotImplemented.
1987 class Section(object):
1988 def __init__(self, *args, **kwargs):
1991 def __eq__(self, val):
1992 if isinstance(val, str):
1993 return (self.section == val)
1994 # This signals to use the normal comparison operator
1995 return NotImplemented
1997 def __ne__(self, val):
1998 if isinstance(val, str):
1999 return (self.section != val)
2000 # This signals to use the normal comparison operator
2001 return NotImplemented
2004 return '<Section %s>' % self.section
2006 __all__.append('Section')
# Look up a Section row by name; None when not found.
2009 def get_section(section, session=None):
2011 Returns Section object for given C{section name}.
2013 @type section: string
2014 @param section: The name of the section
2016 @type session: Session
2017 @param session: Optional SQLA session object (a temporary one will be
2018 generated if not supplied)
2021 @return: Section object for the given section name
2024 q = session.query(Section).filter_by(section=section)
2028 except NoResultFound:
2031 __all__.append('get_section')
# Build a {section name: section_id} dict over all Section rows.
# The surrounding loop/return lines are missing from this extract.
2034 def get_sections(session=None):
2036 Returns dictionary of section names -> id mappings
2038 @type session: Session
2039 @param session: Optional SQL session object (a temporary one will be
2040 generated if not supplied)
2043 @return: dictionary of section names -> id mappings
2047 q = session.query(Section)
2049 ret[x.section] = x.section_id
2053 __all__.append('get_sections')
2055 ################################################################################
# ORM-mapped row of the source table (a source package + version).
2057 class DBSource(object):
2058 def __init__(self, *args, **kwargs):
2062 return '<DBSource %s (%s)>' % (self.source, self.version)
2064 __all__.append('DBSource')
# Check that the source package for a binary upload exists in one of the
# given suites (following configured suite mappings), accepting either an
# exact version match or the version with a bin-NMU suffix stripped.
# NOTE(review): extract is incomplete -- the suite-expansion loop body and
# the ok/return bookkeeping fall on lines not shown here.
2067 def source_exists(source, source_version, suites = ["any"], session=None):
2069 Ensure that source exists somewhere in the archive for the binary
2070 upload being processed.
2071 1. exact match => 1.0-3
2072 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
2074 @type source: string
2075 @param source: source name
2077 @type source_version: string
2078 @param source_version: expected source version
2081 @param suites: list of suites to check in, default I{any}
2083 @type session: Session
2084 @param session: Optional SQLA session object (a temporary one will be
2085 generated if not supplied)
2088 @return: returns 1 if a source with expected version is found, otherwise 0
2095 for suite in suites:
2096 q = session.query(DBSource).filter_by(source=source)
2098 # source must exist in suite X, or in some other suite that's
2099 # mapped to X, recursively... silent-maps are counted too,
2100 # unreleased-maps aren't.
2101 maps = cnf.ValueList("SuiteMappings")[:]
2103 maps = [ m.split() for m in maps ]
2104 maps = [ (x[1], x[2]) for x in maps
2105 if x[0] == "map" or x[0] == "silent-map" ]
# Transitive closure: keep adding mapped suites until nothing new appears.
2108 if x[1] in s and x[0] not in s:
2111 q = q.join(SrcAssociation).join(Suite)
2112 q = q.filter(Suite.suite_name.in_(s))
2114 # Reduce the query results to a list of version numbers
2115 ql = [ j.version for j in q.all() ]
2118 if source_version in ql:
# Fallback: accept if the upload is a binary-only NMU of a known version.
2122 from daklib.regexes import re_bin_only_nmu
2123 orig_source_version = re_bin_only_nmu.sub('', source_version)
2124 if orig_source_version in ql:
2127 # No source found so return not ok
2132 __all__.append('source_exists')
# All suites containing any version of the named source package.
2135 def get_suites_source_in(source, session=None):
2137 Returns list of Suite objects which given C{source} name is in
2140 @param source: DBSource package name to search for
2143 @return: list of Suite objects for the given source
2146 return session.query(Suite).join(SrcAssociation).join(DBSource).filter_by(source=source).all()
2148 __all__.append('get_suites_source_in')
# Fetch DBSource rows by name, optionally restricted by exact version and
# by the dm_upload_allowed flag.  The final 'return q.all()' is on a line
# missing from this extract.
2151 def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
2153 Returns list of DBSource objects for given C{source} name and other parameters
2156 @param source: DBSource package name to search for
2158 @type version: str or None
2159 @param version: DBSource version name to search for or None if not applicable
2161 @type dm_upload_allowed: bool
2162 @param dm_upload_allowed: If None, no effect. If True or False, only
2163 return packages with that dm_upload_allowed setting
2165 @type session: Session
2166 @param session: Optional SQL session object (a temporary one will be
2167 generated if not supplied)
2170 @return: list of DBSource objects for the given name (may be empty)
2173 q = session.query(DBSource).filter_by(source=source)
2175 if version is not None:
2176 q = q.filter_by(version=version)
2178 if dm_upload_allowed is not None:
2179 q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2183 __all__.append('get_sources_from_name')
# Resolve the single DBSource for (source, suite) via the association
# table; q.one() raises NoResultFound when absent (handled below).
2186 def get_source_in_suite(source, suite, session=None):
2188 Returns list of DBSource objects for a combination of C{source} and C{suite}.
2190 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2191 - B{suite} - a suite name, eg. I{unstable}
2193 @type source: string
2194 @param source: source package name
2197 @param suite: the suite name
2200 @return: the version for I{source} in I{suite}
2204 q = session.query(SrcAssociation)
2205 q = q.join('source').filter_by(source=source)
2206 q = q.join('suite').filter_by(suite_name=suite)
2209 return q.one().source
2210 except NoResultFound:
2213 __all__.append('get_source_in_suite')
2215 ################################################################################
# Insert a freshly-accepted source package (.dsc) into the database:
# creates the DBSource row, its pool file, per-suite SrcAssociations,
# dsc_files rows for every file listed in the .dsc, and SrcUploader rows.
# Returns (source, dsc_component, dsc_location_id, pfs) where pfs is the
# list of pool files touched.  Deliberately left byte-identical: this
# extract is missing many lines and the logic depends on exact statement
# order; comments below are hedged where lines are absent.
2218 def add_dsc_to_db(u, filename, session=None):
2219 entry = u.pkg.files[filename]
2223 source.source = u.pkg.dsc["source"]
2224 source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
2225 source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
2226 source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
2227 source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2228 source.install_date = datetime.now().date()
2230 dsc_component = entry["component"]
2231 dsc_location_id = entry["location id"]
2233 source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
2235 # Set up a new poolfile if necessary
2236 if not entry.has_key("files id") or not entry["files id"]:
# NOTE(review): 'filename' is rebound here from upload key to pool path.
2237 filename = entry["pool name"] + filename
2238 poolfile = add_poolfile(filename, entry, dsc_location_id, session)
2240 pfs.append(poolfile)
2241 entry["files id"] = poolfile.file_id
2243 source.poolfile_id = entry["files id"]
# One SrcAssociation per target suite named in the .changes distribution.
2247 for suite_name in u.pkg.changes["distribution"].keys():
2248 sa = SrcAssociation()
2249 sa.source_id = source.source_id
2250 sa.suite_id = get_suite(suite_name).suite_id
2255 # Add the source files to the DB (files and dsc_files)
2257 dscfile.source_id = source.source_id
2258 dscfile.poolfile_id = entry["files id"]
2259 session.add(dscfile)
2261 for dsc_file, dentry in u.pkg.dsc_files.items():
2263 df.source_id = source.source_id
2265 # If the .orig tarball is already in the pool, it's
2266 # files id is stored in dsc_files by check_dsc().
2267 files_id = dentry.get("files id", None)
2269 # Find the entry in the files hash
2270 # TODO: Bail out here properly
2272 for f, e in u.pkg.files.items():
2277 if files_id is None:
2278 filename = dfentry["pool name"] + dsc_file
2280 (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
2281 # FIXME: needs to check for -1/-2 and or handle exception
2282 if found and obj is not None:
2283 files_id = obj.file_id
2286 # If still not found, add it
2287 if files_id is None:
2288 # HACK: Force sha1sum etc into dentry
2289 dentry["sha1sum"] = dfentry["sha1sum"]
2290 dentry["sha256sum"] = dfentry["sha256sum"]
2291 poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
2292 pfs.append(poolfile)
2293 files_id = poolfile.file_id
2295 poolfile = get_poolfile_by_id(files_id, session)
2296 if poolfile is None:
2297 utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
2298 pfs.append(poolfile)
2300 df.poolfile_id = files_id
2305 # Add the src_uploaders to the DB
2306 uploader_ids = [source.maintainer_id]
2307 if u.pkg.dsc.has_key("uploaders"):
# Split on ">, " so commas inside maintainer names don't break parsing.
2308 for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
2310 uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
# added_ids de-duplicates uploader entries (warns on repeats).
2313 for up_id in uploader_ids:
2314 if added_ids.has_key(up_id):
2316 utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
2322 su.maintainer_id = up_id
2323 su.source_id = source.source_id
2328 return source, dsc_component, dsc_location_id, pfs
2330 __all__.append('add_dsc_to_db')
# Insert a freshly-accepted binary package (deb or udeb -- distinguished
# by entry["dbtype"]) into the database: DBBinary row, pool file, source
# linkage and per-suite BinAssociations.  Left byte-identical: extract is
# missing lines and uses Python 2 'raise Exc, msg' syntax.
2333 def add_deb_to_db(u, filename, session=None):
2335 Contrary to what you might expect, this routine deals with both
2336 debs and udebs. That info is in 'dbtype', whilst 'type' is
2337 'deb' for both of them
2340 entry = u.pkg.files[filename]
2343 bin.package = entry["package"]
2344 bin.version = entry["version"]
2345 bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
2346 bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2347 bin.arch_id = get_architecture(entry["architecture"], session).arch_id
2348 bin.binarytype = entry["dbtype"]
# Rebind 'filename' to the pool-relative path and resolve its location.
2351 filename = entry["pool name"] + filename
2352 fullpath = os.path.join(cnf["Dir::Pool"], filename)
2353 if not entry.get("location id", None):
2354 entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id
2356 if entry.get("files id", None):
# NOTE(review): bin.poolfile_id is read here before being assigned on the
# next line -- looks suspicious; confirm against upstream before changing.
2357 poolfile = get_poolfile_by_id(bin.poolfile_id)
2358 bin.poolfile_id = entry["files id"]
2360 poolfile = add_poolfile(filename, entry, entry["location id"], session)
2361 bin.poolfile_id = entry["files id"] = poolfile.file_id
# The binary must map to exactly one known source package+version.
2364 bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
2365 if len(bin_sources) != 1:
2366 raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
2367 (bin.package, bin.version, bin.architecture.arch_string,
2368 filename, bin.binarytype, u.pkg.changes["fingerprint"])
2370 bin.source_id = bin_sources[0].source_id
2372 # Add and flush object so it has an ID
2376 # Add BinAssociations
2377 for suite_name in u.pkg.changes["distribution"].keys():
2378 ba = BinAssociation()
2379 ba.binary_id = bin.binary_id
2380 ba.suite_id = get_suite(suite_name).suite_id
2385 # Deal with contents - disabled for now
2386 #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
2388 # print "REJECT\nCould not determine contents of package %s" % bin.package
2389 # session.rollback()
2390 # raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
2394 __all__.append('add_deb_to_db')
2396 ################################################################################
# ORM-mapped row of the source_acl table.
2398 class SourceACL(object):
2399 def __init__(self, *args, **kwargs):
2403 return '<SourceACL %s>' % self.source_acl_id
2405 __all__.append('SourceACL')
2407 ################################################################################
# ORM-mapped link row: source package <-> suite membership.
2409 class SrcAssociation(object):
2410 def __init__(self, *args, **kwargs):
2414 return '<SrcAssociation %s (%s, %s)>' % (self.sa_id, self.source, self.suite)
2416 __all__.append('SrcAssociation')
2418 ################################################################################
# ORM-mapped row of the src_format table (e.g. "1.0", "3.0 (quilt)").
2420 class SrcFormat(object):
2421 def __init__(self, *args, **kwargs):
2425 return '<SrcFormat %s>' % (self.format_name)
2427 __all__.append('SrcFormat')
2429 ################################################################################
# ORM-mapped link row: source package <-> uploader (maintainer).
2431 class SrcUploader(object):
2432 def __init__(self, *args, **kwargs):
2436 return '<SrcUploader %s>' % self.uploader_id
2438 __all__.append('SrcUploader')
2440 ################################################################################
# (display label, Suite attribute) pairs used by Suite.details() below to
# render a suite's configuration as readable text.
2442 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2443 ('SuiteID', 'suite_id'),
2444 ('Version', 'version'),
2445 ('Origin', 'origin'),
2447 ('Description', 'description'),
2448 ('Untouchable', 'untouchable'),
2449 ('Announce', 'announce'),
2450 ('Codename', 'codename'),
2451 ('OverrideCodename', 'overridecodename'),
2452 ('ValidTime', 'validtime'),
2453 ('Priority', 'priority'),
2454 ('NotAutomatic', 'notautomatic'),
2455 ('CopyChanges', 'copychanges'),
2456 ('CopyDotDak', 'copydotdak'),
2457 ('CommentsDir', 'commentsdir'),
2458 ('OverrideSuite', 'overridesuite'),
2459 ('ChangelogBase', 'changelogbase')]
# ORM-mapped row of the suite table.  String operands compare against the
# suite name; the method at the bottom (its 'def' line is missing from
# this extract -- presumably details()) renders SUITE_FIELDS as text.
2462 class Suite(object):
2463 def __init__(self, *args, **kwargs):
2467 return '<Suite %s>' % self.suite_name
2469 def __eq__(self, val):
2470 if isinstance(val, str):
2471 return (self.suite_name == val)
2472 # This signals to use the normal comparison operator
2473 return NotImplemented
2475 def __ne__(self, val):
2476 if isinstance(val, str):
2477 return (self.suite_name != val)
2478 # This signals to use the normal comparison operator
2479 return NotImplemented
# Collect "Label: value" lines for every set SUITE_FIELDS attribute.
2483 for disp, field in SUITE_FIELDS:
2484 val = getattr(self, field, None)
2486 ret.append("%s: %s" % (disp, val))
2488 return "\n".join(ret)
2490 __all__.append('Suite')
# Look up the SuiteArchitecture link row for (suite, architecture);
# None when the pair is not associated.
2493 def get_suite_architecture(suite, architecture, session=None):
2495 Returns a SuiteArchitecture object given C{suite} and ${arch} or None if it
2499 @param suite: Suite name to search for
2501 @type architecture: str
2502 @param architecture: Architecture name to search for
2504 @type session: Session
2505 @param session: Optional SQL session object (a temporary one will be
2506 generated if not supplied)
2508 @rtype: SuiteArchitecture
2509 @return: the SuiteArchitecture object or None
2512 q = session.query(SuiteArchitecture)
2513 q = q.join(Architecture).filter_by(arch_string=architecture)
2514 q = q.join(Suite).filter_by(suite_name=suite)
2518 except NoResultFound:
2521 __all__.append('get_suite_architecture')
# Look up a Suite row by name; None when not found.
2524 def get_suite(suite, session=None):
2526 Returns Suite object for given C{suite name}.
2529 @param suite: The name of the suite
2531 @type session: Session
2532 @param session: Optional SQLA session object (a temporary one will be
2533 generated if not supplied)
2536 @return: Suite object for the requested suite name (None if not present)
2539 q = session.query(Suite).filter_by(suite_name=suite)
2543 except NoResultFound:
2546 __all__.append('get_suite')
2548 ################################################################################
# ORM-mapped link row: suite <-> architecture.
2550 class SuiteArchitecture(object):
2551 def __init__(self, *args, **kwargs):
2555 return '<SuiteArchitecture (%s, %s)>' % (self.suite_id, self.arch_id)
2557 __all__.append('SuiteArchitecture')
# Architectures enabled for a suite, sorted by name, optionally excluding
# the pseudo-architectures 'source' and 'all'.  The 'return q.all()' line
# is missing from this extract.
2560 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2562 Returns list of Architecture objects for given C{suite} name
2565 @param suite: Suite name to search for
2567 @type skipsrc: boolean
2568 @param skipsrc: Whether to skip returning the 'source' architecture entry
2571 @type skipall: boolean
2572 @param skipall: Whether to skip returning the 'all' architecture entry
2575 @type session: Session
2576 @param session: Optional SQL session object (a temporary one will be
2577 generated if not supplied)
2580 @return: list of Architecture objects for the given name (may be empty)
2583 q = session.query(Architecture)
2584 q = q.join(SuiteArchitecture)
2585 q = q.join(Suite).filter_by(suite_name=suite)
2588 q = q.filter(Architecture.arch_string != 'source')
2591 q = q.filter(Architecture.arch_string != 'all')
2593 q = q.order_by('arch_string')
2597 __all__.append('get_suite_architectures')
2599 ################################################################################
# ORM-mapped link row: suite <-> allowed source format.
2601 class SuiteSrcFormat(object):
2602 def __init__(self, *args, **kwargs):
2606 return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2608 __all__.append('SuiteSrcFormat')
# Source formats a suite accepts, ordered by format name.  The final
# 'return q.all()' line is missing from this extract.
2611 def get_suite_src_formats(suite, session=None):
2613 Returns list of allowed SrcFormat for C{suite}.
2616 @param suite: Suite name to search for
2618 @type session: Session
2619 @param session: Optional SQL session object (a temporary one will be
2620 generated if not supplied)
2623 @return: the list of allowed source formats for I{suite}
2626 q = session.query(SrcFormat)
2627 q = q.join(SuiteSrcFormat)
2628 q = q.join(Suite).filter_by(suite_name=suite)
2629 q = q.order_by('format_name')
2633 __all__.append('get_suite_src_formats')
2635 ################################################################################
2638 def __init__(self, *args, **kwargs):
def __eq__(self, val):
    """Allow a Uid to be compared for equality against a plain string."""
    if not isinstance(val, str):
        # Anything other than a string defers to the default
        # comparison machinery.
        return NotImplemented
    return self.uid == val
def __ne__(self, val):
    """Mirror __eq__: inequality against a plain string checks the uid."""
    if not isinstance(val, str):
        # Anything other than a string defers to the default
        # comparison machinery.
        return NotImplemented
    return self.uid != val
def __repr__(self):
    # Human-readable form for debugging: key uid plus the associated name.
    return '<Uid %s (%s)>' % (self.uid, self.name)
__all__.append('Uid')  # publish in the module's public API
def add_database_user(uidname, session=None):
    """
    Adds a database user

    @type uidname: string
    @param uidname: The uid of the user to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @return: the uid object for the given uidname
    """
    # NOTE(review): PostgreSQL does not substitute bind parameters into DDL
    # identifiers, so ":uid" here is unlikely to be replaced by the driver --
    # confirm this statement actually works against the target database.
    session.execute("CREATE USER :uid", {'uid': uidname})
    # commit_or_flush() is not a stock SQLAlchemy Session method; presumably
    # it is added by a session wrapper elsewhere in this module -- confirm.
    session.commit_or_flush()
    # NOTE(review): despite the docstring, no uid object is returned here.

__all__.append('add_database_user')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @return: the uid object for the given uidname
    """

    q = session.query(Uid).filter_by(uid=uidname)

    # NOTE(review): the try block that evaluates the query (presumably
    # "ret = q.one()") is not visible in this view.
    except NoResultFound:
        # NOTE(review): the lines creating the new Uid row, adding it to the
        # session, and returning it are not visible in this view; only the
        # commit/flush survives.
        session.commit_or_flush()

__all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """
    Look up the Uid associated with a key fingerprint.

    @type fpr: string
    @param fpr: full fingerprint to search on

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: the matching Uid object; behaviour when no row matches is not
    visible in this view (presumably returns None -- confirm)
    """
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    # NOTE(review): the try block evaluating the query (presumably
    # "return q.one()") and the NoResultFound handler body are not visible
    # in this view.
    except NoResultFound:

__all__.append('get_uid_from_fingerprint')
2726 ################################################################################
class UploadBlock(object):
    """
    Represents a block on uploads for a given source package.

    Attributes (source, upload_block_id, fingerprint, uid, ...) are
    populated by the SQLAlchemy mapper set up in DBConn.
    """
    def __init__(self, *args, **kwargs):
        # Columns are filled in by the ORM mapper; nothing to initialise here.
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
__all__.append('UploadBlock')  # publish in the module's public API
2737 ################################################################################
class DBConn(object):
    """
    database module init.
    """
    # NOTE(review): the assignment in __init__ below indicates the Borg
    # (shared-state) pattern, but the __shared_state class attribute is not
    # visible in this view -- confirm it is defined on the class.
    def __init__(self, *args, **kwargs):
        # All instances share one state dict, so the engine/metadata/mappers
        # are set up at most once, process-wide.
        self.__dict__ = self.__shared_state

        if not getattr(self, 'initialised', False):
            self.initialised = True
            # Python 2 idiom (dict.has_key); True when a 'debug' kwarg was
            # given -- used below to enable SQL echo on the engine.
            self.debug = kwargs.has_key('debug')
            # NOTE(review): the call performing the actual setup (presumably
            # self.__createconn()) is not visible in this view.

    def __setuptables(self):
        # Reflect each listed table from the live database and publish it as
        # an attribute named 'tbl_<table_name>'.
        # NOTE(review): the opening "tables = (" of this sequence literal is
        # not visible in this view, and the list appears truncated.
            'build_queue_files',
            'changes_pending_binaries',
            'changes_pending_files',
            'changes_pending_files_map',
            'changes_pending_source',
            'changes_pending_source_files',
            'changes_pool_files',
            'pending_bin_contents',
            'suite_architectures',
            'suite_src_formats',
            'suite_build_queue_copy',

        for table_name in tables:
            # autoload=True: column definitions come from the database itself.
            table = Table(table_name, self.db_meta, autoload=True)
            setattr(self, 'tbl_%s' % table_name, table)

    def __setupmappers(self):
        # Wire each ORM class in this module onto its reflected table.
        # Property names deliberately mirror the attribute names the rest of
        # the codebase uses (e.g. arch_id rather than the raw column 'id').
        mapper(Architecture, self.tbl_architecture,
               properties = dict(arch_id = self.tbl_architecture.c.id))

        mapper(Archive, self.tbl_archive,
               properties = dict(archive_id = self.tbl_archive.c.id,
                                 archive_name = self.tbl_archive.c.name))

        mapper(BinAssociation, self.tbl_bin_associations,
               properties = dict(ba_id = self.tbl_bin_associations.c.id,
                                 suite_id = self.tbl_bin_associations.c.suite,
                                 suite = relation(Suite),
                                 binary_id = self.tbl_bin_associations.c.bin,
                                 binary = relation(DBBinary)))

        mapper(PendingBinContents, self.tbl_pending_bin_contents,
               properties = dict(contents_id = self.tbl_pending_bin_contents.c.id,
                                 filename = self.tbl_pending_bin_contents.c.filename,
                                 package = self.tbl_pending_bin_contents.c.package,
                                 version = self.tbl_pending_bin_contents.c.version,
                                 arch = self.tbl_pending_bin_contents.c.arch,
                                 otype = self.tbl_pending_bin_contents.c.type))

        mapper(DebContents, self.tbl_deb_contents,
               properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
                                 package=self.tbl_deb_contents.c.package,
                                 suite=self.tbl_deb_contents.c.suite,
                                 arch=self.tbl_deb_contents.c.arch,
                                 section=self.tbl_deb_contents.c.section,
                                 filename=self.tbl_deb_contents.c.filename))

        mapper(UdebContents, self.tbl_udeb_contents,
               properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
                                 package=self.tbl_udeb_contents.c.package,
                                 suite=self.tbl_udeb_contents.c.suite,
                                 arch=self.tbl_udeb_contents.c.arch,
                                 section=self.tbl_udeb_contents.c.section,
                                 filename=self.tbl_udeb_contents.c.filename))

        mapper(BuildQueue, self.tbl_build_queue,
               properties = dict(queue_id = self.tbl_build_queue.c.id))

        mapper(BuildQueueFile, self.tbl_build_queue_files,
               properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
                                 poolfile = relation(PoolFile, backref='buildqueueinstances')))

        mapper(DBBinary, self.tbl_binaries,
               properties = dict(binary_id = self.tbl_binaries.c.id,
                                 package = self.tbl_binaries.c.package,
                                 version = self.tbl_binaries.c.version,
                                 maintainer_id = self.tbl_binaries.c.maintainer,
                                 maintainer = relation(Maintainer),
                                 source_id = self.tbl_binaries.c.source,
                                 source = relation(DBSource),
                                 arch_id = self.tbl_binaries.c.architecture,
                                 architecture = relation(Architecture),
                                 poolfile_id = self.tbl_binaries.c.file,
                                 poolfile = relation(PoolFile),
                                 binarytype = self.tbl_binaries.c.type,
                                 fingerprint_id = self.tbl_binaries.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 install_date = self.tbl_binaries.c.install_date,
                                 binassociations = relation(BinAssociation,
                                                            primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))

        mapper(BinaryACL, self.tbl_binary_acl,
               properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))

        mapper(BinaryACLMap, self.tbl_binary_acl_map,
               properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
                                 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Component, self.tbl_component,
               properties = dict(component_id = self.tbl_component.c.id,
                                 component_name = self.tbl_component.c.name))

        mapper(DBConfig, self.tbl_config,
               properties = dict(config_id = self.tbl_config.c.id))

        mapper(DSCFile, self.tbl_dsc_files,
               properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
                                 source_id = self.tbl_dsc_files.c.source,
                                 source = relation(DBSource),
                                 poolfile_id = self.tbl_dsc_files.c.file,
                                 poolfile = relation(PoolFile)))

        mapper(PoolFile, self.tbl_files,
               properties = dict(file_id = self.tbl_files.c.id,
                                 filesize = self.tbl_files.c.size,
                                 location_id = self.tbl_files.c.location,
                                 location = relation(Location)))

        mapper(Fingerprint, self.tbl_fingerprint,
               properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
                                 uid_id = self.tbl_fingerprint.c.uid,
                                 uid = relation(Uid),
                                 keyring_id = self.tbl_fingerprint.c.keyring,
                                 keyring = relation(Keyring),
                                 source_acl = relation(SourceACL),
                                 binary_acl = relation(BinaryACL)))

        mapper(Keyring, self.tbl_keyrings,
               properties = dict(keyring_name = self.tbl_keyrings.c.name,
                                 keyring_id = self.tbl_keyrings.c.id))

        # DBChange carries both plain columns and several many-to-many links
        # (pool files, pending files) via secondary tables.
        mapper(DBChange, self.tbl_changes,
               properties = dict(change_id = self.tbl_changes.c.id,
                                 poolfiles = relation(PoolFile,
                                                      secondary=self.tbl_changes_pool_files,
                                                      backref="changeslinks"),
                                 seen = self.tbl_changes.c.seen,
                                 source = self.tbl_changes.c.source,
                                 binaries = self.tbl_changes.c.binaries,
                                 architecture = self.tbl_changes.c.architecture,
                                 distribution = self.tbl_changes.c.distribution,
                                 urgency = self.tbl_changes.c.urgency,
                                 maintainer = self.tbl_changes.c.maintainer,
                                 changedby = self.tbl_changes.c.changedby,
                                 date = self.tbl_changes.c.date,
                                 version = self.tbl_changes.c.version,
                                 files = relation(ChangePendingFile,
                                                  secondary=self.tbl_changes_pending_files_map,
                                                  backref="changesfile"),
                                 in_queue_id = self.tbl_changes.c.in_queue,
                                 in_queue = relation(PolicyQueue,
                                                     primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
                                 approved_for_id = self.tbl_changes.c.approved_for))

        mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
               properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))

        mapper(ChangePendingFile, self.tbl_changes_pending_files,
               properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
                                 filename = self.tbl_changes_pending_files.c.filename,
                                 size = self.tbl_changes_pending_files.c.size,
                                 md5sum = self.tbl_changes_pending_files.c.md5sum,
                                 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
                                 sha256sum = self.tbl_changes_pending_files.c.sha256sum))

        # maintainer and changedby both point at tbl_maintainer, so each
        # relation needs an explicit primaryjoin to disambiguate.
        mapper(ChangePendingSource, self.tbl_changes_pending_source,
               properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
                                 change = relation(DBChange),
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
                                 changedby = relation(Maintainer,
                                                      primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
                                 fingerprint = relation(Fingerprint),
                                 source_files = relation(ChangePendingFile,
                                                         secondary=self.tbl_changes_pending_source_files,
                                                         backref="pending_sources")))

        mapper(KeyringACLMap, self.tbl_keyring_acl_map,
               properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
                                 keyring = relation(Keyring, backref="keyring_acl_map"),
                                 architecture = relation(Architecture)))

        mapper(Location, self.tbl_location,
               properties = dict(location_id = self.tbl_location.c.id,
                                 component_id = self.tbl_location.c.component,
                                 component = relation(Component),
                                 archive_id = self.tbl_location.c.archive,
                                 archive = relation(Archive),
                                 archive_type = self.tbl_location.c.type))

        mapper(Maintainer, self.tbl_maintainer,
               properties = dict(maintainer_id = self.tbl_maintainer.c.id))

        mapper(NewComment, self.tbl_new_comments,
               properties = dict(comment_id = self.tbl_new_comments.c.id))

        mapper(Override, self.tbl_override,
               properties = dict(suite_id = self.tbl_override.c.suite,
                                 suite = relation(Suite),
                                 package = self.tbl_override.c.package,
                                 component_id = self.tbl_override.c.component,
                                 component = relation(Component),
                                 priority_id = self.tbl_override.c.priority,
                                 priority = relation(Priority),
                                 section_id = self.tbl_override.c.section,
                                 section = relation(Section),
                                 overridetype_id = self.tbl_override.c.type,
                                 overridetype = relation(OverrideType)))

        mapper(OverrideType, self.tbl_override_type,
               properties = dict(overridetype = self.tbl_override_type.c.type,
                                 overridetype_id = self.tbl_override_type.c.id))

        mapper(PolicyQueue, self.tbl_policy_queue,
               properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))

        mapper(Priority, self.tbl_priority,
               properties = dict(priority_id = self.tbl_priority.c.id))

        mapper(Section, self.tbl_section,
               properties = dict(section_id = self.tbl_section.c.id,
                                 section=self.tbl_section.c.section))

        mapper(DBSource, self.tbl_source,
               properties = dict(source_id = self.tbl_source.c.id,
                                 version = self.tbl_source.c.version,
                                 maintainer_id = self.tbl_source.c.maintainer,
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_source.c.maintainer==self.tbl_maintainer.c.id)),
                                 poolfile_id = self.tbl_source.c.file,
                                 poolfile = relation(PoolFile),
                                 fingerprint_id = self.tbl_source.c.sig_fpr,
                                 fingerprint = relation(Fingerprint),
                                 changedby_id = self.tbl_source.c.changedby,
                                 changedby = relation(Maintainer,
                                                      primaryjoin=(self.tbl_source.c.changedby==self.tbl_maintainer.c.id)),
                                 srcfiles = relation(DSCFile,
                                                     primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
                                 srcassociations = relation(SrcAssociation,
                                                            primaryjoin=(self.tbl_source.c.id==self.tbl_src_associations.c.source)),
                                 srcuploaders = relation(SrcUploader)))

        mapper(SourceACL, self.tbl_source_acl,
               properties = dict(source_acl_id = self.tbl_source_acl.c.id))

        mapper(SrcAssociation, self.tbl_src_associations,
               properties = dict(sa_id = self.tbl_src_associations.c.id,
                                 suite_id = self.tbl_src_associations.c.suite,
                                 suite = relation(Suite),
                                 source_id = self.tbl_src_associations.c.source,
                                 source = relation(DBSource)))

        mapper(SrcFormat, self.tbl_src_format,
               properties = dict(src_format_id = self.tbl_src_format.c.id,
                                 format_name = self.tbl_src_format.c.format_name))

        mapper(SrcUploader, self.tbl_src_uploaders,
               properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
                                 source_id = self.tbl_src_uploaders.c.source,
                                 source = relation(DBSource,
                                                   primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
                                 maintainer_id = self.tbl_src_uploaders.c.maintainer,
                                 maintainer = relation(Maintainer,
                                                       primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))

        mapper(Suite, self.tbl_suite,
               properties = dict(suite_id = self.tbl_suite.c.id,
                                 policy_queue = relation(PolicyQueue),
                                 copy_queues = relation(BuildQueue, secondary=self.tbl_suite_build_queue_copy)))

        mapper(SuiteArchitecture, self.tbl_suite_architectures,
               properties = dict(suite_id = self.tbl_suite_architectures.c.suite,
                                 suite = relation(Suite, backref='suitearchitectures'),
                                 arch_id = self.tbl_suite_architectures.c.architecture,
                                 architecture = relation(Architecture)))

        mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
               properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
                                 suite = relation(Suite, backref='suitesrcformats'),
                                 src_format_id = self.tbl_suite_src_formats.c.src_format,
                                 src_format = relation(SrcFormat)))

        mapper(Uid, self.tbl_uid,
               properties = dict(uid_id = self.tbl_uid.c.id,
                                 fingerprint = relation(Fingerprint)))

        mapper(UploadBlock, self.tbl_upload_blocks,
               properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
                                 fingerprint = relation(Fingerprint, backref="uploadblocks"),
                                 uid = relation(Uid, backref="uploadblocks")))

    ## Connection functions
    def __createconn(self):
        # Build the connection string from the DB:: section of the dak
        # configuration, then create the engine and reflect/map everything.
        from config import Config
        # NOTE(review): "cnf = Config()" and the "if cnf["DB::Host"]:" /
        # "else:" branch structure are not visible in this view; as shown,
        # the TCP-style connstr built here would be unconditionally
        # overwritten by the unix-socket one below -- confirm the branching.
        connstr = "postgres://%s" % cnf["DB::Host"]
        if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
            connstr += ":%s" % cnf["DB::Port"]
        connstr += "/%s" % cnf["DB::Name"]

        # Unix-socket variant: the port travels as a query parameter.
        connstr = "postgres:///%s" % cnf["DB::Name"]
        if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
            connstr += "?port=%s" % cnf["DB::Port"]

        # echo=self.debug makes SQLAlchemy log every SQL statement when the
        # DBConn was constructed with debug=...
        self.db_pg = create_engine(connstr, echo=self.debug)
        self.db_meta = MetaData()
        self.db_meta.bind = self.db_pg
        # NOTE(review): the remaining sessionmaker keyword arguments and the
        # closing parenthesis are not visible in this view.
        self.db_smaker = sessionmaker(bind=self.db_pg,

        self.__setuptables()
        self.__setupmappers()

    # NOTE(review): the enclosing "def session(self):" line is not visible in
    # this view; the return below presumably belongs to it and hands callers
    # a fresh Session from the factory created in __createconn().
        return self.db_smaker()

__all__.append('DBConn')