5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
41 from datetime import datetime, timedelta
42 from errno import ENOENT
43 from tempfile import mkstemp, mkdtemp
45 from inspect import getargspec
48 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
49 from sqlalchemy.orm import sessionmaker, mapper, relation
50 from sqlalchemy import types as sqltypes
52 # Don't remove this, we re-export the exceptions to scripts which import us
53 from sqlalchemy.exc import *
54 from sqlalchemy.orm.exc import NoResultFound
56 # Only import Config until Queue stuff is changed to store its config
58 from config import Config
59 from textutils import fix_maintainer
60 from dak_exceptions import NoSourceFieldError
62 ################################################################################
# Patch in support for the debversion field type so that it works during
# reflection: choose the base class for DebVersion by SQLAlchemy version.
# that is for sqlalchemy 0.6
UserDefinedType = sqltypes.UserDefinedType
# this one for sqlalchemy 0.5
# NOTE(review): as shown here the 0.5 fallback unconditionally overwrites the
# 0.6 binding above -- these two assignments are presumably guarded by a
# version check / try-except in the full module; confirm before relying on it.
UserDefinedType = sqltypes.TypeEngine
# Custom SQLAlchemy column type for PostgreSQL's 'debversion' type.
class DebVersion(UserDefinedType):
    # SQLAlchemy type-API hooks (see TypeEngine/UserDefinedType contract).
    def get_col_spec(self):
    def bind_processor(self, dialect):
    # ' = None' is needed for sqlalchemy 0.5:
    def result_processor(self, dialect, coltype = None):
# Register DebVersion for schema reflection on supported SQLAlchemy versions;
# anything else is rejected outright.
sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version in ["0.5", "0.6"]:
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
# NOTE(review): this raise is presumably on an else: branch -- confirm.
raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")

# Public API of this module; extended via __all__.append() after each
# definition below.
__all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
96 ################################################################################
# Decorator used by most helpers below to standardise the optional trailing
# 'session' argument.
def session_wrapper(fn):
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.
    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    def wrapped(*args, **kwargs):
        private_transaction = False
        # Find the session object
        session = kwargs.get('session')
        # getargspec(fn)[0] is fn's positional parameter list; fewer args
        # than that means no session was passed positionally either.
        if len(args) <= len(getargspec(fn)[0]) - 1:
            # No session specified as last argument or in kwargs
            private_transaction = True
            session = kwargs['session'] = DBConn().session()
        # Session is last argument in args
        session = args[-1] = DBConn().session()
        private_transaction = True
        # If we own the session, commit_or_flush really commits; otherwise it
        # only flushes and the caller remains responsible for committing.
        if private_transaction:
            session.commit_or_flush = session.commit
            session.commit_or_flush = session.flush
        return fn(*args, **kwargs)
        if private_transaction:
            # We created a session; close it.
    # Preserve the wrapped function's metadata for introspection.
    wrapped.__doc__ = fn.__doc__
    # NOTE(review): fn.func_name is Python 2 only (fn.__name__ on Python 3).
    wrapped.func_name = fn.func_name

__all__.append('session_wrapper')
147 ################################################################################
# ORM object for the architecture table.
class Architecture(object):
    def __init__(self, *args, **kwargs):
    # Allow direct comparison against plain architecture-name strings.
    def __eq__(self, val):
        if isinstance(val, str):
            return (self.arch_string== val)
        # This signals to use the normal comparison operator
        return NotImplemented
    def __ne__(self, val):
        if isinstance(val, str):
            return (self.arch_string != val)
        # This signals to use the normal comparison operator
        return NotImplemented
        # Debug representation.
        return '<Architecture %s>' % self.arch_string

__all__.append('Architecture')
def get_architecture(architecture, session=None):
    Returns database id for given C{architecture}.
    @type architecture: string
    @param architecture: The name of the architecture
    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)
    @return: Architecture object for the given arch (None if not present)
    # Single-row lookup by exact architecture name.
    q = session.query(Architecture).filter_by(arch_string=architecture)
    # No matching row: the caller gets None instead of an exception.
    except NoResultFound:

__all__.append('get_architecture')
def get_architecture_suites(architecture, session=None):
    Returns list of Suite objects for given C{architecture} name
    @type architecture: str
    @param architecture: Architecture name to search for
    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)
    @return: list of Suite objects for the given name (may be empty)
    # Join suite -> suite_architecture -> architecture, filter by name and
    # return the suites in name order.
    q = session.query(Suite)
    q = q.join(SuiteArchitecture)
    q = q.join(Architecture).filter_by(arch_string=architecture).order_by('suite_name')

__all__.append('get_architecture_suites')
221 ################################################################################
# ORM object for the archive table.
class Archive(object):
    def __init__(self, *args, **kwargs):
        # Debug representation.
        return '<Archive %s>' % self.archive_name

__all__.append('Archive')
def get_archive(archive, session=None):
    returns database id for given C{archive}.
    @type archive: string
    @param archive: the name of the archive
    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)
    @return: Archive object for the given name (None if not present)
    # Archive names are matched case-insensitively by lower-casing the input.
    archive = archive.lower()
    q = session.query(Archive).filter_by(archive_name=archive)
    # No matching row: the caller gets None.
    except NoResultFound:

__all__.append('get_archive')
259 ################################################################################
# ORM object for the bin_associations table (binary <-> suite link).
class BinAssociation(object):
    def __init__(self, *args, **kwargs):
        # Debug representation.
        return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)

__all__.append('BinAssociation')
270 ################################################################################
# ORM object for the bin_contents table (file paths shipped by a binary).
class BinContents(object):
    def __init__(self, *args, **kwargs):
        # Debug representation.
        return '<BinContents (%s, %s)>' % (self.binary, self.filename)

__all__.append('BinContents')
281 ################################################################################
# ORM object for the binaries table.
class DBBinary(object):
    def __init__(self, *args, **kwargs):
        # Debug representation.
        return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)

__all__.append('DBBinary')
def get_suites_binary_in(package, session=None):
    Returns list of Suite objects which given C{package} name is in
    @param package: DBBinary package name to search for
    @return: list of Suite objects for the given package
    # suite -> bin_associations -> binaries, filtered by package name.
    return session.query(Suite).join(BinAssociation).join(DBBinary).filter_by(package=package).all()

__all__.append('get_suites_binary_in')
def get_binary_from_id(binary_id, session=None):
    Returns DBBinary object for given C{id}
    @param binary_id: Id of the required binary
    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)
    @return: DBBinary object for the given binary (None if not present)
    # Primary-key lookup.
    q = session.query(DBBinary).filter_by(binary_id=binary_id)
    # No matching row: the caller gets None.
    except NoResultFound:

__all__.append('get_binary_from_id')
def get_binaries_from_name(package, version=None, architecture=None, session=None):
    Returns list of DBBinary objects for given C{package} name
    @param package: DBBinary package name to search for
    @type version: str or None
    @param version: Version to search for (or None)
    @type architecture: str, list or None
    @param architecture: Architectures to limit to (or None if no limit)
    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)
    @return: list of DBBinary objects for the given name (may be empty)
    q = session.query(DBBinary).filter_by(package=package)
    # Narrow by exact version if requested.
    if version is not None:
        q = q.filter_by(version=version)
    # Accept either a single architecture name or a list of names.
    if architecture is not None:
        if not isinstance(architecture, list):
            architecture = [architecture]
        q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))

__all__.append('get_binaries_from_name')
def get_binaries_from_source_id(source_id, session=None):
    Returns list of DBBinary objects for given C{source_id}
    @param source_id: source_id to search for
    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)
    @return: list of DBBinary objects for the given name (may be empty)
    # All binaries built from the given source package row.
    return session.query(DBBinary).filter_by(source_id=source_id).all()

__all__.append('get_binaries_from_source_id')
def get_binary_from_name_suite(package, suitename, session=None):
    ### For dak examine-package
    ### XXX: Doesn't use object API yet
    # SECURITY NOTE(review): values are %-interpolated directly into the SQL
    # text instead of bound as parameters; safe only for trusted input.
    sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
             FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
             WHERE b.package='%(package)s'
               AND fi.location = l.id
               AND l.component = c.id
               AND su.suite_name %(suitename)s
             ORDER BY b.version DESC"""
    return session.execute(sql % {'package': package, 'suitename': suitename})

__all__.append('get_binary_from_name_suite')
# Return the component name(s) a binary package lives in for a given suite
# and architecture (arch-specific or 'all').
def get_binary_components(package, suitename, arch, session=None):
    # Check for packages that have moved from one component to another
    # Parameters are bound (:name placeholders), unlike
    # get_binary_from_name_suite above.
    query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
               WHERE b.package=:package AND s.suite_name=:suitename
                 AND (a.arch_string = :arch OR a.arch_string = 'all')
                 AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
                 AND f.location = l.id
                 AND l.component = c.id
    vals = {'package': package, 'suitename': suitename, 'arch': arch}
    return session.execute(query, vals)

__all__.append('get_binary_components')
428 ################################################################################
# ORM object for the binary_acl table.
class BinaryACL(object):
    def __init__(self, *args, **kwargs):
        # Debug representation.
        return '<BinaryACL %s>' % self.binary_acl_id

__all__.append('BinaryACL')
439 ################################################################################
# ORM object for the binary_acl_map table.
class BinaryACLMap(object):
    def __init__(self, *args, **kwargs):
        # Debug representation.
        return '<BinaryACLMap %s>' % self.binary_acl_map_id

__all__.append('BinaryACLMap')
450 ################################################################################
455 ArchiveDir "%(archivepath)s";
456 OverrideDir "%(overridedir)s";
457 CacheDir "%(cachedir)s";
462 Packages::Compress ". bzip2 gzip";
463 Sources::Compress ". bzip2 gzip";
468 bindirectory "incoming"
473 BinOverride "override.sid.all3";
474 BinCacheDB "packages-accepted.db";
476 FileList "%(filelist)s";
479 Packages::Extensions ".deb .udeb";
482 bindirectory "incoming/"
485 BinOverride "override.sid.all3";
486 SrcOverride "override.sid.all3.src";
487 FileList "%(filelist)s";
# ORM object for the build_queue table: a buildd incoming queue directory on
# disk plus the metadata (Packages/Sources/Release) generated for it.
class BuildQueue(object):
    def __init__(self, *args, **kwargs):
        # Debug representation.
        return '<BuildQueue %s>' % self.queue_name

    # Regenerate the queue's apt metadata (file list, apt.conf, Packages/
    # Sources via apt-ftparchive, then a signed Release file).
    def write_metadata(self, starttime, force=False):
        # Do we write out metafiles?
        if not (force or self.generate_metadata):
        session = DBConn().session().object_session(self)
        fl_fd = fl_name = ac_fd = ac_name = None
        # Space-separated list of all architectures except 'source'.
        arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
        startdir = os.getcwd()
        # Grab files we want to include
        # "newer" = queue files still within their stay_of_execution window.
        newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
        # Write file list with newer files
        (fl_fd, fl_name) = mkstemp()
        os.write(fl_fd, '%s\n' % n.fullpath)
        # Write minimal apt.conf
        # TODO: Remove hardcoding from template
        (ac_fd, ac_name) = mkstemp()
        os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
                                            'cachedir': cnf["Dir::Cache"],
                                            'overridedir': cnf["Dir::Override"],
        # Run apt-ftparchive generate
        os.chdir(os.path.dirname(ac_name))
        os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
        # Run apt-ftparchive release
        # TODO: Eww - fix this
        bname = os.path.basename(self.path)
        # We have to remove the Release file otherwise it'll be included in the
        os.unlink(os.path.join(bname, 'Release'))
        os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
        # Crude hack with open and append, but this whole section is and should be redone.
        if self.notautomatic:
            release=open("Release", "a")
            release.write("NotAutomatic: yes")
        # Sign the Release file with the configured dinstall keyring(s).
        keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
        if cnf.has_key("Dinstall::SigningPubKeyring"):
            keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
        os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
        # Move the files if we got this far
        os.rename('Release', os.path.join(bname, 'Release'))
        os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
        # Clean up any left behind files

    # Expire old queue entries and stale symlinks, then refresh metadata.
    def clean_and_update(self, starttime, Logger, dryrun=False):
        """WARNING: This routine commits for you"""
        session = DBConn().session().object_session(self)
        if self.generate_metadata and not dryrun:
            self.write_metadata(starttime)
        # Grab files older than our execution time
        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
        Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
        Logger.log(["I: Removing %s from the queue" % o.fullpath])
        os.unlink(o.fullpath)
        # If it wasn't there, don't worry
        if e.errno == ENOENT:
        # TODO: Replace with proper logging call
        Logger.log(["E: Could not remove %s" % o.fullpath])
        # Remove on-disk files that no longer have a BuildQueueFile row,
        # skipping the apt metadata files themselves.
        for f in os.listdir(self.path):
            if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
                r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
            except NoResultFound:
                fp = os.path.join(self.path, f)
                Logger.log(["I: Would remove unused link %s" % fp])
                Logger.log(["I: Removing unused link %s" % fp])
                Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])

    def add_file_from_pool(self, poolfile):
        """Copies a file into the pool. Assumes that the PoolFile object is
        attached to the same SQLAlchemy session as the Queue object is.
        The caller is responsible for committing after calling this function."""
        poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
        # Check if we have a file of this name or this ID already
        for f in self.queuefiles:
            if f.fileid is not None and f.fileid == poolfile.file_id or \
               f.poolfile.filename == poolfile_basename:
                # In this case, update the BuildQueueFile entry so we
                # don't remove it too early
                f.lastused = datetime.now()
                DBConn().session().object_session(poolfile).add(f)
        # Prepare BuildQueueFile object
        qf = BuildQueueFile()
        qf.build_queue_id = self.queue_id
        qf.lastused = datetime.now()
        qf.filename = poolfile_basename
        targetpath = poolfile.fullpath
        queuepath = os.path.join(self.path, poolfile_basename)
        # We need to copy instead of symlink
        utils.copy(targetpath, queuepath)
        # NULL in the fileid field implies a copy
        os.symlink(targetpath, queuepath)
        qf.fileid = poolfile.file_id
        # Get the same session as the PoolFile is using and add the qf to it
        DBConn().session().object_session(poolfile).add(qf)

__all__.append('BuildQueue')
def get_build_queue(queuename, session=None):
    Returns BuildQueue object for given C{queue name}, creating it if it does not
    @type queuename: string
    @param queuename: The name of the queue
    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)
    @return: BuildQueue object for the given queue
    # Lookup by queue name.
    q = session.query(BuildQueue).filter_by(queue_name=queuename)
    except NoResultFound:

__all__.append('get_build_queue')
718 ################################################################################
# ORM object for the build_queue_files table: one file in a build queue.
class BuildQueueFile(object):
    def __init__(self, *args, **kwargs):
        # Debug representation.
        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
        # Absolute on-disk path: queue directory + stored filename.
        return os.path.join(self.buildqueue.path, self.filename)

__all__.append('BuildQueueFile')
734 ################################################################################
# ORM object for the changes_pending_binaries table.
class ChangePendingBinary(object):
    def __init__(self, *args, **kwargs):
        # Debug representation.
        return '<ChangePendingBinary %s>' % self.change_pending_binary_id

__all__.append('ChangePendingBinary')
745 ################################################################################
# ORM object for the changes_pending_files table.
class ChangePendingFile(object):
    def __init__(self, *args, **kwargs):
        # Debug representation.
        return '<ChangePendingFile %s>' % self.change_pending_file_id

__all__.append('ChangePendingFile')
756 ################################################################################
# ORM object for the changes_pending_source table.
class ChangePendingSource(object):
    def __init__(self, *args, **kwargs):
        # Debug representation.
        return '<ChangePendingSource %s>' % self.change_pending_source_id

__all__.append('ChangePendingSource')
767 ################################################################################
# ORM object for the component table (main/contrib/non-free etc.).
class Component(object):
    def __init__(self, *args, **kwargs):
    # Allow direct comparison against plain component-name strings.
    def __eq__(self, val):
        if isinstance(val, str):
            return (self.component_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented
    def __ne__(self, val):
        if isinstance(val, str):
            return (self.component_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented
        # Debug representation.
        return '<Component %s>' % self.component_name

__all__.append('Component')
def get_component(component, session=None):
    Returns database id for given C{component}.
    @type component: string
    @param component: The name of the override type
    @return: the database id for the given component
    # Component names are matched case-insensitively by lower-casing input.
    component = component.lower()
    q = session.query(Component).filter_by(component_name=component)
    except NoResultFound:

__all__.append('get_component')
814 ################################################################################
# ORM object for the config table (database-stored configuration entries).
class DBConfig(object):
    def __init__(self, *args, **kwargs):
        # Debug representation.
        return '<DBConfig %s>' % self.name

__all__.append('DBConfig')
825 ################################################################################
def get_or_set_contents_file_id(filename, session=None):
    Returns database id for given filename.
    If no matching file is found, a row is inserted.
    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    @return: the database id for the given component
    q = session.query(ContentFilename).filter_by(filename=filename)
    # Found: return the existing id.
    ret = q.one().cafilename_id
    except NoResultFound:
        # Not found: insert a new row and return its generated id.
        cf = ContentFilename()
        cf.filename = filename
        session.commit_or_flush()
        ret = cf.cafilename_id

__all__.append('get_or_set_contents_file_id')
def get_contents(suite, overridetype, section=None, session=None):
    Returns contents for a suite / overridetype combination, limiting
    to a section if not None.
    @param suite: Suite object
    @type overridetype: OverrideType
    @param overridetype: OverrideType object
    @type section: Section
    @param section: Optional section object to limit results to
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)
    @return: ResultsProxy object set up to return tuples of (filename, section,
    # find me all of the contents for a given suite
    contents_q = """SELECT (p.path||'/'||n.file) AS fn,
                    FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
                    JOIN content_file_names n ON (c.filename=n.id)
                    JOIN binaries b ON (b.id=c.binary_pkg)
                    JOIN override o ON (o.package=b.package)
                    JOIN section s ON (s.id=o.section)
                    WHERE o.suite = :suiteid AND o.type = :overridetypeid
                    AND b.type=:overridetypename"""
    vals = {'suiteid': suite.suite_id,
            'overridetypeid': overridetype.overridetype_id,
            'overridetypename': overridetype.overridetype}
    # Optionally restrict to a single section.
    if section is not None:
        contents_q += " AND s.id = :sectionid"
        vals['sectionid'] = section.section_id
    contents_q += " ORDER BY fn"
    return session.execute(contents_q, vals)

__all__.append('get_contents')
911 ################################################################################
# ORM object for the content_file_paths table.
class ContentFilepath(object):
    def __init__(self, *args, **kwargs):
        # Debug representation.
        return '<ContentFilepath %s>' % self.filepath

__all__.append('ContentFilepath')
def get_or_set_contents_path_id(filepath, session=None):
    Returns database id for given path.
    If no matching file is found, a row is inserted.
    @type filepath: string
    @param filepath: The filepath
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    @return: the database id for the given path
    q = session.query(ContentFilepath).filter_by(filepath=filepath)
    # Found: return the existing id.
    ret = q.one().cafilepath_id
    except NoResultFound:
        # Not found: insert a new row and return its generated id.
        cf = ContentFilepath()
        cf.filepath = filepath
        session.commit_or_flush()
        ret = cf.cafilepath_id

__all__.append('get_or_set_contents_path_id')
956 ################################################################################
# ORM object for the content_associations table.
class ContentAssociation(object):
    def __init__(self, *args, **kwargs):
        # Debug representation.
        return '<ContentAssociation %s>' % self.ca_id

__all__.append('ContentAssociation')
def insert_content_paths(binary_id, fullpaths, session=None):
    Make sure given path is associated with given binary id
    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for committing.
    @return: True upon success
    session = DBConn().session()
    # Generator of parameter dicts, normalising leading './' off each path.
    def generate_path_dicts():
        for fullpath in fullpaths:
            if fullpath.startswith( './' ):
                fullpath = fullpath[2:]
            yield {'filename':fullpath, 'id': binary_id }
    # Bulk insert via bound parameters, one execute per path.
    for d in generate_path_dicts():
        session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
    traceback.print_exc()
    # Only rollback if we set up the session ourself

__all__.append('insert_content_paths')
1020 ################################################################################
# ORM object for the dsc_files table.
class DSCFile(object):
    def __init__(self, *args, **kwargs):
        # Debug representation.
        return '<DSCFile %s>' % self.dscfile_id

__all__.append('DSCFile')
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    Returns a list of DSCFiles which may be empty
    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find
    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find
    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find
    @return: Possibly empty list of DSCFiles
    q = session.query(DSCFile)
    # Each filter is optional; any combination may be applied.
    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)
    if source_id is not None:
        q = q.filter_by(source_id=source_id)
    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

__all__.append('get_dscfiles')
1064 ################################################################################
# ORM object for the files table: a file in the archive pool.
class PoolFile(object):
    def __init__(self, *args, **kwargs):
        # Debug representation.
        return '<PoolFile %s>' % self.filename
        # Absolute path: location path + pool-relative filename.
        return os.path.join(self.location.path, self.filename)

__all__.append('PoolFile')
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    (ValidFileFound [boolean or None], PoolFile object or None)
    @type filename: string
    @param filename: the filename of the file to check against the DB
    @param filesize: the size of the file to check against the DB
    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB
    @type location_id: int
    @param location_id: the id of the location to look in
    @return: Tuple of length 2.
    - If more than one file found with that name: (C{None}, C{None})
    - If valid pool file found: (C{True}, C{PoolFile object})
    - If valid pool file not found:
    - (C{False}, C{None}) if no file found
    - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    # Look up the file within the given location.
    q = session.query(PoolFile).filter_by(filename=filename)
    q = q.join(Location).filter_by(location_id=location_id)
    # A row exists but its checksum/size disagrees with the caller's values.
    if obj.md5sum != md5sum or obj.filesize != int(filesize):

__all__.append('check_poolfile')
def get_poolfile_by_id(file_id, session=None):
    Returns a PoolFile objects or None for the given id
    @param file_id: the id of the file to look for
    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    # Primary-key lookup.
    q = session.query(PoolFile).filter_by(file_id=file_id)
    except NoResultFound:

__all__.append('get_poolfile_by_id')
def get_poolfile_by_name(filename, location_id=None, session=None):
    Returns an array of PoolFile objects for the given filename and
    (optionally) location_id
    @type filename: string
    @param filename: the filename of the file to check against the DB
    @type location_id: int
    @param location_id: the id of the location to look in (optional)
    @return: array of PoolFile objects
    q = session.query(PoolFile).filter_by(filename=filename)
    # Optionally restrict to a single location.
    if location_id is not None:
        q = q.join(Location).filter_by(location_id=location_id)

__all__.append('get_poolfile_by_name')
def get_poolfile_like_name(filename, session=None):
    Returns an array of PoolFile objects which are like the given name
    @type filename: string
    @param filename: the filename of the file to check against the DB
    @return: array of PoolFile objects
    # TODO: There must be a way of properly using bind parameters with %FOO%
    # Match any pool path ending in '/<filename>'.
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))

__all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    Add a new file to the pool
    @type filename: string
    @param filename: filename
    @type datadict: dict
    @param datadict: dict with needed data
    @type location_id: int
    @param location_id: database id of the location
    @return: the PoolFile object created
    # Populate the row from the supplied checksum/size data.
    # datadict must provide 'size', 'md5sum', 'sha1sum' and 'sha256sum'.
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id
    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)

__all__.append('add_poolfile')
1226 ################################################################################
# ORM object for the fingerprint table (GPG key fingerprints).
class Fingerprint(object):
    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint
        # Debug representation.
        return '<Fingerprint %s>' % self.fingerprint

__all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    Returns Fingerprint object for given fpr.
    @param fpr: The fpr to find / add
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).
    @return: the Fingerprint object for the given fpr or None
    # Exact-match lookup; None when no row exists.
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)
    except NoResultFound:

__all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    Returns Fingerprint object for given fpr.
    If no matching fpr is found, a row is inserted.
    @param fpr: The fpr to find / add
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.
    A flush will be performed either way.
    @return: the Fingerprint object for the given fpr
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)
    except NoResultFound:
        # Not found: insert a new row for this fingerprint.
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()

__all__.append('get_or_set_fingerprint')
1299 ################################################################################
# Helper routine for Keyring class
# Build a display name from an LDAP entry's cn/mn/sn attributes, skipping
# empty and "-" placeholder values.
def get_ldap_name(entry):
    for k in ["cn", "mn", "sn"]:
        if ret and ret[0] != "" and ret[0] != "-":
    return " ".join(name)
1310 ################################################################################
# ORM class for a GPG keyring plus helpers to parse `gpg --with-colons`
# output and map fingerprints to uids (optionally via LDAP).
# NOTE(review): many original lines are missing from this excerpt (method
# `def` lines, loop bodies, guards); visible code is kept byte-identical.
1312 class Keyring(object):
# Command template; %s is replaced with the keyring path in load_keys().
1313     gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1314                      " --with-colons --fingerprint --fingerprint"
1319     def __init__(self, *args, **kwargs):
1323         return '<Keyring %s>' % self.keyring_name
1325     def de_escape_gpg_str(self, txt):
# gpg escapes bytes as \xNN; split keeps the escapes at odd indices, which
# are then replaced by the corresponding character.
1326         esclist = re.split(r'(\\x..)', txt)
1327         for x in range(1,len(esclist),2):
1328             esclist[x] = "%c" % (int(esclist[x][2:],16))
1329         return "".join(esclist)
1331     def parse_address(self, uid):
1332         """parses uid and returns a tuple of real name and email address"""
1334         (name, address) = email.Utils.parseaddr(uid)
# Strip parenthesised comments from the real-name part before de-escaping.
1335         name = re.sub(r"\s*[(].*[)]", "", name)
1336         name = self.de_escape_gpg_str(name)
1339         return (name, address)
1341     def load_keys(self, keyring):
1342         if not self.keyring_id:
1343             raise Exception('Must be initialized with database information')
# Parse colon-separated gpg output: "pub" starts a key, "uid" may refine
# the name/email, "sub"+"fpr" record signing-subkey fingerprints.
1345         k = os.popen(self.gpg_invocation % keyring, "r")
1349         for line in k.xreadlines():
1350             field = line.split(":")
1351             if field[0] == "pub":
1354                 (name, addr) = self.parse_address(field[9])
1356                     self.keys[key]["email"] = addr
1357                     self.keys[key]["name"] = name
1358                 self.keys[key]["fingerprints"] = []
1360             elif key and field[0] == "sub" and len(field) >= 12:
1361                 signingkey = ("s" in field[11])
1362             elif key and field[0] == "uid":
1363                 (name, addr) = self.parse_address(field[9])
1364                 if "email" not in self.keys[key] and "@" in addr:
1365                     self.keys[key]["email"] = addr
1366                     self.keys[key]["name"] = name
1367             elif signingkey and field[0] == "fpr":
1368                 self.keys[key]["fingerprints"].append(field[9])
1369                 self.fpr_lookup[field[9]] = key
1371     def import_users_from_ldap(self, session):
# Query LDAP for accounts with key fingerprints and a valid gid, then
# attach the LDAP uid/name to the matching loaded keys.
1375         LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1376         LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
1378         l = ldap.open(LDAPServer)
# Anonymous bind; the directory presumably allows read access — TODO confirm.
1379         l.simple_bind_s("","")
1380         Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1381                            "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1382                            ["uid", "keyfingerprint", "cn", "mn", "sn"])
1384         ldap_fin_uid_id = {}
1391             uid = entry["uid"][0]
1392             name = get_ldap_name(entry)
1393             fingerprints = entry["keyFingerPrint"]
1395             for f in fingerprints:
1396                 key = self.fpr_lookup.get(f, None)
1397                 if key not in self.keys:
1399                 self.keys[key]["uid"] = uid
1403             keyid = get_or_set_uid(uid, session).uid_id
1404             byuid[keyid] = (uid, name)
1405             byname[uid] = (keyid, name)
1407         return (byname, byuid)
1409     def generate_users_from_keyring(self, format, session):
# Derive uids from key email addresses using the given format string;
# keys without an email get the "invalid-uid" placeholder.
1413         for x in self.keys.keys():
1414             if "email" not in self.keys[x]:
1416                 self.keys[x]["uid"] = format % "invalid-uid"
1418                 uid = format % self.keys[x]["email"]
1419                 keyid = get_or_set_uid(uid, session).uid_id
1420                 byuid[keyid] = (uid, self.keys[x]["name"])
1421                 byname[uid] = (keyid, self.keys[x]["name"])
1422                 self.keys[x]["uid"] = uid
1425             uid = format % "invalid-uid"
1426             keyid = get_or_set_uid(uid, session).uid_id
1427             byuid[keyid] = (uid, "ungeneratable user id")
1428             byname[uid] = (keyid, "ungeneratable user id")
1430         return (byname, byuid)
1432 __all__.append('Keyring')
# Fetch the Keyring row by keyring name; returns None when absent.
# NOTE(review): decorator/try/return lines are missing from this excerpt.
1435 def get_keyring(keyring, session=None):
1437     If C{keyring} does not have an entry in the C{keyrings} table yet, return None
1438     If C{keyring} already has an entry, simply return the existing Keyring
1440     @type keyring: string
1441     @param keyring: the keyring name
1444     @return: the Keyring object for this keyring
1447     q = session.query(Keyring).filter_by(keyring_name=keyring)
1451     except NoResultFound:
1454 __all__.append('get_keyring')
1456 ################################################################################
# ORM stub presumably mapped to the keyring ACL map table elsewhere in this
# module — TODO confirm mapper.  __repr__ body visible below (its `def` line
# is missing from this excerpt).
1458 class KeyringACLMap(object):
1459     def __init__(self, *args, **kwargs):
1463         return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1465 __all__.append('KeyringACLMap')
1467 ################################################################################
# ORM class for a .changes upload record.  NOTE(review): the deletion loops
# referenced by the comments below are missing from this excerpt.
1469 class DBChange(object):
1470     def __init__(self, *args, **kwargs):
1474         return '<DBChange %s>' % self.changesname
1476     def clean_from_queue(self):
1477         session = DBConn().session().object_session(self)
1479         # Remove changes_pool_files entries
1482         # Remove changes_pending_files references
1485         # Clear out of queue
1486         self.in_queue = None
1487         self.approved_for_id = None
1489 __all__.append('DBChange')
# Fetch the DBChange row for a .changes filename; None when absent.
1492 def get_dbchange(filename, session=None):
1494     returns DBChange object for given C{filename}.
1496     @type filename: string
1497     @param filename: the name of the file
1499     @type session: Session
1500     @param session: Optional SQLA session object (a temporary one will be
1501     generated if not supplied)
1504     @return: DBChange object for the given filename (C{None} if not present)
1507     q = session.query(DBChange).filter_by(changesname=filename)
1511     except NoResultFound:
1514 __all__.append('get_dbchange')
1516 ################################################################################
# ORM stub for an archive pool location.  __repr__ body visible below (its
# `def` line is missing from this excerpt).
1518 class Location(object):
1519     def __init__(self, *args, **kwargs):
1523         return '<Location %s (%s)>' % (self.path, self.location_id)
1525 __all__.append('Location')
# Fetch a Location by path, optionally restricted to a component and/or
# archive.  NOTE(review): decorator/try/return lines missing from excerpt.
1528 def get_location(location, component=None, archive=None, session=None):
1530     Returns Location object for the given combination of location, component
1533     @type location: string
1534     @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}
1536     @type component: string
1537     @param component: the component name (if None, no restriction applied)
1539     @type archive: string
1540     @param archive: the archive name (if None, no restriction applied)
1542     @rtype: Location / None
1543     @return: Either a Location object or None if one can't be found
1546     q = session.query(Location).filter_by(path=location)
1548     if archive is not None:
1549         q = q.join(Archive).filter_by(archive_name=archive)
1551     if component is not None:
1552         q = q.join(Component).filter_by(component_name=component)
1556     except NoResultFound:
1559 __all__.append('get_location')
1561 ################################################################################
# ORM class for a maintainer record.  __repr__ body visible below (its `def`
# line is missing from this excerpt).
1563 class Maintainer(object):
1564     def __init__(self, *args, **kwargs):
1568         return '''<Maintainer '%s' (%s)>''' % (self.name, self.maintainer_id)
1570 def get_split_maintainer(self):
1571 if not hasattr(self, 'name') or self.name is None:
1572 return ('', '', '', '')
1574 return fix_maintainer(self.name.strip())
1576 __all__.append('Maintainer')
# Look up the Maintainer row by name, inserting a new row when none exists.
# NOTE(review): decorator/docstring delimiters/try lines missing from this
# excerpt; visible code kept byte-identical.
1579 def get_or_set_maintainer(name, session=None):
1581     Returns Maintainer object for given maintainer name.
1583     If no matching maintainer name is found, a row is inserted.
1586     @param name: The maintainer name to add
1588     @type session: SQLAlchemy
1589     @param session: Optional SQL session object (a temporary one will be
1590     generated if not supplied). If not passed, a commit will be performed at
1591     the end of the function, otherwise the caller is responsible for committing.
1592     A flush will be performed either way.
1595     @return: the Maintainer object for the given maintainer
1598     q = session.query(Maintainer).filter_by(name=name)
1601     except NoResultFound:
# No matching row: insert one and commit-or-flush so it receives an id.
1602         maintainer = Maintainer()
1603         maintainer.name = name
1604         session.add(maintainer)
1605         session.commit_or_flush()
1610 __all__.append('get_or_set_maintainer')
# Fetch a Maintainer by primary key; Query.get() returns None for an
# unknown id.
1613 def get_maintainer(maintainer_id, session=None):
1615     Return the name of the maintainer behind C{maintainer_id} or None if that
1616     maintainer_id is invalid.
1618     @type maintainer_id: int
1619     @param maintainer_id: the id of the maintainer
1622     @return: the Maintainer with this C{maintainer_id}
1625     return session.query(Maintainer).get(maintainer_id)
1627 __all__.append('get_maintainer')
1629 ################################################################################
# ORM stub for a NEW-queue comment.  __repr__ body visible below (its `def`
# line is missing from this excerpt).
1631 class NewComment(object):
1632     def __init__(self, *args, **kwargs):
1636         return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1638 __all__.append('NewComment')
# True when a NEW comment exists for the package/version pair.
# NOTE(review): `bool(q.count() > 0)` — the bool() wrapper is redundant,
# `>` already yields a bool; left unchanged since surrounding lines are
# missing from this excerpt.
1641 def has_new_comment(package, version, session=None):
1643     Returns true if the given combination of C{package}, C{version} has a comment.
1645     @type package: string
1646     @param package: name of the package
1648     @type version: string
1649     @param version: package version
1651     @type session: Session
1652     @param session: Optional SQLA session object (a temporary one will be
1653     generated if not supplied)
1659     q = session.query(NewComment)
1660     q = q.filter_by(package=package)
1661     q = q.filter_by(version=version)
1663     return bool(q.count() > 0)
1665 __all__.append('has_new_comment')
# List NewComment rows, each filter applied only when its argument is given.
# NOTE(review): the final `return q.all()` line is missing from this excerpt.
1668 def get_new_comments(package=None, version=None, comment_id=None, session=None):
1670     Returns (possibly empty) list of NewComment objects for the given
1673     @type package: string (optional)
1674     @param package: name of the package
1676     @type version: string (optional)
1677     @param version: package version
1679     @type comment_id: int (optional)
1680     @param comment_id: An id of a comment
1682     @type session: Session
1683     @param session: Optional SQLA session object (a temporary one will be
1684     generated if not supplied)
1687     @return: A (possibly empty) list of NewComment objects will be returned
1690     q = session.query(NewComment)
1691     if package is not None: q = q.filter_by(package=package)
1692     if version is not None: q = q.filter_by(version=version)
1693     if comment_id is not None: q = q.filter_by(comment_id=comment_id)
1697 __all__.append('get_new_comments')
1699 ################################################################################
# ORM stub for an override entry.  __repr__ body visible below (its `def`
# line is missing from this excerpt).
1701 class Override(object):
1702     def __init__(self, *args, **kwargs):
1706         return '<Override %s (%s)>' % (self.package, self.suite_id)
1708 __all__.append('Override')
# Query overrides for a package, optionally limited by suite(s),
# component(s) and overridetype(s); scalars are normalised to lists so a
# single IN-filter handles both call styles.
1711 def get_override(package, suite=None, component=None, overridetype=None, session=None):
1713     Returns Override object for the given parameters
1715     @type package: string
1716     @param package: The name of the package
1718     @type suite: string, list or None
1719     @param suite: The name of the suite (or suites if a list) to limit to.  If
1720     None, don't limit.  Defaults to None.
1722     @type component: string, list or None
1723     @param component: The name of the component (or components if a list) to
1724     limit to.  If None, don't limit.  Defaults to None.
1726     @type overridetype: string, list or None
1727     @param overridetype: The name of the overridetype (or overridetypes if a list) to
1728     limit to.  If None, don't limit.  Defaults to None.
1730     @type session: Session
1731     @param session: Optional SQLA session object (a temporary one will be
1732     generated if not supplied)
1735     @return: A (possibly empty) list of Override objects will be returned
1738     q = session.query(Override)
1739     q = q.filter_by(package=package)
1741     if suite is not None:
1742         if not isinstance(suite, list): suite = [suite]
1743         q = q.join(Suite).filter(Suite.suite_name.in_(suite))
1745     if component is not None:
1746         if not isinstance(component, list): component = [component]
1747         q = q.join(Component).filter(Component.component_name.in_(component))
1749     if overridetype is not None:
1750         if not isinstance(overridetype, list): overridetype = [overridetype]
1751         q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
1755 __all__.append('get_override')
1758 ################################################################################
# ORM stub for an override type (deb/udeb/dsc — presumably; TODO confirm).
# __repr__ body visible below (its `def` line is missing from this excerpt).
1760 class OverrideType(object):
1761     def __init__(self, *args, **kwargs):
1765         return '<OverrideType %s>' % self.overridetype
1767 __all__.append('OverrideType')
# Fetch the OverrideType row by name; None when absent.
1770 def get_override_type(override_type, session=None):
1772     Returns OverrideType object for given C{override type}.
1774     @type override_type: string
1775     @param override_type: The name of the override type
1777     @type session: Session
1778     @param session: Optional SQLA session object (a temporary one will be
1779     generated if not supplied)
1782     @return: the database id for the given override type
1785     q = session.query(OverrideType).filter_by(overridetype=override_type)
1789     except NoResultFound:
1792 __all__.append('get_override_type')
1794 ################################################################################
# ORM stub for deb contents entries.  NOTE(review): the repr string
# misspells the class name ('DebConetnts'); the `def __repr__` line is
# missing from this excerpt so the typo is left as-is.
1796 class DebContents(object):
1797     def __init__(self, *args, **kwargs):
1801         return '<DebConetnts %s: %s>' % (self.package.package,self.file)
1803 __all__.append('DebContents')
# ORM stub for udeb contents entries.  NOTE(review): repr string misspells
# the class name ('UdebConetnts'); left as-is, `def __repr__` line missing
# from this excerpt.
1806 class UdebContents(object):
1807     def __init__(self, *args, **kwargs):
1811         return '<UdebConetnts %s: %s>' % (self.package.package,self.file)
1813 __all__.append('UdebContents')
# ORM stub for not-yet-accepted binary contents.  __repr__ body visible
# below (its `def` line is missing from this excerpt).
1815 class PendingBinContents(object):
1816     def __init__(self, *args, **kwargs):
1820         return '<PendingBinContents %s>' % self.contents_id
1822 __all__.append('PendingBinContents')
# Record the file paths of a not-yet-accepted binary package, replacing any
# previously recorded paths for the same package/version/arch.
# NOTE(review): several lines are missing from this excerpt (the session
# check, the delete call, pca.filename/filepath assignments, the commit and
# return statements); visible code kept byte-identical.
1824 def insert_pending_content_paths(package,
1829     Make sure given paths are temporarily associated with given
1833     @param package: the package to associate with should have been read in from the binary control file
1834     @type fullpaths: list
1835     @param fullpaths: the list of paths of the file being associated with the binary
1836     @type session: SQLAlchemy session
1837     @param session: Optional SQLAlchemy session.  If this is passed, the caller
1838     is responsible for ensuring a transaction has begun and committing the
1839     results or rolling back based on the result code.  If not passed, a commit
1840     will be performed at the end of the function
1842     @return: True upon success, False if there is a problem
1845     privatetrans = False
1848         session = DBConn().session()
1852         arch = get_architecture(package['Architecture'], session)
1853         arch_id = arch.arch_id
1855         # Remove any already existing recorded files for this package
1856         q = session.query(PendingBinContents)
1857         q = q.filter_by(package=package['Package'])
1858         q = q.filter_by(version=package['Version'])
1859         q = q.filter_by(architecture=arch_id)
1862         for fullpath in fullpaths:
# Normalise tar-style "./" prefixes so stored paths are archive-relative.
1864             if fullpath.startswith( "./" ):
1865                 fullpath = fullpath[2:]
1867             pca = PendingBinContents()
1868             pca.package = package['Package']
1869             pca.version = package['Version']
1871             pca.architecture = arch_id
1874                 pca.type = 8 # gross
1876                 pca.type = 7 # also gross
1879         # Only commit if we set up the session ourself
1887     except Exception, e:
1888         traceback.print_exc()
1890         # Only rollback if we set up the session ourself
1897 __all__.append('insert_pending_content_paths')
1899 ################################################################################
# ORM stub for a policy queue (e.g. NEW).  __repr__ body visible below (its
# `def` line is missing from this excerpt).
1901 class PolicyQueue(object):
1902     def __init__(self, *args, **kwargs):
1906         return '<PolicyQueue %s>' % self.queue_name
1908 __all__.append('PolicyQueue')
# Fetch the PolicyQueue row by queue name; None when absent.
1911 def get_policy_queue(queuename, session=None):
1913     Returns PolicyQueue object for given C{queue name}
1915     @type queuename: string
1916     @param queuename: The name of the queue
1918     @type session: Session
1919     @param session: Optional SQLA session object (a temporary one will be
1920     generated if not supplied)
1923     @return: PolicyQueue object for the given queue
1926     q = session.query(PolicyQueue).filter_by(queue_name=queuename)
1930     except NoResultFound:
1933 __all__.append('get_policy_queue')
# Fetch the PolicyQueue row by filesystem path; None when absent.
1936 def get_policy_queue_from_path(pathname, session=None):
1938     Returns PolicyQueue object for given C{path name}
1940     @type pathname: string
1941     @param pathname: The path
1943     @type session: Session
1944     @param session: Optional SQLA session object (a temporary one will be
1945     generated if not supplied)
1948     @return: PolicyQueue object for the given queue
1951     q = session.query(PolicyQueue).filter_by(path=pathname)
1955     except NoResultFound:
1958 __all__.append('get_policy_queue_from_path')
1960 ################################################################################
# ORM class for a package priority; supports direct comparison against the
# priority-name string (see __eq__/__ne__ below).
1962 class Priority(object):
1963     def __init__(self, *args, **kwargs):
1966 def __eq__(self, val):
1967 if isinstance(val, str):
1968 return (self.priority == val)
1969 # This signals to use the normal comparison operator
1970 return NotImplemented
1972 def __ne__(self, val):
1973 if isinstance(val, str):
1974 return (self.priority != val)
1975 # This signals to use the normal comparison operator
1976 return NotImplemented
# Body of __repr__ (its `def` line is missing from this excerpt).
1979         return '<Priority %s (%s)>' % (self.priority, self.priority_id)
1981 __all__.append('Priority')
# Fetch the Priority row by name; None when absent.
1984 def get_priority(priority, session=None):
1986     Returns Priority object for given C{priority name}.
1988     @type priority: string
1989     @param priority: The name of the priority
1991     @type session: Session
1992     @param session: Optional SQLA session object (a temporary one will be
1993     generated if not supplied)
1996     @return: Priority object for the given priority
1999     q = session.query(Priority).filter_by(priority=priority)
2003     except NoResultFound:
2006 __all__.append('get_priority')
# Build a name -> id dict over all Priority rows.  NOTE(review): the loop
# header and return are missing from this excerpt.
2009 def get_priorities(session=None):
2011     Returns dictionary of priority names -> id mappings
2013     @type session: Session
2014     @param session: Optional SQL session object (a temporary one will be
2015     generated if not supplied)
2018     @return: dictionary of priority names -> id mappings
2022     q = session.query(Priority)
2024         ret[x.priority] = x.priority_id
2028 __all__.append('get_priorities')
2030 ################################################################################
# ORM class for a package section; supports direct comparison against the
# section-name string (see __eq__/__ne__ below).
2032 class Section(object):
2033     def __init__(self, *args, **kwargs):
2036 def __eq__(self, val):
2037 if isinstance(val, str):
2038 return (self.section == val)
2039 # This signals to use the normal comparison operator
2040 return NotImplemented
2042 def __ne__(self, val):
2043 if isinstance(val, str):
2044 return (self.section != val)
2045 # This signals to use the normal comparison operator
2046 return NotImplemented
# Body of __repr__ (its `def` line is missing from this excerpt).
2049         return '<Section %s>' % self.section
2051 __all__.append('Section')
# Fetch the Section row by name; None when absent.
2054 def get_section(section, session=None):
2056     Returns Section object for given C{section name}.
2058     @type section: string
2059     @param section: The name of the section
2061     @type session: Session
2062     @param session: Optional SQLA session object (a temporary one will be
2063     generated if not supplied)
2066     @return: Section object for the given section name
2069     q = session.query(Section).filter_by(section=section)
2073     except NoResultFound:
2076 __all__.append('get_section')
# Build a name -> id dict over all Section rows.  NOTE(review): the loop
# header and return are missing from this excerpt.
2079 def get_sections(session=None):
2081     Returns dictionary of section names -> id mappings
2083     @type session: Session
2084     @param session: Optional SQL session object (a temporary one will be
2085     generated if not supplied)
2088     @return: dictionary of section names -> id mappings
2092     q = session.query(Section)
2094         ret[x.section] = x.section_id
2098 __all__.append('get_sections')
2100 ################################################################################
# ORM stub for a source package.  __repr__ body visible below (its `def`
# line is missing from this excerpt).
2102 class DBSource(object):
2103     def __init__(self, *args, **kwargs):
2107         return '<DBSource %s (%s)>' % (self.source, self.version)
2109 __all__.append('DBSource')
# Check that the source for a binary upload exists in the given suites,
# accepting exact version matches and bin-only-NMU versions.  Suite mappings
# (map/silent-map) are followed so an upload to a mapped suite also counts.
# NOTE(review): several lines are missing from this excerpt (the suite-set
# initialisation, the mapping loop header, the ok/return bookkeeping);
# visible code kept byte-identical.
2112 def source_exists(source, source_version, suites = ["any"], session=None):
2114     Ensure that source exists somewhere in the archive for the binary
2115     upload being processed.
2116     1. exact match => 1.0-3
2117     2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
2119     @type source: string
2120     @param source: source name
2122     @type source_version: string
2123     @param source_version: expected source version
2126     @param suites: list of suites to check in, default I{any}
2128     @type session: Session
2129     @param session: Optional SQLA session object (a temporary one will be
2130     generated if not supplied)
2133     @return: returns 1 if a source with expected version is found, otherwise 0
2140     for suite in suites:
2141         q = session.query(DBSource).filter_by(source=source)
2143             # source must exist in suite X, or in some other suite that's
2144             # mapped to X, recursively... silent-maps are counted too,
2145             # unreleased-maps aren't.
2146             maps = cnf.ValueList("SuiteMappings")[:]
2148             maps = [ m.split() for m in maps ]
2149             maps = [ (x[1], x[2]) for x in maps
2150                      if x[0] == "map" or x[0] == "silent-map" ]
2153                 if x[1] in s and x[0] not in s:
2156             q = q.join(SrcAssociation).join(Suite)
2157             q = q.filter(Suite.suite_name.in_(s))
2159         # Reduce the query results to a list of version numbers
2160         ql = [ j.version for j in q.all() ]
2163         if source_version in ql:
# Strip the +bN binary-NMU suffix and retry with the original version.
2167         from daklib.regexes import re_bin_only_nmu
2168         orig_source_version = re_bin_only_nmu.sub('', source_version)
2169         if orig_source_version in ql:
2172         # No source found so return not ok
2177 __all__.append('source_exists')
# List the suites that contain the named source package.
2180 def get_suites_source_in(source, session=None):
2182     Returns list of Suite objects which given C{source} name is in
2185     @param source: DBSource package name to search for
2188     @return: list of Suite objects for the given source
2191     return session.query(Suite).join(SrcAssociation).join(DBSource).filter_by(source=source).all()
2193 __all__.append('get_suites_source_in')
# List DBSource rows by name, optionally filtered by version and
# dm_upload_allowed.  NOTE(review): the final `return q.all()` line is
# missing from this excerpt.
2196 def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
2198     Returns list of DBSource objects for given C{source} name and other parameters
2201     @param source: DBSource package name to search for
2203     @type version: str or None
2204     @param version: DBSource version name to search for or None if not applicable
2206     @type dm_upload_allowed: bool
2207     @param dm_upload_allowed: If None, no effect.  If True or False, only
2208     return packages with that dm_upload_allowed setting
2210     @type session: Session
2211     @param session: Optional SQL session object (a temporary one will be
2212     generated if not supplied)
2215     @return: list of DBSource objects for the given name (may be empty)
2218     q = session.query(DBSource).filter_by(source=source)
2220     if version is not None:
2221         q = q.filter_by(version=version)
2223     if dm_upload_allowed is not None:
2224         q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2228 __all__.append('get_sources_from_name')
# Fetch the DBSource for a source/suite pair via SrcAssociation; returns
# the associated source object, None on NoResultFound.
2231 def get_source_in_suite(source, suite, session=None):
2233     Returns list of DBSource objects for a combination of C{source} and C{suite}.
2235       - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2236       - B{suite} - a suite name, eg. I{unstable}
2238     @type source: string
2239     @param source: source package name
2242     @param suite: the suite name
2245     @return: the version for I{source} in I{suite}
2249     q = session.query(SrcAssociation)
2250     q = q.join('source').filter_by(source=source)
2251     q = q.join('suite').filter_by(suite_name=suite)
2254         return q.one().source
2255     except NoResultFound:
2258 __all__.append('get_source_in_suite')
2260 ################################################################################
# Insert a source package (.dsc) into the database: the DBSource row, its
# pool files, suite associations, dsc_files entries and uploaders.
# Returns (source, dsc_component, dsc_location_id, pfs).
# NOTE(review): many original lines are missing from this excerpt
# (DBSource()/session.add/flush calls, loop internals); visible code kept
# byte-identical.  has_key()/raise-string are Python 2 idioms, consistent
# with the rest of this module.
2263 def add_dsc_to_db(u, filename, session=None):
2264     entry = u.pkg.files[filename]
2268     source.source = u.pkg.dsc["source"]
2269     source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
2270     source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
2271     source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
2272     source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2273     source.install_date = datetime.now().date()
2275     dsc_component = entry["component"]
2276     dsc_location_id = entry["location id"]
2278     source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
2280     # Set up a new poolfile if necessary
2281     if not entry.has_key("files id") or not entry["files id"]:
2282         filename = entry["pool name"] + filename
2283         poolfile = add_poolfile(filename, entry, dsc_location_id, session)
2285         pfs.append(poolfile)
2286         entry["files id"] = poolfile.file_id
2288     source.poolfile_id = entry["files id"]
# Associate the source with every suite listed in the .changes distribution.
2292     for suite_name in u.pkg.changes["distribution"].keys():
2293         sa = SrcAssociation()
2294         sa.source_id = source.source_id
2295         sa.suite_id = get_suite(suite_name).suite_id
2300     # Add the source files to the DB (files and dsc_files)
2302     dscfile.source_id = source.source_id
2303     dscfile.poolfile_id = entry["files id"]
2304     session.add(dscfile)
2306     for dsc_file, dentry in u.pkg.dsc_files.items():
2308         df.source_id = source.source_id
2310         # If the .orig tarball is already in the pool, it's
2311         # files id is stored in dsc_files by check_dsc().
2312         files_id = dentry.get("files id", None)
2314         # Find the entry in the files hash
2315         # TODO: Bail out here properly
2317         for f, e in u.pkg.files.items():
2322         if files_id is None:
2323             filename = dfentry["pool name"] + dsc_file
2325             (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
2326             # FIXME: needs to check for -1/-2 and or handle exception
2327             if found and obj is not None:
2328                 files_id = obj.file_id
2331             # If still not found, add it
2332             if files_id is None:
2333                 # HACK: Force sha1sum etc into dentry
2334                 dentry["sha1sum"] = dfentry["sha1sum"]
2335                 dentry["sha256sum"] = dfentry["sha256sum"]
2336                 poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
2337                 pfs.append(poolfile)
2338                 files_id = poolfile.file_id
2340             poolfile = get_poolfile_by_id(files_id, session)
2341             if poolfile is None:
2342                 utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
2343             pfs.append(poolfile)
2345         df.poolfile_id = files_id
2350     # Add the src_uploaders to the DB
2351     uploader_ids = [source.maintainer_id]
2352     if u.pkg.dsc.has_key("uploaders"):
# Uploaders are comma-separated; ">, " is rewritten to a tab first so
# commas inside name parts do not split entries.
2353         for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
2355             uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
2358     for up_id in uploader_ids:
2359         if added_ids.has_key(up_id):
2361             utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
2367         su.maintainer_id = up_id
2368         su.source_id = source.source_id
2373     return source, dsc_component, dsc_location_id, pfs
2375 __all__.append('add_dsc_to_db')
# Insert a binary package (.deb or .udeb) into the database: the DBBinary
# row, its pool file, and its suite associations.
# NOTE(review): lines creating the DBBinary and flushing the session are
# missing from this excerpt.  Also note: `get_poolfile_by_id(bin.poolfile_id)`
# is called before `bin.poolfile_id` is assigned on the following visible
# line — looks like an ordering bug, but intermediate lines are missing, so
# TODO confirm against the full file before changing.
2378 def add_deb_to_db(u, filename, session=None):
2380     Contrary to what you might expect, this routine deals with both
2381     debs and udebs.  That info is in 'dbtype', whilst 'type' is
2382     'deb' for both of them
2385     entry = u.pkg.files[filename]
2388     bin.package = entry["package"]
2389     bin.version = entry["version"]
2390     bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
2391     bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2392     bin.arch_id = get_architecture(entry["architecture"], session).arch_id
2393     bin.binarytype = entry["dbtype"]
2396     filename = entry["pool name"] + filename
2397     fullpath = os.path.join(cnf["Dir::Pool"], filename)
2398     if not entry.get("location id", None):
2399         entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id
2401     if entry.get("files id", None):
2402         poolfile = get_poolfile_by_id(bin.poolfile_id)
2403         bin.poolfile_id = entry["files id"]
2405         poolfile = add_poolfile(filename, entry, entry["location id"], session)
2406         bin.poolfile_id = entry["files id"] = poolfile.file_id
# The binary must map to exactly one known source package.
2409     bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
2410     if len(bin_sources) != 1:
2411         raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
2412                                   (bin.package, bin.version, entry["architecture"],
2413                                    filename, bin.binarytype, u.pkg.changes["fingerprint"])
2415     bin.source_id = bin_sources[0].source_id
2417     # Add and flush object so it has an ID
2421     # Add BinAssociations
2422     for suite_name in u.pkg.changes["distribution"].keys():
2423         ba = BinAssociation()
2424         ba.binary_id = bin.binary_id
2425         ba.suite_id = get_suite(suite_name).suite_id
2430     # Deal with contents - disabled for now
2431     #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
2433     #    print "REJECT\nCould not determine contents of package %s" % bin.package
2434     #    session.rollback()
2435     #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
2439 __all__.append('add_deb_to_db')
2441 ################################################################################
# ORM stub for a source ACL.  __repr__ body visible below (its `def` line
# is missing from this excerpt).
2443 class SourceACL(object):
2444     def __init__(self, *args, **kwargs):
2448         return '<SourceACL %s>' % self.source_acl_id
2450 __all__.append('SourceACL')
2452 ################################################################################
# ORM stub linking a source package to a suite.  __repr__ body visible
# below (its `def` line is missing from this excerpt).
2454 class SrcAssociation(object):
2455     def __init__(self, *args, **kwargs):
2459         return '<SrcAssociation %s (%s, %s)>' % (self.sa_id, self.source, self.suite)
2461 __all__.append('SrcAssociation')
2463 ################################################################################
# ORM stub for a source package format (e.g. "3.0 (quilt)" — presumably;
# TODO confirm).  __repr__ body visible below (`def` line missing).
2465 class SrcFormat(object):
2466     def __init__(self, *args, **kwargs):
2470         return '<SrcFormat %s>' % (self.format_name)
2472 __all__.append('SrcFormat')
2474 ################################################################################
# ORM stub for a source uploader entry.  __repr__ body visible below (its
# `def` line is missing from this excerpt).
2476 class SrcUploader(object):
2477     def __init__(self, *args, **kwargs):
2481         return '<SrcUploader %s>' % self.uploader_id
2483 __all__.append('SrcUploader')
2485 ################################################################################
# (display label, Suite attribute) pairs used by Suite.details() to render
# a suite as text.
2487 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2488                  ('SuiteID', 'suite_id'),
2489                  ('Version', 'version'),
2490                  ('Origin', 'origin'),
2492                  ('Description', 'description'),
2493                  ('Untouchable', 'untouchable'),
2494                  ('Announce', 'announce'),
2495                  ('Codename', 'codename'),
2496                  ('OverrideCodename', 'overridecodename'),
2497                  ('ValidTime', 'validtime'),
2498                  ('Priority', 'priority'),
2499                  ('NotAutomatic', 'notautomatic'),
2500                  ('CopyChanges', 'copychanges'),
2501                  ('OverrideSuite', 'overridesuite')]
# ORM class for a suite; supports direct comparison against the suite-name
# string (see __eq__/__ne__ below).  __repr__ body visible below (its `def`
# line is missing from this excerpt).
2503 class Suite(object):
2504     def __init__(self, *args, **kwargs):
2508         return '<Suite %s>' % self.suite_name
2510 def __eq__(self, val):
2511 if isinstance(val, str):
2512 return (self.suite_name == val)
2513 # This signals to use the normal comparison operator
2514 return NotImplemented
2516 def __ne__(self, val):
2517 if isinstance(val, str):
2518 return (self.suite_name != val)
2519 # This signals to use the normal comparison operator
2520 return NotImplemented
# Body of details() (its `def` line is missing from this excerpt): renders
# one "Label: value" line per non-None SUITE_FIELDS attribute.
2524         for disp, field in SUITE_FIELDS:
2525             val = getattr(self, field, None)
2527                 ret.append("%s: %s" % (disp, val))
2529         return "\n".join(ret)
2531 __all__.append('Suite')
# Fetch the SuiteArchitecture join row for a suite/architecture pair;
# None on NoResultFound.
2534 def get_suite_architecture(suite, architecture, session=None):
2536     Returns a SuiteArchitecture object given C{suite} and ${arch} or None if it
2540     @param suite: Suite name to search for
2542     @type architecture: str
2543     @param architecture: Architecture name to search for
2545     @type session: Session
2546     @param session: Optional SQL session object (a temporary one will be
2547     generated if not supplied)
2549     @rtype: SuiteArchitecture
2550     @return: the SuiteArchitecture object or None
2553     q = session.query(SuiteArchitecture)
2554     q = q.join(Architecture).filter_by(arch_string=architecture)
2555     q = q.join(Suite).filter_by(suite_name=suite)
2559     except NoResultFound:
2562 __all__.append('get_suite_architecture')
# Fetch the Suite row by name; None when absent.
2565 def get_suite(suite, session=None):
2567     Returns Suite object for given C{suite name}.
2570     @param suite: The name of the suite
2572     @type session: Session
2573     @param session: Optional SQLA session object (a temporary one will be
2574     generated if not supplied)
2577     @return: Suite object for the requested suite name (None if not present)
2580     q = session.query(Suite).filter_by(suite_name=suite)
2584     except NoResultFound:
2587 __all__.append('get_suite')
2589 ################################################################################
# ORM stub joining suites and architectures.  __repr__ body visible below
# (its `def` line is missing from this excerpt).
2591 class SuiteArchitecture(object):
2592     def __init__(self, *args, **kwargs):
2596         return '<SuiteArchitecture (%s, %s)>' % (self.suite_id, self.arch_id)
2598 __all__.append('SuiteArchitecture')
# List Architecture rows for a suite, optionally excluding 'source' and
# 'all', ordered by arch_string.  NOTE(review): the skipsrc/skipall `if`
# lines and the final return are missing from this excerpt.
2601 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2603     Returns list of Architecture objects for given C{suite} name
2606     @param suite: Suite name to search for
2608     @type skipsrc: boolean
2609     @param skipsrc: Whether to skip returning the 'source' architecture entry
2612     @type skipall: boolean
2613     @param skipall: Whether to skip returning the 'all' architecture entry
2616     @type session: Session
2617     @param session: Optional SQL session object (a temporary one will be
2618     generated if not supplied)
2621     @return: list of Architecture objects for the given name (may be empty)
2624     q = session.query(Architecture)
2625     q = q.join(SuiteArchitecture)
2626     q = q.join(Suite).filter_by(suite_name=suite)
2629         q = q.filter(Architecture.arch_string != 'source')
2632         q = q.filter(Architecture.arch_string != 'all')
2634     q = q.order_by('arch_string')
2638 __all__.append('get_suite_architectures')
2640 ################################################################################
class SuiteSrcFormat(object):
    """ORM object for a row of the suite_src_formats association table
    (wired up in DBConn.__setupmappers); suite_id and src_format_id are
    supplied by the mapper, not by __init__."""

    def __init__(self, *args, **kwargs):
        # Attributes are populated by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
# Export the association class through the module's public API list.
2649 __all__.append('SuiteSrcFormat')
# session_wrapper (defined earlier in this module) supplies a temporary
# session when the caller does not pass one -- TODO confirm it is in scope.
@session_wrapper
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """

    # Join source formats to the named suite through the association table.
    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    return q.all()

__all__.append('get_suite_src_formats')
2676 ################################################################################
# NOTE(review): the 'class Uid(object):' header fell in a gap of this copy of
# the file; it is restored here (the mapper(Uid, self.tbl_uid, ...) call in
# DBConn.__setupmappers references the class by this name).
class Uid(object):
    """ORM object for a key uid row; supports direct comparison against a
    plain uid string."""

    def __init__(self, uid = None, name = None):
        self.uid = uid
        self.name = name

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __repr__(self):
        return '<Uid %s (%s)>' % (self.uid, self.name)
# Export the Uid ORM class through the module's public API list.
2698 __all__.append('Uid')
# session_wrapper (defined earlier in this module) supplies a temporary
# session and the session.commit_or_flush() helper used below -- TODO
# confirm it is in scope in this copy.
@session_wrapper
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """

    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        # Unknown uid: insert a new row and persist it.  commit_or_flush
        # commits for a temporary session, flushes for a caller-owned one.
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        ret = uid

    return ret

__all__.append('get_or_set_uid')
# session_wrapper (defined earlier in this module) supplies a temporary
# session when the caller does not pass one -- TODO confirm it is in scope.
@session_wrapper
def get_uid_from_fingerprint(fpr, session=None):
    """
    Returns the Uid attached to fingerprint C{fpr} (None if not found).

    @type fpr: string
    @param fpr: Fingerprint to search by

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: Uid
    @return: the Uid object for the given fingerprint (None if not present)
    """

    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    # EAFP: exactly one row expected; map "no such fingerprint" to None.
    try:
        return q.one()
    except NoResultFound:
        return None

__all__.append('get_uid_from_fingerprint')
2746 ################################################################################
class UploadBlock(object):
    """ORM object for a row of the upload_blocks table (wired up in
    DBConn.__setupmappers); source and upload_block_id are supplied by the
    mapper, not by __init__."""

    def __init__(self, *args, **kwargs):
        # Attributes are populated by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
# Export the UploadBlock ORM class through the module's public API list.
2755 __all__.append('UploadBlock')
2757 ################################################################################
# NOTE(review): the body of this class is mangled in this copy of the file --
# every line carries a stray fused line number, indentation has been lost and
# several statements (the class docstring quotes, the __shared_state dict,
# tuple openers/closers for the table/view name lists, some method def lines)
# are missing.  The code is kept byte-identical below; the comments only
# explain what each visible section does.  Do NOT trust this copy to run.
2759 class DBConn(object):
2761 database module init.
# Borg pattern: every instance shares one __dict__ (assigned from
# __shared_state, whose definition is missing here), so all DBConn() objects
# see the same engine, metadata and session factory.
2765 def __init__(self, *args, **kwargs):
2766 self.__dict__ = self.__shared_state
# Only the first instantiation performs the expensive setup.
2768 if not getattr(self, 'initialised', False):
2769 self.initialised = True
# dict.has_key is Python 2 only; a 'debug' kwarg turns on SQL echo in
# create_engine (see __createconn below).
2770 self.debug = kwargs.has_key('debug')
# Reflect database tables into self.tbl_* attributes.  Tables in the first
# tuple get an explicit Integer 'id' primary key forced on them as a
# sqlalchemy 0.5 reflection workaround (see the comment further down).
# Most entries of these tuples are missing from this copy.
2773 def __setuptables(self):
2774 tables_with_primary = (
2785 'changes_pending_binaries',
2786 'changes_pending_files',
2787 'changes_pending_source',
2797 'pending_bin_contents',
2809 # The following tables have primary keys but sqlalchemy
2810 # version 0.5 fails to reflect them correctly with database
2811 # versions before upgrade #41.
2813 #'build_queue_files',
2816 tables_no_primary = (
2818 'changes_pending_files_map',
2819 'changes_pending_source_files',
2820 'changes_pool_files',
2823 'suite_architectures',
2824 'suite_src_formats',
2825 'suite_build_queue_copy',
2827 # see the comment above
2829 'build_queue_files',
# The names below are database views, reflected into self.view_* further
# down -- presumably members of a 'views = (' tuple whose opening line is
# missing from this copy; verify against the full source.
2833 'almost_obsolete_all_associations',
2834 'almost_obsolete_src_associations',
2835 'any_associations_source',
2836 'bin_assoc_by_arch',
2837 'bin_associations_binaries',
2838 'binaries_suite_arch',
2839 'binfiles_suite_component_arch',
2842 'newest_all_associations',
2843 'newest_any_associations',
2845 'newest_src_association',
2846 'obsolete_all_associations',
2847 'obsolete_any_associations',
2848 'obsolete_any_by_all_associations',
2849 'obsolete_src_associations',
2851 'src_associations_bin',
2852 'src_associations_src',
2853 'suite_arch_by_name',
2856 # Sqlalchemy version 0.5 fails to reflect the SERIAL type
2857 # correctly and that is why we have to use a workaround. It can
2858 # be removed as soon as we switch to version 0.6.
2859 for table_name in tables_with_primary:
2860 table = Table(table_name, self.db_meta, \
2861 Column('id', Integer, primary_key = True), \
2862 autoload=True, useexisting=True)
2863 setattr(self, 'tbl_%s' % table_name, table)
2865 for table_name in tables_no_primary:
2866 table = Table(table_name, self.db_meta, autoload=True)
2867 setattr(self, 'tbl_%s' % table_name, table)
2869 for view_name in views:
2870 view = Table(view_name, self.db_meta, autoload=True)
2871 setattr(self, 'view_%s' % view_name, view)
# Wire the ORM classes defined in this module to the reflected tables.
# Each mapper renames raw columns (e.g. architecture.id -> arch_id) and adds
# relation() properties for foreign keys; explicit primaryjoin arguments are
# used where a table references the same target twice (e.g. maintainer vs
# changedby on source).
2873 def __setupmappers(self):
2874 mapper(Architecture, self.tbl_architecture,
2875 properties = dict(arch_id = self.tbl_architecture.c.id))
2877 mapper(Archive, self.tbl_archive,
2878 properties = dict(archive_id = self.tbl_archive.c.id,
2879 archive_name = self.tbl_archive.c.name))
2881 mapper(BinAssociation, self.tbl_bin_associations,
2882 properties = dict(ba_id = self.tbl_bin_associations.c.id,
2883 suite_id = self.tbl_bin_associations.c.suite,
2884 suite = relation(Suite),
2885 binary_id = self.tbl_bin_associations.c.bin,
2886 binary = relation(DBBinary)))
2888 mapper(PendingBinContents, self.tbl_pending_bin_contents,
2889 properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
2890 filename = self.tbl_pending_bin_contents.c.filename,
2891 package = self.tbl_pending_bin_contents.c.package,
2892 version = self.tbl_pending_bin_contents.c.version,
2893 arch = self.tbl_pending_bin_contents.c.arch,
2894 otype = self.tbl_pending_bin_contents.c.type))
2896 mapper(DebContents, self.tbl_deb_contents,
2897 properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
2898 package=self.tbl_deb_contents.c.package,
2899 suite=self.tbl_deb_contents.c.suite,
2900 arch=self.tbl_deb_contents.c.arch,
2901 section=self.tbl_deb_contents.c.section,
2902 filename=self.tbl_deb_contents.c.filename))
2904 mapper(UdebContents, self.tbl_udeb_contents,
2905 properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
2906 package=self.tbl_udeb_contents.c.package,
2907 suite=self.tbl_udeb_contents.c.suite,
2908 arch=self.tbl_udeb_contents.c.arch,
2909 section=self.tbl_udeb_contents.c.section,
2910 filename=self.tbl_udeb_contents.c.filename))
2912 mapper(BuildQueue, self.tbl_build_queue,
2913 properties = dict(queue_id = self.tbl_build_queue.c.id))
2915 mapper(BuildQueueFile, self.tbl_build_queue_files,
2916 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
2917 poolfile = relation(PoolFile, backref='buildqueueinstances')))
2919 mapper(DBBinary, self.tbl_binaries,
2920 properties = dict(binary_id = self.tbl_binaries.c.id,
2921 package = self.tbl_binaries.c.package,
2922 version = self.tbl_binaries.c.version,
2923 maintainer_id = self.tbl_binaries.c.maintainer,
2924 maintainer = relation(Maintainer),
2925 source_id = self.tbl_binaries.c.source,
2926 source = relation(DBSource),
2927 arch_id = self.tbl_binaries.c.architecture,
2928 architecture = relation(Architecture),
2929 poolfile_id = self.tbl_binaries.c.file,
2930 poolfile = relation(PoolFile),
2931 binarytype = self.tbl_binaries.c.type,
2932 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2933 fingerprint = relation(Fingerprint),
2934 install_date = self.tbl_binaries.c.install_date,
2935 binassociations = relation(BinAssociation,
2936 primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))
2938 mapper(BinaryACL, self.tbl_binary_acl,
2939 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
2941 mapper(BinaryACLMap, self.tbl_binary_acl_map,
2942 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
2943 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
2944 architecture = relation(Architecture)))
2946 mapper(Component, self.tbl_component,
2947 properties = dict(component_id = self.tbl_component.c.id,
2948 component_name = self.tbl_component.c.name))
2950 mapper(DBConfig, self.tbl_config,
2951 properties = dict(config_id = self.tbl_config.c.id))
2953 mapper(DSCFile, self.tbl_dsc_files,
2954 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2955 source_id = self.tbl_dsc_files.c.source,
2956 source = relation(DBSource),
2957 poolfile_id = self.tbl_dsc_files.c.file,
2958 poolfile = relation(PoolFile)))
2960 mapper(PoolFile, self.tbl_files,
2961 properties = dict(file_id = self.tbl_files.c.id,
2962 filesize = self.tbl_files.c.size,
2963 location_id = self.tbl_files.c.location,
2964 location = relation(Location)))
2966 mapper(Fingerprint, self.tbl_fingerprint,
2967 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2968 uid_id = self.tbl_fingerprint.c.uid,
2969 uid = relation(Uid),
2970 keyring_id = self.tbl_fingerprint.c.keyring,
2971 keyring = relation(Keyring),
2972 source_acl = relation(SourceACL),
2973 binary_acl = relation(BinaryACL)))
2975 mapper(Keyring, self.tbl_keyrings,
2976 properties = dict(keyring_name = self.tbl_keyrings.c.name,
2977 keyring_id = self.tbl_keyrings.c.id))
# The DBChange mapper exposes the .changes file metadata plus links to its
# pool files, pending files and the policy queue it sits in.
2979 mapper(DBChange, self.tbl_changes,
2980 properties = dict(change_id = self.tbl_changes.c.id,
2981 poolfiles = relation(PoolFile,
2982 secondary=self.tbl_changes_pool_files,
2983 backref="changeslinks"),
2984 seen = self.tbl_changes.c.seen,
2985 source = self.tbl_changes.c.source,
2986 binaries = self.tbl_changes.c.binaries,
2987 architecture = self.tbl_changes.c.architecture,
2988 distribution = self.tbl_changes.c.distribution,
2989 urgency = self.tbl_changes.c.urgency,
2990 maintainer = self.tbl_changes.c.maintainer,
2991 changedby = self.tbl_changes.c.changedby,
2992 date = self.tbl_changes.c.date,
2993 version = self.tbl_changes.c.version,
2994 files = relation(ChangePendingFile,
2995 secondary=self.tbl_changes_pending_files_map,
2996 backref="changesfile"),
2997 in_queue_id = self.tbl_changes.c.in_queue,
2998 in_queue = relation(PolicyQueue,
2999 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3000 approved_for_id = self.tbl_changes.c.approved_for))
3002 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3003 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3005 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3006 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3007 filename = self.tbl_changes_pending_files.c.filename,
3008 size = self.tbl_changes_pending_files.c.size,
3009 md5sum = self.tbl_changes_pending_files.c.md5sum,
3010 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3011 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3013 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3014 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3015 change = relation(DBChange),
3016 maintainer = relation(Maintainer,
3017 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3018 changedby = relation(Maintainer,
3019 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3020 fingerprint = relation(Fingerprint),
3021 source_files = relation(ChangePendingFile,
3022 secondary=self.tbl_changes_pending_source_files,
3023 backref="pending_sources")))
3026 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3027 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3028 keyring = relation(Keyring, backref="keyring_acl_map"),
3029 architecture = relation(Architecture)))
3031 mapper(Location, self.tbl_location,
3032 properties = dict(location_id = self.tbl_location.c.id,
3033 component_id = self.tbl_location.c.component,
3034 component = relation(Component),
3035 archive_id = self.tbl_location.c.archive,
3036 archive = relation(Archive),
3037 archive_type = self.tbl_location.c.type))
3039 mapper(Maintainer, self.tbl_maintainer,
3040 properties = dict(maintainer_id = self.tbl_maintainer.c.id))
3042 mapper(NewComment, self.tbl_new_comments,
3043 properties = dict(comment_id = self.tbl_new_comments.c.id))
3045 mapper(Override, self.tbl_override,
3046 properties = dict(suite_id = self.tbl_override.c.suite,
3047 suite = relation(Suite),
3048 package = self.tbl_override.c.package,
3049 component_id = self.tbl_override.c.component,
3050 component = relation(Component),
3051 priority_id = self.tbl_override.c.priority,
3052 priority = relation(Priority),
3053 section_id = self.tbl_override.c.section,
3054 section = relation(Section),
3055 overridetype_id = self.tbl_override.c.type,
3056 overridetype = relation(OverrideType)))
3058 mapper(OverrideType, self.tbl_override_type,
3059 properties = dict(overridetype = self.tbl_override_type.c.type,
3060 overridetype_id = self.tbl_override_type.c.id))
3062 mapper(PolicyQueue, self.tbl_policy_queue,
3063 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3065 mapper(Priority, self.tbl_priority,
3066 properties = dict(priority_id = self.tbl_priority.c.id))
3068 mapper(Section, self.tbl_section,
3069 properties = dict(section_id = self.tbl_section.c.id,
3070 section=self.tbl_section.c.section))
3072 mapper(DBSource, self.tbl_source,
3073 properties = dict(source_id = self.tbl_source.c.id,
3074 version = self.tbl_source.c.version,
3075 maintainer_id = self.tbl_source.c.maintainer,
3076 maintainer = relation(Maintainer,
3077 primaryjoin=(self.tbl_source.c.maintainer==self.tbl_maintainer.c.id)),
3078 poolfile_id = self.tbl_source.c.file,
3079 poolfile = relation(PoolFile),
3080 fingerprint_id = self.tbl_source.c.sig_fpr,
3081 fingerprint = relation(Fingerprint),
3082 changedby_id = self.tbl_source.c.changedby,
3083 changedby = relation(Maintainer,
3084 primaryjoin=(self.tbl_source.c.changedby==self.tbl_maintainer.c.id)),
3085 srcfiles = relation(DSCFile,
3086 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3087 srcassociations = relation(SrcAssociation,
3088 primaryjoin=(self.tbl_source.c.id==self.tbl_src_associations.c.source)),
3089 srcuploaders = relation(SrcUploader)))
3091 mapper(SourceACL, self.tbl_source_acl,
3092 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3094 mapper(SrcAssociation, self.tbl_src_associations,
3095 properties = dict(sa_id = self.tbl_src_associations.c.id,
3096 suite_id = self.tbl_src_associations.c.suite,
3097 suite = relation(Suite),
3098 source_id = self.tbl_src_associations.c.source,
3099 source = relation(DBSource)))
3101 mapper(SrcFormat, self.tbl_src_format,
3102 properties = dict(src_format_id = self.tbl_src_format.c.id,
3103 format_name = self.tbl_src_format.c.format_name))
3105 mapper(SrcUploader, self.tbl_src_uploaders,
3106 properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3107 source_id = self.tbl_src_uploaders.c.source,
3108 source = relation(DBSource,
3109 primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3110 maintainer_id = self.tbl_src_uploaders.c.maintainer,
3111 maintainer = relation(Maintainer,
3112 primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3114 mapper(Suite, self.tbl_suite,
3115 properties = dict(suite_id = self.tbl_suite.c.id,
3116 policy_queue = relation(PolicyQueue),
3117 copy_queues = relation(BuildQueue, secondary=self.tbl_suite_build_queue_copy)))
3119 mapper(SuiteArchitecture, self.tbl_suite_architectures,
3120 properties = dict(suite_id = self.tbl_suite_architectures.c.suite,
3121 suite = relation(Suite, backref='suitearchitectures'),
3122 arch_id = self.tbl_suite_architectures.c.architecture,
3123 architecture = relation(Architecture)))
3125 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3126 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3127 suite = relation(Suite, backref='suitesrcformats'),
3128 src_format_id = self.tbl_suite_src_formats.c.src_format,
3129 src_format = relation(SrcFormat)))
3131 mapper(Uid, self.tbl_uid,
3132 properties = dict(uid_id = self.tbl_uid.c.id,
3133 fingerprint = relation(Fingerprint)))
3135 mapper(UploadBlock, self.tbl_upload_blocks,
3136 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3137 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3138 uid = relation(Uid, backref="uploadblocks")))
3140 ## Connection functions
# Build a postgres connection URL from configuration (DB::Host, DB::Port,
# DB::Name), create the engine/metadata/session factory, then reflect
# tables and install the mappers.  NOTE(review): the 'cnf = Config()'
# assignment and the if/else around the TCP vs local-socket URL forms are
# missing from this copy -- verify against the full source.
3141 def __createconn(self):
3142 from config import Config
3146 connstr = "postgres://%s" % cnf["DB::Host"]
3147 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3148 connstr += ":%s" % cnf["DB::Port"]
3149 connstr += "/%s" % cnf["DB::Name"]
# Local-socket form: port is passed as a query parameter instead.
3152 connstr = "postgres:///%s" % cnf["DB::Name"]
3153 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3154 connstr += "?port=%s" % cnf["DB::Port"]
# self.debug (set in __init__) turns on SQL statement echoing.
3156 self.db_pg = create_engine(connstr, echo=self.debug)
3157 self.db_meta = MetaData()
3158 self.db_meta.bind = self.db_pg
3159 self.db_smaker = sessionmaker(bind=self.db_pg,
3163 self.__setuptables()
3164 self.__setupmappers()
# Looks like the tail of a session() accessor whose 'def' line is missing
# from this copy: hands out a new ORM session from the shared factory.
3167 return self.db_smaker()
# Export the singleton connection class through the module's public API list.
3169 __all__.append('DBConn')