import os
import stat
+import time
+
+import datetime
from cPickle import Unpickler, Pickler
from errno import EPERM
from apt_pkg import ParseSection
from utils import open_file, fubar, poolify
+from config import *
+from dbconn import *
###############################################################################
__all__.append('CHANGESFIELDS_DSCFILES_OPTIONAL')
+CHANGESFIELDS_ORIGFILES = [ "id", "location" ]
+
+__all__.append('CHANGESFIELDS_ORIGFILES')
+
###############################################################################
class Changes(object):
self.dsc = {}
self.files = {}
self.dsc_files = {}
-
- self.orig_tar_id = None
- self.orig_tar_location = ""
- self.orig_tar_gz = None
+ self.orig_files = {}
def file_summary(self):
# changes["distribution"] may not exist in corner cases
or the text of a warning if there are
"""
- conf = Config()
summary = ""
# Abandon the check if it's a non-sourceful upload
entry["override section"])
if entry["priority"] != "-":
- if entry["priority"] != entry["override_priority"]:
+ if entry["priority"] != entry["override priority"]:
summary += "%s: package says priority is %s, override says %s.\n" % (name,
entry["priority"],
entry["override priority"])
return summary
+    def remove_known_changes(self, session=None):
+        """Delete this changes file's row from the known_changes table.
+
+        If no session is supplied, a private one is opened and committed
+        and closed afterwards; a caller-supplied session is left for the
+        caller to manage (no commit/close)."""
+        # Initialise before the branch: previously this was only set when
+        # we opened our own session, so passing a session in raised
+        # NameError at the 'if privatetrans' check below.
+        privatetrans = False
+        if session is None:
+            session = DBConn().session()
+            privatetrans = True
+
+        session.delete(get_knownchange(self.changes_file, session))
+
+        if privatetrans:
+            session.commit()
+            session.close()
+
+
+    def mark_missing_fields(self):
+        """add "missing" in fields which we will require for the known_changes table"""
+        # These keys are all bound as parameters in add_known_changes()'s
+        # INSERT; substitute the literal string 'missing' for any that are
+        # absent or empty so the insert does not fail.
+        for key in ['urgency', 'maintainer', 'fingerprint', 'changedby' ]:
+            if (not self.changes.has_key(key)) or (not self.changes[key]):
+                self.changes[key]='missing'
+
+    def add_known_changes(self, queue, session=None):
+        """Insert a row for this changes file into the known_changes table.
+
+        queue names the Dir::Queue::<queue> config entry used to locate
+        the .changes file on disk; its ctime is recorded as 'seen'.
+
+        If no session is supplied, a private one is opened and committed
+        and closed afterwards; a caller-supplied session is left for the
+        caller to manage (no commit/close)."""
+        cnf = Config()
+        # Initialise before the branch: previously this was only set when
+        # we opened our own session, so passing a session in raised
+        # NameError at the 'if privatetrans' check below.
+        privatetrans = False
+        if session is None:
+            session = DBConn().session()
+            privatetrans = True
+
+        dirpath = cnf["Dir::Queue::%s" % (queue) ]
+        changesfile = os.path.join(dirpath, self.changes_file)
+        filetime = datetime.datetime.fromtimestamp(os.path.getctime(changesfile))
+
+        # Required fields may be absent/empty; fill with 'missing' first.
+        self.mark_missing_fields()
+
+        session.execute(
+            """INSERT INTO known_changes
+            (changesname, seen, source, binaries, architecture, version,
+            distribution, urgency, maintainer, fingerprint, changedby, date)
+            VALUES (:changesfile,:filetime,:source,:binary, :architecture,
+            :version,:distribution,:urgency,:maintainer,:fingerprint,:changedby,:date)""",
+            { 'changesfile':changesfile,
+              'filetime':filetime,
+              'source':self.changes["source"],
+              'binary':self.changes["binary"],
+              'architecture':self.changes["architecture"],
+              'version':self.changes["version"],
+              'distribution':self.changes["distribution"],
+              'urgency':self.changes["urgency"],
+              'maintainer':self.changes["maintainer"],
+              'fingerprint':self.changes["fingerprint"],
+              'changedby':self.changes["changed-by"],
+              'date':self.changes["date"]} )
+
+        if privatetrans:
+            session.commit()
+            session.close()
def load_dot_dak(self, changesfile):
"""
self.files.update(p.load())
self.dsc_files.update(p.load())
- self.orig_tar_id = p.load()
- self.orig_tar_location = p.load()
+ next_obj = p.load()
+ if isinstance(next_obj, dict):
+ self.orig_files.update(next_obj)
+ else:
+ # Auto-convert old dak files to new format supporting
+ # multiple tarballs
+ orig_tar_gz = None
+ for dsc_file in self.dsc_files.keys():
+ if dsc_file.endswith(".orig.tar.gz"):
+ orig_tar_gz = dsc_file
+ self.orig_files[orig_tar_gz] = {}
+ if next_obj != None:
+ self.orig_files[orig_tar_gz]["id"] = next_obj
+ next_obj = p.load()
+ if next_obj != None and next_obj != "":
+ self.orig_files[orig_tar_gz]["location"] = next_obj
+ if len(self.orig_files[orig_tar_gz]) == 0:
+ del self.orig_files[orig_tar_gz]
dump_file.close()
return ret
+    def sanitised_orig_files(self):
+        """Return a copy of self.orig_files restricted to the fields listed
+        in CHANGESFIELDS_ORIGFILES, suitable for pickling to a .dak file."""
+        ret = {}
+        for name, entry in self.orig_files.items():
+            ret[name] = {}
+            # Optional orig_files fields
+            for i in CHANGESFIELDS_ORIGFILES:
+                if entry.has_key(i):
+                    ret[name][i] = entry[i]
+
+        return ret
+
def write_dot_dak(self, dest_dir):
"""
Dump ourself into a cPickle file.
p.dump(self.sanitised_dsc())
p.dump(self.sanitised_files())
p.dump(self.sanitised_dsc_files())
- p.dump(self.orig_tar_id)
- p.dump(self.orig_tar_location)
+ p.dump(self.sanitised_orig_files())
dump_file.close()