        # Validate the component
        component = files[file]["component"]
        component_id = daklib.database.get_component_id(component)
        if component_id == -1:
            reject("file '%s' has unknown component '%s'." % (file, component))
            continue

        # See if the package is NEW
        if not Upload.in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file):
            files[file]["new"] = 1

        # Validate the priority
        if files[file]["priority"].find('/') != -1:
            reject("file '%s' has invalid priority '%s' [contains '/']." % (file, files[file]["priority"]))

        # Determine the location
        location = Cnf["Dir::Pool"]
        location_id = daklib.database.get_location_id (location, component, archive)
        if location_id == -1:
            reject("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
        files[file]["location id"] = location_id

        # Check the md5sum & size against existing files (if any)
        files[file]["pool name"] = daklib.utils.poolify (changes["source"], files[file]["component"])
        files_id = daklib.database.get_files_id(files[file]["pool name"] + file, files[file]["size"], files[file]["md5sum"], files[file]["location id"])
        if files_id == -1:
            reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (file))
        elif files_id == -2:
            reject("md5sum and/or size mismatch on existing copy of %s." % (file))
        files[file]["files id"] = files_id

        # Check for packages that have moved from one component to another
        q = Upload.projectB.query("""
SELECT c.name FROM binaries b, bin_associations ba, suite s, location l,
                   component c, architecture a, files f
 WHERE b.package = '%s' AND s.suite_name = '%s'
   AND (a.arch_string = '%s' OR a.arch_string = 'all')
   AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
   AND f.location = l.id AND l.component = c.id AND b.file = f.id"""
                                  % (files[file]["package"], suite,
                                     files[file]["architecture"]))
        ql = q.getresult()
        if ql:
            files[file]["othercomponents"] = ql[0][0]

    # If the .changes file says it has source, it must have source.
    if changes["architecture"].has_key("source"):
        if not has_source:
            reject("no source found and Architecture line in changes mentions source.")

        if not has_binaries and Cnf.FindB("Dinstall::Reject::NoSourceOnly"):
            reject("source only uploads are not supported.")

###############################################################################
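
# Illustrative sketch (not part of dak): the component-move query above
# interpolates values straight into the SQL string.  If input ever came from
# a less trusted path, the minimal defence is the standard SQL escape of
# doubling single quotes.  sql_quote is a hypothetical helper, not a dak API.
def sql_quote(value):
    """Escape VALUE for embedding in a single-quoted SQL literal."""
    return value.replace("'", "''")

# e.g. "WHERE b.package = '%s'" % sql_quote(package)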

def check_dsc():
    global reprocess

    # Ensure there is source to check
    if not changes["architecture"].has_key("source"):
        return 1

    # Find the .dsc
    dsc_filename = None
    for file in files.keys():
        if files[file]["type"] == "dsc":
            if dsc_filename:
                reject("can not process a .changes file with multiple .dsc's.")
                return 0
            else:
                dsc_filename = file

    # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
    if not dsc_filename:
        reject("source uploads must contain a dsc file")
        return 0

    # Parse the .dsc file
    try:
        dsc.update(daklib.utils.parse_changes(dsc_filename, signing_rules=1))
    except daklib.utils.cant_open_exc:
        # if not -n, copy_to_holding() will have done this for us...
        if Options["No-Action"]:
            reject("%s: can't read file." % (dsc_filename))
    except daklib.utils.changes_parse_error_exc, line:
        reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
    except daklib.utils.invalid_dsc_format_exc, line:
        reject("%s: syntax error on line %s." % (dsc_filename, line))

    # Build up the file list of files mentioned by the .dsc
    try:
        dsc_files.update(daklib.utils.build_file_list(dsc, is_a_dsc=1))
    except daklib.utils.no_files_exc:
        reject("%s: no Files: field." % (dsc_filename))
        return 0
    except daklib.utils.changes_parse_error_exc, line:
        reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
        return 0

    # Enforce mandatory fields
    for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
        if not dsc.has_key(i):
            reject("%s: missing mandatory field `%s'." % (dsc_filename, i))
            return 0

    # Validate the source and version fields
    if not re_valid_pkg_name.match(dsc["source"]):
        reject("%s: invalid source name '%s'." % (dsc_filename, dsc["source"]))
    if not re_valid_version.match(dsc["version"]):
        reject("%s: invalid version number '%s'." % (dsc_filename, dsc["version"]))

    # Bumping the version number of the .dsc breaks extraction by stable's
    # dpkg-source.  So let's not do that...
    if dsc["format"] != "1.0":
        reject("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))

    # Validate the Maintainer field
    try:
        daklib.utils.fix_maintainer (dsc["maintainer"])
    except daklib.utils.ParseMaintError, msg:
        reject("%s: Maintainer field ('%s') failed to parse: %s" \
               % (dsc_filename, dsc["maintainer"], msg))

    # Validate the build-depends field(s)
    for field_name in [ "build-depends", "build-depends-indep" ]:
        field = dsc.get(field_name)
        if field:
            # Check for broken dpkg-dev lossage...
            if field.startswith("ARRAY"):
                reject("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % (dsc_filename, field_name.title()))

            # Have apt try to parse them...
            try:
                apt_pkg.ParseSrcDepends(field)
            except:
                reject("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

    # Ensure the version number in the .dsc matches the version number in the .changes
    epochless_dsc_version = daklib.utils.re_no_epoch.sub('', dsc["version"])
    changes_version = files[dsc_filename]["version"]
    if epochless_dsc_version != changes_version:
        reject("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

    # Ensure there is a .tar.gz in the .dsc file
    has_tar = 0
    for f in dsc_files.keys():
        m = daklib.utils.re_issource.match(f)
        if not m:
            reject("%s: %s in Files field not recognised as source." % (dsc_filename, f))
            continue
        type = m.group(3)
        if type == "orig.tar.gz" or type == "tar.gz":
            has_tar = 1
    if not has_tar:
        reject("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))

    # Ensure source is newer than existing source in target suites
    reject(Upload.check_source_against_db(dsc_filename), "")

    (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(dsc_filename)
    reject(reject_msg, "")
    if is_in_incoming:
        if not Options["No-Action"]:
            copy_to_holding(is_in_incoming)
        orig_tar_gz = os.path.basename(is_in_incoming)
        files[orig_tar_gz] = {}
        files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]
        files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]
        files[orig_tar_gz]["section"] = files[dsc_filename]["section"]
        files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"]
        files[orig_tar_gz]["component"] = files[dsc_filename]["component"]
        files[orig_tar_gz]["type"] = "orig.tar.gz"
        reprocess = 2

    return 1

################################################################################
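
# Illustrative sketch (not part of dak): check_dsc() compares the .dsc
# version with the epoch stripped, because the .changes Files: entries carry
# the epochless form.  re_no_epoch is assumed to be equivalent to the
# pattern below; strip_epoch is a hypothetical helper.
import re

def strip_epoch(version):
    """Return VERSION without a leading 'epoch:' component, if any."""
    return re.sub(r'^\d+:', '', version)

# e.g. strip_epoch("1:2.30-4") == "2.30-4"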

def get_changelog_versions(source_dir):
    """Extracts the source package and (optionally) grabs the
    version history out of debian/changelog for the BTS."""

    # Find the .dsc (again)
    dsc_filename = None
    for file in files.keys():
        if files[file]["type"] == "dsc":
            dsc_filename = file

    # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
    if not dsc_filename:
        return

    # Create a symlink mirror of the source files in our temporary directory
    for f in files.keys():
        m = daklib.utils.re_issource.match(f)
        if m:
            src = os.path.join(source_dir, f)
            # If a file is missing for whatever reason, give up.
            if not os.path.exists(src):
                return
            type = m.group(3)
            if type == "orig.tar.gz" and pkg.orig_tar_gz:
                continue
            dest = os.path.join(os.getcwd(), f)
            os.symlink(src, dest)

    # If the orig.tar.gz is not a part of the upload, create a symlink to the
    # existing copy.
    if pkg.orig_tar_gz:
        dest = os.path.join(os.getcwd(), os.path.basename(pkg.orig_tar_gz))
        os.symlink(pkg.orig_tar_gz, dest)

    # Extract the source
    cmd = "dpkg-source -sn -x %s" % (dsc_filename)
    (result, output) = commands.getstatusoutput(cmd)
    if (result != 0):
        reject("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
        reject(daklib.utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
        return

    if not Cnf.Find("Dir::Queue::BTSVersionTrack"):
        return

    # Get the upstream version
    upstr_version = daklib.utils.re_no_epoch.sub('', dsc["version"])
    if re_strip_revision.search(upstr_version):
        upstr_version = re_strip_revision.sub('', upstr_version)

    # Ensure the changelog file exists
    changelog_filename = "%s-%s/debian/changelog" % (dsc["source"], upstr_version)
    if not os.path.exists(changelog_filename):
        reject("%s: debian/changelog not found in extracted source." % (dsc_filename))
        return

    # Parse the changelog
    dsc["bts changelog"] = ""
    changelog_file = daklib.utils.open_file(changelog_filename)
    for line in changelog_file.readlines():
        m = re_changelog_versions.match(line)
        if m:
            dsc["bts changelog"] += line
    changelog_file.close()

    # Check we found at least one revision in the changelog
    if not dsc["bts changelog"]:
        reject("%s: changelog format not recognised (empty version tree)." % (dsc_filename))

########################################
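
# Illustrative sketch (not part of dak): the unpacked tree is located under
# "<source>-<upstream version>", i.e. the version minus epoch and Debian
# revision.  re_strip_revision is assumed to match the trailing '-<revision>'
# part; upstream_version is a hypothetical helper using the same re module.
def upstream_version(version):
    """Strip epoch and Debian revision: '1:2.30-4' -> '2.30'."""
    version = re.sub(r'^\d+:', '', version)   # drop the epoch, if any
    return re.sub(r'-[^-]+$', '', version)    # drop the Debian revision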

def check_source():
    # Bail out if:
    #  a) there's no source
    # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
    # or c) the orig.tar.gz is MIA
    if not changes["architecture"].has_key("source") or reprocess == 2 \
       or pkg.orig_tar_gz == -1:
        return

    # Create a temporary directory to extract the source into
    if Options["No-Action"]:
        tmpdir = tempfile.mkdtemp()
    else:
        # We're in queue/holding and can create a random directory.
        tmpdir = "%s" % (os.getpid())
        os.mkdir(tmpdir)

    # Move into the temporary directory
    cwd = os.getcwd()
    os.chdir(tmpdir)

    # Get the changelog version history
    get_changelog_versions(cwd)

    # Move back and clean up the temporary tree
    os.chdir(cwd)
    try:
        shutil.rmtree(tmpdir)
    except OSError, e:
        if errno.errorcode[e.errno] != 'EACCES':
            daklib.utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))

        reject("%s: source tree could not be cleanly removed." % (dsc["source"]))
        # We probably have u-r or u-w directories so chmod everything
        # and try again.
        cmd = "chmod -R u+rwx %s" % (tmpdir)
        result = os.system(cmd)
        if result != 0:
            daklib.utils.fubar("'%s' failed with result %s." % (cmd, result))
        shutil.rmtree(tmpdir)
    except:
        daklib.utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))

################################################################################
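
# Illustrative sketch (not part of dak): the extract-in-a-scratch-directory
# pattern used by check_source(), written with mkdtemp(), which both creates
# the directory and avoids the mktemp() name race.  in_scratch_dir is a
# hypothetical helper.
import os, shutil, tempfile

def in_scratch_dir(work):
    """Run WORK() inside a fresh temporary directory, then clean up."""
    tmpdir = tempfile.mkdtemp()
    cwd = os.getcwd()
    os.chdir(tmpdir)
    try:
        work()
    finally:
        os.chdir(cwd)
        shutil.rmtree(tmpdir)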

# FIXME: should be a Debian-specific check called from a hook

def check_urgency ():
    if changes["architecture"].has_key("source"):
        if not changes.has_key("urgency"):
            changes["urgency"] = Cnf["Urgency::Default"]
        changes["urgency"] = changes["urgency"].lower()
        if changes["urgency"] not in Cnf.ValueList("Urgency::Valid"):
            reject("%s is not a valid urgency; it will be treated as %s by testing." % (changes["urgency"], Cnf["Urgency::Default"]), "Warning: ")
            changes["urgency"] = Cnf["Urgency::Default"]

################################################################################
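
# Illustrative sketch (not part of dak): the urgency normalisation above with
# the configuration stubbed out as plain Python values.  The default list of
# valid urgencies is an assumption (Debian's usual set), not read from dak.
def normalise_urgency(urgency, valid=("low", "medium", "high", "emergency", "critical"), default="low"):
    """Lower-case URGENCY, falling back to DEFAULT if it is missing or unknown."""
    if not urgency:
        return default
    urgency = urgency.lower()
    if urgency not in valid:
        return default
    return urgency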

def check_hashes ():
    # Make sure we recognise the format of the Files: field
    format = changes.get("format", "0.0").split(".", 1)
    if len(format) == 2:
        format = int(format[0]), int(format[1])
    else:
        format = int(float(format[0])), 0

    check_hash(".changes", files, "md5sum", apt_pkg.md5sum)
    check_hash(".dsc", dsc_files, "md5sum", apt_pkg.md5sum)

    if format >= (1,8):
        hashes = [("sha1", apt_pkg.sha1sum),
                  ("sha256", apt_pkg.sha256sum)]
    else:
        hashes = []

    for x in changes:
        if x.startswith("checksum-"):
            h = x.split("-", 1)[1]
            if h not in dict(hashes):
                reject("Unsupported checksum field '%s' in .changes" % (h))

    for x in dsc:
        if x.startswith("checksum-"):
            h = x.split("-", 1)[1]
            if h not in dict(hashes):
                reject("Unsupported checksum field '%s' in .dsc" % (h))

    for h, f in hashes:
        try:
            fs = daklib.utils.build_file_list(changes, 0, "checksums-%s" % h, h)
            check_hash(".changes %s" % (h), fs, h, f, files)
        except daklib.utils.no_files_exc:
            reject("No Checksums-%s: field in .changes file" % (h))

        if "source" not in changes["architecture"]:
            continue

        try:
            fs = daklib.utils.build_file_list(dsc, 1, "checksums-%s" % h, h)
            check_hash(".dsc %s" % (h), fs, h, f, dsc_files)
        except daklib.utils.no_files_exc:
            reject("No Checksums-%s: field in .dsc file" % (h))

################################################################################
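
# Illustrative sketch (not part of dak): how the Format: value is turned into
# a comparable tuple above, so "1.8" and later enable the SHA checksum
# fields.  parse_format is a hypothetical helper.
def parse_format(value):
    """'1.8' -> (1, 8); a bare major like '2' -> (2, 0)."""
    parts = value.split(".", 1)
    if len(parts) == 2:
        return int(parts[0]), int(parts[1])
    return int(float(parts[0])), 0

# e.g. parse_format("1.8") >= (1, 8) is True; parse_format("1.7") >= (1, 8) is False.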

def check_hash (where, files, key, testfn, basedict = None):
    if basedict:
        for file in basedict.keys():
            if file not in files:
                reject("%s: no %s checksum" % (file, key))

    for file in files.keys():
        if basedict and file not in basedict:
            reject("%s: extraneous entry in %s checksums" % (file, key))

        try:
            file_handle = daklib.utils.open_file(file)
        except daklib.utils.cant_open_exc:
            continue

        # Check hash
        if testfn(file_handle) != files[file][key]:
            reject("%s: %s check failed." % (file, key))
        file_handle.close()
        # Check size
        actual_size = os.stat(file)[stat.ST_SIZE]
        size = int(files[file]["size"])
        if size != actual_size:
            reject("%s: actual file size (%s) does not match size (%s) in %s"
                   % (file, actual_size, size, where))

################################################################################
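
# Illustrative sketch (not part of dak): the same hash-and-size check done
# with only the standard library, for readers without apt_pkg at hand.
# verify_file is a hypothetical helper.
import hashlib, os

def verify_file(path, expected_md5, expected_size):
    """Return a list of problems with PATH; empty if it checks out."""
    problems = []
    f = open(path, 'rb')
    if hashlib.md5(f.read()).hexdigest() != expected_md5:
        problems.append("%s: md5sum check failed." % path)
    f.close()
    if os.stat(path).st_size != int(expected_size):
        problems.append("%s: size mismatch." % path)
    return problems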

# Sanity check the time stamps of files inside debs.
# [Files in the near future cause ugly warnings and extreme time
#  travel can cause errors on extraction]

def check_timestamps():
    class Tar:
        def __init__(self, future_cutoff, past_cutoff):
            self.reset()
            self.future_cutoff = future_cutoff
            self.past_cutoff = past_cutoff

        def reset(self):
            self.future_files = {}
            self.ancient_files = {}

        def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
            if MTime > self.future_cutoff:
                self.future_files[Name] = MTime
            if MTime < self.past_cutoff:
                self.ancient_files[Name] = MTime
    ####

    future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
    past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"], "%Y"))
    tar = Tar(future_cutoff, past_cutoff)
    for filename in files.keys():
        if files[filename]["type"] == "deb":
            tar.reset()
            try:
                deb_file = daklib.utils.open_file(filename)
                apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                deb_file.seek(0)
                try:
                    apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                except SystemError, e:
                    # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                    if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                        raise
                    deb_file.seek(0)
                    apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")
                deb_file.close()
                #
                future_files = tar.future_files.keys()
                if future_files:
                    num_future_files = len(future_files)
                    future_file = future_files[0]
                    future_date = tar.future_files[future_file]
                    reject("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                           % (filename, num_future_files, future_file,
                              time.ctime(future_date)))
                #
                ancient_files = tar.ancient_files.keys()
                if ancient_files:
                    num_ancient_files = len(ancient_files)
                    ancient_file = ancient_files[0]
                    ancient_date = tar.ancient_files[ancient_file]
                    reject("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                           % (filename, num_ancient_files, ancient_file,
                              time.ctime(ancient_date)))
            except:
                reject("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))

################################################################################
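
# Illustrative sketch (not part of dak): how the two cutoffs above classify a
# tar member's mtime.  The default grace period and cutoff year are
# placeholder assumptions standing in for Dinstall::FutureTimeTravelGrace and
# Dinstall::PastCutoffYear; classify_mtime is a hypothetical helper.
import time

def classify_mtime(mtime, grace_seconds=86400, cutoff_year="1984"):
    """Return 'future', 'ancient' or 'ok' for a tar member MTIME."""
    future_cutoff = time.time() + grace_seconds
    past_cutoff = time.mktime(time.strptime(cutoff_year, "%Y"))
    if mtime > future_cutoff:
        return "future"
    if mtime < past_cutoff:
        return "ancient"
    return "ok"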

def lookup_uid_from_fingerprint(fpr):
    q = Upload.projectB.query("SELECT u.uid, u.name FROM fingerprint f, uid u WHERE f.uid = u.id AND f.fingerprint = '%s'" % (fpr))
    qs = q.getresult()
    if len(qs) == 0:
        return (None, None)
    else:
        return qs[0]

def check_signed_by_key():
    """Ensure the .changes is signed by an authorized uploader."""

    (uid, uid_name) = lookup_uid_from_fingerprint(changes["fingerprint"])
    if uid_name == None:
        uid_name = ""

    # match claimed name with actual name:
    if uid == None:
        uid, uid_email = changes["fingerprint"], uid
        may_nmu, may_sponsor = 1, 1
        # XXX by default new dds don't have a fingerprint/uid in the db atm,
        #     and can't get one in there if we don't allow nmu/sponsorship
    elif uid[:3] == "dm:":
        uid_email = uid[3:]
        may_nmu, may_sponsor = 0, 0
    else:
        uid_email = "%s@debian.org" % (uid)
        may_nmu, may_sponsor = 1, 1

    if uid_email in [changes["maintaineremail"], changes["changedbyemail"]]:
        sponsored = 0
    elif uid_name in [changes["maintainername"], changes["changedbyname"]]:
        sponsored = 0
        if uid_name == "":
            sponsored = 1
    else:
        sponsored = 1

    if sponsored and not may_sponsor:
        reject("%s is not authorised to sponsor uploads" % (uid))

    if not sponsored and not may_nmu:
        source_ids = []
        check_suites = changes["distribution"].keys()
        if "unstable" not in check_suites:
            check_suites.append("unstable")
        for suite in check_suites:
            suite_id = daklib.database.get_suite_id(suite)
            q = Upload.projectB.query("SELECT s.id FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND sa.suite = %d" % (changes["source"], suite_id))
            for si in q.getresult():
                if si[0] not in source_ids:
                    source_ids.append(si[0])

        print "source_ids: %s" % (",".join([str(x) for x in source_ids]))

        is_nmu = 1
        for si in source_ids:
            is_nmu = 1
            q = Upload.projectB.query("SELECT m.name FROM maintainer m WHERE m.id IN (SELECT maintainer FROM src_uploaders WHERE src_uploaders.source = %s)" % (si))
            for m in q.getresult():
                (rfc822, rfc2047, name, email) = daklib.utils.fix_maintainer(m[0])
                if email == uid_email or name == uid_name:
                    is_nmu = 0
                    break
        if is_nmu:
            reject("%s may not upload/NMU source package %s" % (uid, changes["source"]))

        for b in changes["binary"].keys():
            for suite in changes["distribution"].keys():
                suite_id = daklib.database.get_suite_id(suite)
                q = Upload.projectB.query("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba ON (b.id = ba.bin) WHERE b.package = '%s' AND ba.suite = %s" % (b, suite_id))
                for s in q.getresult():
                    if s[0] != changes["source"]:
                        reject("%s may not hijack %s from source package %s in suite %s" % (uid, b, s[0], suite))

    for file in files.keys():
        if files[file].has_key("byhand"):
            reject("%s may not upload BYHAND file %s" % (uid, file))
        if files[file].has_key("new"):
            reject("%s may not upload NEW file %s" % (uid, file))

    # The remaining checks only apply to binary-only uploads right now
    if changes["architecture"].has_key("source"):
        return

    if not Cnf.Exists("Binary-Upload-Restrictions"):
        return

    restrictions = Cnf.SubTree("Binary-Upload-Restrictions")

    # If the restrictions only apply to certain components make sure
    # that the upload is actually targeted there.
    if restrictions.Exists("Components"):
        restricted_components = restrictions.SubTree("Components").ValueList()
        is_restricted = False
        for file in files:
            if files[file]["component"] in restricted_components:
                is_restricted = True
                break
        if not is_restricted:
            return

    # Assuming binary only upload restrictions are in place we then
    # iterate over suite and architecture checking the key is in the
    # allowed list.  If no allowed list exists for a given suite or
    # architecture it's assumed to be open to anyone.
    for suite in changes["distribution"].keys():
        if not restrictions.Exists(suite):
            continue
        for arch in changes["architecture"].keys():
            if not restrictions.SubTree(suite).Exists(arch):
                continue
            allowed_keys = restrictions.SubTree("%s::%s" % (suite, arch)).ValueList()
            if changes["fingerprint"] not in allowed_keys:
                base_filename = os.path.basename(pkg.changes_file)
                reject("%s: not signed by authorised uploader for %s/%s"
                       % (base_filename, suite, arch))

################################################################################
################################################################################
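
# Illustrative sketch (not part of dak): the restriction lookup above,
# modelled with plain dictionaries instead of the apt_pkg configuration tree.
# The layout mirrors Binary-Upload-Restrictions::<suite>::<arch> holding a
# list of allowed key fingerprints; binary_upload_allowed is hypothetical.
def binary_upload_allowed(restrictions, suite, arch, fingerprint):
    """False only if an allowed-keys list exists and excludes FINGERPRINT."""
    allowed_keys = restrictions.get(suite, {}).get(arch)
    if allowed_keys is None:
        return True   # no list for this suite/arch: open to anyone
    return fingerprint in allowed_keys

# e.g. binary_upload_allowed({"unstable": {"amd64": ["DEADBEEF"]}},
#                            "unstable", "amd64", "DEADBEEF") is True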

# If any file of an upload has a recent mtime then chances are good
# the file is still being uploaded.

def upload_too_new():
    too_new = 0
    # Move back to the original directory to get accurate time stamps
    cwd = os.getcwd()
    os.chdir(pkg.directory)
    file_list = pkg.files.keys()
    file_list.extend(pkg.dsc_files.keys())
    file_list.append(pkg.changes_file)
    for file in file_list:
        try:
            last_modified = time.time() - os.path.getmtime(file)
            if last_modified < int(Cnf["Dinstall::SkipTime"]):
                too_new = 1
                break
        except:
            pass
    os.chdir(cwd)
    return too_new

################################################################################
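
# Illustrative sketch (not part of dak): the freshness test above for a
# single file.  skip_seconds stands in for Dinstall::SkipTime, and
# file_too_new is a hypothetical helper.
import os, time

def file_too_new(path, skip_seconds):
    """True if PATH was modified less than SKIP_SECONDS ago."""
    try:
        return (time.time() - os.path.getmtime(path)) < skip_seconds
    except OSError:
        return False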

def action ():