2 # Generate two rss feeds for a directory with .changes file
4 # License: GPL v2 or later
5 # Author: Filippo Giunchedi <filippo@debian.org>
15 from optparse import OptionParser
16 from datetime import datetime
17 from email.utils import parseaddr
21 from debian.deb822 import Changes
# Output filenames for the two generated feeds and the pickled state file.
inrss_filename = "NEW_in.rss"
outrss_filename = "NEW_out.rss"
db_filename = "status.db"

parser = OptionParser()
# NOTE: max_entries must default to an int.  optparse applies the
# type="int" conversion only to values supplied on the command line, so a
# string default ("30") would survive into settings.max_entries and later
# break list slicing in purge_old_items().
parser.set_defaults(queuedir="queue", outdir="out", datadir="status",
                    logdir="log", max_entries=30)

parser.add_option("-q", "--queuedir", dest="queuedir",
                  help="The queue dir (%default)")
parser.add_option("-o", "--outdir", dest="outdir",
                  help="The output directory (%default)")
parser.add_option("-d", "--datadir", dest="datadir",
                  help="The data dir (%default)")
parser.add_option("-l", "--logdir", dest="logdir",
                  help="The ACCEPT/REJECT dak log dir (%default)")
parser.add_option("-m", "--max-entries", dest="max_entries", type="int",
                  help="Max number of entries to keep (%default)")
        # Channel for packages that have just been uploaded into NEW.
        self.feed_in = PyRSS2Gen.RSS2(
            title = "Packages entering NEW",
            link = "https://ftp-master.debian.org/new.html",
            description = "Debian packages entering the NEW queue" )

        # Channel for packages that have left NEW (accepted or rejected).
        self.feed_out = PyRSS2Gen.RSS2(
            title = "Packages leaving NEW",
            link = "https://ftp-master.debian.org/new.html",
            description = "Debian packages leaving the NEW queue" )
def purge_old_items(feed, max):
    """ Purge RSSItem from feed, no more than max.

    Keep only the max newest items in feed.items (the list is maintained
    newest-first by add_rss_item).  Return False when the feed has no
    items to trim, True otherwise.

    NOTE: the parameter is named 'max' (shadowing the builtin); kept for
    backward compatibility with existing positional callers."""
    if feed.items is None or len(feed.items) == 0:
        return False
    feed.items = feed.items[:max]
    return True
def parse_changes(fname):
    """ Parse a .changes file named fname.

    Return {basename(fname): parsed} on success, or None when the file
    lacks any of the fields the feeds need."""
    # close the handle instead of leaking it (the original relied on GC)
    with open(fname) as changes_file:
        m = Changes(changes_file)

    wanted_fields = set(['Source', 'Version', 'Architecture', 'Distribution',
                         'Date', 'Maintainer', 'Description', 'Changes'])

    # incomplete .changes are useless for the feed items we build later
    if not set(m.keys()).issuperset(wanted_fields):
        return None

    return {os.path.basename(fname): m}
def parse_queuedir(dir):
    """ Parse dir for .changes files.

    Return a dictionary {filename: parsed_file}, or None when dir does
    not exist.  Files that fail to parse are silently skipped.

    NOTE: the parameter is named 'dir' (shadowing the builtin); kept for
    backward compatibility with existing callers."""
    if not os.path.exists(dir):
        return None

    res = {}
    for fname in os.listdir(dir):
        if not fname.endswith(".changes"):
            continue
        parsed = parse_changes(os.path.join(dir, fname))
        if parsed:
            res.update(parsed)
    return res
def parse_leave_reason(fname):
    """ Parse a dak log file fname for ACCEPT/REJECT reason from process-new.

    Return a dictionary {changes_filename: "ACCEPT"|"REJECT"}, or None
    when fname cannot be opened."""
    # raw string: "\|" is an invalid escape in a plain string literal
    reason_re = re.compile(r".+\|process-new\|.+\|NEW (ACCEPT|REJECT): (\S+)")

    try:
        f = open(fname)
    except IOError as e:
        sys.stderr.write("Can't open %s: %s\n" % (fname, e))
        return None

    res = {}
    for line in f:
        m = reason_re.search(line)
        if m:
            # key on the .changes filename, value is the verdict
            res[m.group(2)] = m.group(1)
    f.close()
    return res
def add_rss_item(status, msg, direction):
    """ Prepend an RSSItem describing msg to one of status' feeds.

    direction is "in" (package entered NEW, feed_in) or "out" (package
    left NEW, feed_out); any other value returns False.  msg is a parsed
    .changes mapping with at least Source, Version, Date, Maintainer,
    Description and Changes fields."""
    if direction == "in":
        feed = status.feed_in
        title = "%s %s entered NEW" % (msg['Source'], msg['Version'])
        # the upload itself carries its date
        pubdate = msg['Date']
    elif direction == "out":
        feed = status.feed_out
        # 'in' instead of deprecated dict.has_key() (removed in Python 3)
        if 'Leave-Reason' in msg:
            title = "%s %s left NEW (%s)" % (msg['Source'], msg['Version'],
                                             msg['Leave-Reason'])
        else:
            title = "%s %s left NEW" % (msg['Source'], msg['Version'])
        # removal is only noticed now, so timestamp it ourselves
        pubdate = datetime.utcnow()
    else:
        return False

    description = "<pre>Description: %s\nChanges: %s\n</pre>" % \
            (cgi.escape(msg['Description']),
             cgi.escape(msg['Changes']))

    link = "https://ftp-master.debian.org/new/%s_%s.html" % \
            (msg['Source'], msg['Version'])

    maintainer = parseaddr(msg['Maintainer'])
    author = "%s (%s)" % (maintainer[1], maintainer[0])

    # newest entries first
    feed.items.insert(0,
        PyRSS2Gen.RSSItem(
            title,
            pubDate = pubdate,
            description = description,
            author = cgi.escape(author),
            link = link,
            guid = link))
def update_feeds(curqueue, status, settings):
    """ Reconcile the current NEW queue with the previously seen one.

    Items in curqueue missing from status.queue are announced on the
    "in" feed; items that disappeared from the queue are announced on
    the "out" feed, annotated with the ACCEPT/REJECT verdict when it can
    be found in dak's process-new log."""
    # lazily-loaded {changes filename: ACCEPT/REJECT} map
    leave_reason = None
    # logfile from dak's process-new, rotated monthly
    reason_log = os.path.join(settings.logdir, time.strftime("%Y-%m"))

    # 'in' instead of deprecated dict.has_key() (removed in Python 3)
    for (name, parsed) in curqueue.items():
        if name not in status.queue:
            # new package
            add_rss_item(status, parsed, "in")

    for (name, parsed) in status.queue.items():
        if name not in curqueue:
            # removed package, try to find out why
            if leave_reason is None:
                leave_reason = parse_leave_reason(reason_log)
            if leave_reason and name in leave_reason:
                parsed['Leave-Reason'] = leave_reason[name]
            add_rss_item(status, parsed, "out")
if __name__ == "__main__":
    (settings, args) = parser.parse_args()

    if not os.path.exists(settings.outdir):
        sys.stderr.write("Outdir '%s' does not exist\n" % settings.outdir)
        parser.print_help()
        sys.exit(1)

    if not os.path.exists(settings.datadir):
        sys.stderr.write("Datadir '%s' does not exist\n" % settings.datadir)
        parser.print_help()
        sys.exit(1)

    status_db = os.path.join(settings.datadir, db_filename)
    # pickles must be read in binary mode; close the handle when done
    try:
        with open(status_db, "rb") as db_file:
            status = cPickle.load(db_file)
    except IOError:
        # no previous state: start with a fresh pair of empty feeds
        status = Status()

    current_queue = parse_queuedir(settings.queuedir)

    update_feeds(current_queue, status, settings)

    purge_old_items(status.feed_in, settings.max_entries)
    purge_old_items(status.feed_out, settings.max_entries)

    feed_in_file = os.path.join(settings.outdir, inrss_filename)
    feed_out_file = os.path.join(settings.outdir, outrss_filename)
    try:
        # open() instead of the removed file() builtin; 'with' closes them
        with open(feed_in_file, "w+") as feed_file:
            status.feed_in.write_xml(feed_file, "utf-8")
        with open(feed_out_file, "w+") as feed_file:
            status.feed_out.write_xml(feed_file, "utf-8")
    except IOError as why:
        # bug fix: write() takes one string, message must be %-formatted
        sys.stderr.write("Unable to write feeds: %s\n" % why)
        sys.exit(1)

    status.queue = current_queue
    try:
        with open(status_db, "wb") as db_file:
            cPickle.dump(status, db_file)
    except IOError as why:
        # bug fix: write() takes one string, message must be %-formatted
        sys.stderr.write("Unable to save status: %s\n" % why)
        sys.exit(1)