@@ -0,0 +1,21 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
2 | ||
|
3 | # Copyright (C) 2016-2019 RhodeCode GmbH | |
|
4 | # | |
|
5 | # This program is free software: you can redistribute it and/or modify | |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
|
7 | # (only), as published by the Free Software Foundation. | |
|
8 | # | |
|
9 | # This program is distributed in the hope that it will be useful, | |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
|
12 | # GNU General Public License for more details. | |
|
13 | # | |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
|
16 | # | |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
|
20 | ||
|
21 | from feedgenerator import Rss201rev2Feed, Atom1Feed No newline at end of file |
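The __init__.py above re-exports the two feed classes from the sibling feedgenerator module, so callers can import them from the package root. A one-line sketch of how the views later in this changeset consume it:

    # package-level import, as used by the journal and repo feed views further down
    from rhodecode.lib.feedgenerator import Rss201rev2Feed, Atom1Feed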
@@ -0,0 +1,117 b'' | |||
|
1 | # Copyright (c) Django Software Foundation and individual contributors. | |
|
2 | # All rights reserved. | |
|
3 | # | |
|
4 | # Redistribution and use in source and binary forms, with or without modification, | |
|
5 | # are permitted provided that the following conditions are met: | |
|
6 | # | |
|
7 | # 1. Redistributions of source code must retain the above copyright notice, | |
|
8 | # this list of conditions and the following disclaimer. | |
|
9 | # | |
|
10 | # 2. Redistributions in binary form must reproduce the above copyright | |
|
11 | # notice, this list of conditions and the following disclaimer in the | |
|
12 | # documentation and/or other materials provided with the distribution. | |
|
13 | # | |
|
14 | # 3. Neither the name of Django nor the names of its contributors may be used | |
|
15 | # to endorse or promote products derived from this software without | |
|
16 | # specific prior written permission. | |
|
17 | # | |
|
18 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND | |
|
19 | # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED | |
|
20 | # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE | |
|
21 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR | |
|
22 | # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES | |
|
23 | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; | |
|
24 | # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON | |
|
25 | # ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
|
26 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS | |
|
27 | # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
|
28 | ||
|
29 | # Python's datetime strftime doesn't handle dates before 1900. | |
|
30 | # These classes override date and datetime to support the formatting of a date | |
|
31 | # through its full "proleptic Gregorian" date range. | |
|
32 | # | |
|
33 | # Based on code submitted to comp.lang.python by Andrew Dalke | |
|
34 | # | |
|
35 | # >>> datetime_safe.date(1850, 8, 2).strftime("%Y/%m/%d was a %A") | |
|
36 | # '1850/08/02 was a Friday' | |
|
37 | ||
|
38 | from datetime import date as real_date, datetime as real_datetime | |
|
39 | import re | |
|
40 | import time | |
|
41 | ||
|
42 | class date(real_date): | |
|
43 | def strftime(self, fmt): | |
|
44 | return strftime(self, fmt) | |
|
45 | ||
|
46 | class datetime(real_datetime): | |
|
47 | def strftime(self, fmt): | |
|
48 | return strftime(self, fmt) | |
|
49 | ||
|
50 | def combine(self, date, time): | |
|
51 | return datetime(date.year, date.month, date.day, time.hour, time.minute, time.second, time.microsecond, time.tzinfo) | |
|
52 | ||
|
53 | def date(self): | |
|
54 | return date(self.year, self.month, self.day) | |
|
55 | ||
|
56 | def new_date(d): | |
|
57 | "Generate a safe date from a datetime.date object." | |
|
58 | return date(d.year, d.month, d.day) | |
|
59 | ||
|
60 | def new_datetime(d): | |
|
61 | """ | |
|
62 | Generate a safe datetime from a datetime.date or datetime.datetime object. | |
|
63 | """ | |
|
64 | kw = [d.year, d.month, d.day] | |
|
65 | if isinstance(d, real_datetime): | |
|
66 | kw.extend([d.hour, d.minute, d.second, d.microsecond, d.tzinfo]) | |
|
67 | return datetime(*kw) | |
|
68 | ||
|
69 | # This library does not support strftime's "%s" or "%y" format strings. | |
|
70 | # Allowed if there's an even number of "%"s because they are escaped. | |
|
71 | _illegal_formatting = re.compile(r"((^|[^%])(%%)*%[sy])") | |
|
72 | ||
|
73 | def _findall(text, substr): | |
|
74 | # Also finds overlaps | |
|
75 | sites = [] | |
|
76 | i = 0 | |
|
77 | while 1: | |
|
78 | j = text.find(substr, i) | |
|
79 | if j == -1: | |
|
80 | break | |
|
81 | sites.append(j) | |
|
82 | i = j + 1 | |
|
83 | return sites | |
|
84 | ||
|
85 | def strftime(dt, fmt): | |
|
86 | if dt.year >= 1900: | |
|
87 | return super(type(dt), dt).strftime(fmt) | |
|
88 | illegal_formatting = _illegal_formatting.search(fmt) | |
|
89 | if illegal_formatting: | |
|
90 | raise TypeError("strftime of dates before 1900 does not handle " + illegal_formatting.group(0)) | |
|
91 | ||
|
92 | year = dt.year | |
|
93 | # For every non-leap year century, advance by | |
|
94 | # 6 years to get into the 28-year repeat cycle | |
|
95 | delta = 2000 - year | |
|
96 | off = 6 * (delta // 100 + delta // 400) | |
|
97 | year = year + off | |
|
98 | ||
|
99 | # Move to around the year 2000 | |
|
100 | year = year + ((2000 - year) // 28) * 28 | |
|
101 | timetuple = dt.timetuple() | |
|
102 | s1 = time.strftime(fmt, (year,) + timetuple[1:]) | |
|
103 | sites1 = _findall(s1, str(year)) | |
|
104 | ||
|
105 | s2 = time.strftime(fmt, (year+28,) + timetuple[1:]) | |
|
106 | sites2 = _findall(s2, str(year+28)) | |
|
107 | ||
|
108 | sites = [] | |
|
109 | for site in sites1: | |
|
110 | if site in sites2: | |
|
111 | sites.append(site) | |
|
112 | ||
|
113 | s = s1 | |
|
114 | syear = "%04d" % (dt.year,) | |
|
115 | for site in sites: | |
|
116 | s = s[:site] + syear + s[site+4:] | |
|
117 | return s |
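A minimal usage sketch of the datetime_safe module above, mirroring its own doctest; the import path matches how feedgenerator.py below pulls it in:

    import datetime
    from rhodecode.lib.feedgenerator import datetime_safe

    # stdlib strftime() on Python 2 rejects years before 1900; the safe wrappers do not
    d = datetime_safe.date(1850, 8, 2)
    print(d.strftime('%Y/%m/%d was a %A'))          # -> 1850/08/02 was a Friday

    # new_datetime() upgrades a plain date/datetime to the safe variant
    safe_dt = datetime_safe.new_datetime(datetime.datetime(1850, 8, 2, 12, 30))
    print(safe_dt.strftime('%Y-%m-%dT%H:%M:%S'))    # -> 1850-08-02T12:30:00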
@@ -0,0 +1,444 b'' | |||
|
1 | # Copyright (c) Django Software Foundation and individual contributors. | |
|
2 | # All rights reserved. | |
|
3 | # | |
|
4 | # Redistribution and use in source and binary forms, with or without modification, | |
|
5 | # are permitted provided that the following conditions are met: | |
|
6 | # | |
|
7 | # 1. Redistributions of source code must retain the above copyright notice, | |
|
8 | # this list of conditions and the following disclaimer. | |
|
9 | # | |
|
10 | # 2. Redistributions in binary form must reproduce the above copyright | |
|
11 | # notice, this list of conditions and the following disclaimer in the | |
|
12 | # documentation and/or other materials provided with the distribution. | |
|
13 | # | |
|
14 | # 3. Neither the name of Django nor the names of its contributors may be used | |
|
15 | # to endorse or promote products derived from this software without | |
|
16 | # specific prior written permission. | |
|
17 | # | |
|
18 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND | |
|
19 | # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED | |
|
20 | # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE | |
|
21 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR | |
|
22 | # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES | |
|
23 | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; | |
|
24 | # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON | |
|
25 | # ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
|
26 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS | |
|
27 | # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
|
28 | ||
|
29 | """ | |
|
30 | For definitions of the different versions of RSS, see: | |
|
31 | http://web.archive.org/web/20110718035220/http://diveintomark.org/archives/2004/02/04/incompatible-rss | |
|
32 | """ | |
|
33 | from __future__ import unicode_literals | |
|
34 | ||
|
35 | import datetime | |
|
36 | from StringIO import StringIO | |
|
37 | from six.moves.urllib import parse as urlparse | |
|
38 | ||
|
39 | from rhodecode.lib.feedgenerator import datetime_safe | |
|
40 | from rhodecode.lib.feedgenerator.utils import SimplerXMLGenerator, iri_to_uri, force_text | |
|
41 | ||
|
42 | ||
|
43 | #### The following code comes from ``django.utils.feedgenerator`` #### | |
|
44 | ||
|
45 | ||
|
46 | def rfc2822_date(date): | |
|
47 | # We can't use strftime() because it produces locale-dependent results, so | |
|
48 | # we have to map English month and day names manually | |
|
49 | months = ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec',) | |
|
50 | days = ('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun') | |
|
51 | # Support datetime objects older than 1900 | |
|
52 | date = datetime_safe.new_datetime(date) | |
|
53 | # We do this ourselves to be timezone aware, email.Utils is not tz aware. | |
|
54 | dow = days[date.weekday()] | |
|
55 | month = months[date.month - 1] | |
|
56 | time_str = date.strftime('%s, %%d %s %%Y %%H:%%M:%%S ' % (dow, month)) | |
|
57 | ||
|
58 | time_str = time_str.decode('utf-8') | |
|
59 | offset = date.utcoffset() | |
|
60 | # Historically, this function assumes that naive datetimes are in UTC. | |
|
61 | if offset is None: | |
|
62 | return time_str + '-0000' | |
|
63 | else: | |
|
64 | timezone = (offset.days * 24 * 60) + (offset.seconds // 60) | |
|
65 | hour, minute = divmod(timezone, 60) | |
|
66 | return time_str + '%+03d%02d' % (hour, minute) | |
|
67 | ||
|
68 | ||
|
69 | def rfc3339_date(date): | |
|
70 | # Support datetime objects older than 1900 | |
|
71 | date = datetime_safe.new_datetime(date) | |
|
72 | time_str = date.strftime('%Y-%m-%dT%H:%M:%S') | |
|
73 | ||
|
74 | time_str = time_str.decode('utf-8') | |
|
75 | offset = date.utcoffset() | |
|
76 | # Historically, this function assumes that naive datetimes are in UTC. | |
|
77 | if offset is None: | |
|
78 | return time_str + 'Z' | |
|
79 | else: | |
|
80 | timezone = (offset.days * 24 * 60) + (offset.seconds // 60) | |
|
81 | hour, minute = divmod(timezone, 60) | |
|
82 | return time_str + '%+03d:%02d' % (hour, minute) | |
|
83 | ||
|
84 | ||
|
85 | def get_tag_uri(url, date): | |
|
86 | """ | |
|
87 | Creates a TagURI. | |
|
88 | ||
|
89 | See http://web.archive.org/web/20110514113830/http://diveintomark.org/archives/2004/05/28/howto-atom-id | |
|
90 | """ | |
|
91 | bits = urlparse.urlparse(url) | |
|
92 | d = '' | |
|
93 | if date is not None: | |
|
94 | d = ',%s' % datetime_safe.new_datetime(date).strftime('%Y-%m-%d') | |
|
95 | return 'tag:%s%s:%s/%s' % (bits.hostname, d, bits.path, bits.fragment) | |
|
96 | ||
|
97 | ||
|
98 | class SyndicationFeed(object): | |
|
99 | """Base class for all syndication feeds. Subclasses should provide write()""" | |
|
100 | ||
|
101 | def __init__(self, title, link, description, language=None, author_email=None, | |
|
102 | author_name=None, author_link=None, subtitle=None, categories=None, | |
|
103 | feed_url=None, feed_copyright=None, feed_guid=None, ttl=None, **kwargs): | |
|
104 | def to_unicode(s): | |
|
105 | return force_text(s, strings_only=True) | |
|
106 | if categories: | |
|
107 | categories = [force_text(c) for c in categories] | |
|
108 | if ttl is not None: | |
|
109 | # Force ints to unicode | |
|
110 | ttl = force_text(ttl) | |
|
111 | self.feed = { | |
|
112 | 'title': to_unicode(title), | |
|
113 | 'link': iri_to_uri(link), | |
|
114 | 'description': to_unicode(description), | |
|
115 | 'language': to_unicode(language), | |
|
116 | 'author_email': to_unicode(author_email), | |
|
117 | 'author_name': to_unicode(author_name), | |
|
118 | 'author_link': iri_to_uri(author_link), | |
|
119 | 'subtitle': to_unicode(subtitle), | |
|
120 | 'categories': categories or (), | |
|
121 | 'feed_url': iri_to_uri(feed_url), | |
|
122 | 'feed_copyright': to_unicode(feed_copyright), | |
|
123 | 'id': feed_guid or link, | |
|
124 | 'ttl': ttl, | |
|
125 | } | |
|
126 | self.feed.update(kwargs) | |
|
127 | self.items = [] | |
|
128 | ||
|
129 | def add_item(self, title, link, description, author_email=None, | |
|
130 | author_name=None, author_link=None, pubdate=None, comments=None, | |
|
131 | unique_id=None, unique_id_is_permalink=None, enclosure=None, | |
|
132 | categories=(), item_copyright=None, ttl=None, updateddate=None, | |
|
133 | enclosures=None, **kwargs): | |
|
134 | """ | |
|
135 | Adds an item to the feed. All args are expected to be Python Unicode | |
|
136 | objects except pubdate and updateddate, which are datetime.datetime | |
|
137 | objects, and enclosures, which is an iterable of instances of the | |
|
138 | Enclosure class. | |
|
139 | """ | |
|
140 | def to_unicode(s): | |
|
141 | return force_text(s, strings_only=True) | |
|
142 | if categories: | |
|
143 | categories = [to_unicode(c) for c in categories] | |
|
144 | if ttl is not None: | |
|
145 | # Force ints to unicode | |
|
146 | ttl = force_text(ttl) | |
|
147 | if enclosure is None: | |
|
148 | enclosures = [] if enclosures is None else enclosures | |
|
149 | ||
|
150 | item = { | |
|
151 | 'title': to_unicode(title), | |
|
152 | 'link': iri_to_uri(link), | |
|
153 | 'description': to_unicode(description), | |
|
154 | 'author_email': to_unicode(author_email), | |
|
155 | 'author_name': to_unicode(author_name), | |
|
156 | 'author_link': iri_to_uri(author_link), | |
|
157 | 'pubdate': pubdate, | |
|
158 | 'updateddate': updateddate, | |
|
159 | 'comments': to_unicode(comments), | |
|
160 | 'unique_id': to_unicode(unique_id), | |
|
161 | 'unique_id_is_permalink': unique_id_is_permalink, | |
|
162 | 'enclosures': enclosures, | |
|
163 | 'categories': categories or (), | |
|
164 | 'item_copyright': to_unicode(item_copyright), | |
|
165 | 'ttl': ttl, | |
|
166 | } | |
|
167 | item.update(kwargs) | |
|
168 | self.items.append(item) | |
|
169 | ||
|
170 | def num_items(self): | |
|
171 | return len(self.items) | |
|
172 | ||
|
173 | def root_attributes(self): | |
|
174 | """ | |
|
175 | Return extra attributes to place on the root (i.e. feed/channel) element. | |
|
176 | Called from write(). | |
|
177 | """ | |
|
178 | return {} | |
|
179 | ||
|
180 | def add_root_elements(self, handler): | |
|
181 | """ | |
|
182 | Add elements in the root (i.e. feed/channel) element. Called | |
|
183 | from write(). | |
|
184 | """ | |
|
185 | pass | |
|
186 | ||
|
187 | def item_attributes(self, item): | |
|
188 | """ | |
|
189 | Return extra attributes to place on each item (i.e. item/entry) element. | |
|
190 | """ | |
|
191 | return {} | |
|
192 | ||
|
193 | def add_item_elements(self, handler, item): | |
|
194 | """ | |
|
195 | Add elements on each item (i.e. item/entry) element. | |
|
196 | """ | |
|
197 | pass | |
|
198 | ||
|
199 | def write(self, outfile, encoding): | |
|
200 | """ | |
|
201 | Outputs the feed in the given encoding to outfile, which is a file-like | |
|
202 | object. Subclasses should override this. | |
|
203 | """ | |
|
204 | raise NotImplementedError('subclasses of SyndicationFeed must provide a write() method') | |
|
205 | ||
|
206 | def writeString(self, encoding): | |
|
207 | """ | |
|
208 | Returns the feed in the given encoding as a string. | |
|
209 | """ | |
|
210 | s = StringIO() | |
|
211 | self.write(s, encoding) | |
|
212 | return s.getvalue() | |
|
213 | ||
|
214 | def latest_post_date(self): | |
|
215 | """ | |
|
216 | Returns the latest item's pubdate or updateddate. If no items | |
|
217 | have either of these attributes this returns the current UTC date/time. | |
|
218 | """ | |
|
219 | latest_date = None | |
|
220 | date_keys = ('updateddate', 'pubdate') | |
|
221 | ||
|
222 | for item in self.items: | |
|
223 | for date_key in date_keys: | |
|
224 | item_date = item.get(date_key) | |
|
225 | if item_date: | |
|
226 | if latest_date is None or item_date > latest_date: | |
|
227 | latest_date = item_date | |
|
228 | ||
|
229 | # datetime.now(tz=utc) is slower, as documented in django.utils.timezone.now | |
|
230 | return latest_date or datetime.datetime.utcnow()  # naive datetimes are treated as UTC by rfc2822/rfc3339_date | |
|
231 | ||
|
232 | ||
|
233 | class Enclosure(object): | |
|
234 | "Represents an RSS enclosure" | |
|
235 | def __init__(self, url, length, mime_type): | |
|
236 | "All args are expected to be Python Unicode objects" | |
|
237 | self.length, self.mime_type = length, mime_type | |
|
238 | self.url = iri_to_uri(url) | |
|
239 | ||
|
240 | ||
|
241 | class RssFeed(SyndicationFeed): | |
|
242 | content_type = 'application/rss+xml; charset=utf-8' | |
|
243 | ||
|
244 | def write(self, outfile, encoding): | |
|
245 | handler = SimplerXMLGenerator(outfile, encoding) | |
|
246 | handler.startDocument() | |
|
247 | handler.startElement("rss", self.rss_attributes()) | |
|
248 | handler.startElement("channel", self.root_attributes()) | |
|
249 | self.add_root_elements(handler) | |
|
250 | self.write_items(handler) | |
|
251 | self.endChannelElement(handler) | |
|
252 | handler.endElement("rss") | |
|
253 | ||
|
254 | def rss_attributes(self): | |
|
255 | return {"version": self._version, | |
|
256 | "xmlns:atom": "http://www.w3.org/2005/Atom"} | |
|
257 | ||
|
258 | def write_items(self, handler): | |
|
259 | for item in self.items: | |
|
260 | handler.startElement('item', self.item_attributes(item)) | |
|
261 | self.add_item_elements(handler, item) | |
|
262 | handler.endElement("item") | |
|
263 | ||
|
264 | def add_root_elements(self, handler): | |
|
265 | handler.addQuickElement("title", self.feed['title']) | |
|
266 | handler.addQuickElement("link", self.feed['link']) | |
|
267 | handler.addQuickElement("description", self.feed['description']) | |
|
268 | if self.feed['feed_url'] is not None: | |
|
269 | handler.addQuickElement("atom:link", None, {"rel": "self", "href": self.feed['feed_url']}) | |
|
270 | if self.feed['language'] is not None: | |
|
271 | handler.addQuickElement("language", self.feed['language']) | |
|
272 | for cat in self.feed['categories']: | |
|
273 | handler.addQuickElement("category", cat) | |
|
274 | if self.feed['feed_copyright'] is not None: | |
|
275 | handler.addQuickElement("copyright", self.feed['feed_copyright']) | |
|
276 | handler.addQuickElement("lastBuildDate", rfc2822_date(self.latest_post_date())) | |
|
277 | if self.feed['ttl'] is not None: | |
|
278 | handler.addQuickElement("ttl", self.feed['ttl']) | |
|
279 | ||
|
280 | def endChannelElement(self, handler): | |
|
281 | handler.endElement("channel") | |
|
282 | ||
|
283 | ||
|
284 | class RssUserland091Feed(RssFeed): | |
|
285 | _version = "0.91" | |
|
286 | ||
|
287 | def add_item_elements(self, handler, item): | |
|
288 | handler.addQuickElement("title", item['title']) | |
|
289 | handler.addQuickElement("link", item['link']) | |
|
290 | if item['description'] is not None: | |
|
291 | handler.addQuickElement("description", item['description']) | |
|
292 | ||
|
293 | ||
|
294 | class Rss201rev2Feed(RssFeed): | |
|
295 | # Spec: http://blogs.law.harvard.edu/tech/rss | |
|
296 | _version = "2.0" | |
|
297 | ||
|
298 | def add_item_elements(self, handler, item): | |
|
299 | handler.addQuickElement("title", item['title']) | |
|
300 | handler.addQuickElement("link", item['link']) | |
|
301 | if item['description'] is not None: | |
|
302 | handler.addQuickElement("description", item['description']) | |
|
303 | ||
|
304 | # Author information. | |
|
305 | if item["author_name"] and item["author_email"]: | |
|
306 | handler.addQuickElement("author", "%s (%s)" % (item['author_email'], item['author_name'])) | |
|
307 | elif item["author_email"]: | |
|
308 | handler.addQuickElement("author", item["author_email"]) | |
|
309 | elif item["author_name"]: | |
|
310 | handler.addQuickElement( | |
|
311 | "dc:creator", item["author_name"], {"xmlns:dc": "http://purl.org/dc/elements/1.1/"} | |
|
312 | ) | |
|
313 | ||
|
314 | if item['pubdate'] is not None: | |
|
315 | handler.addQuickElement("pubDate", rfc2822_date(item['pubdate'])) | |
|
316 | if item['comments'] is not None: | |
|
317 | handler.addQuickElement("comments", item['comments']) | |
|
318 | if item['unique_id'] is not None: | |
|
319 | guid_attrs = {} | |
|
320 | if isinstance(item.get('unique_id_is_permalink'), bool): | |
|
321 | guid_attrs['isPermaLink'] = str(item['unique_id_is_permalink']).lower() | |
|
322 | handler.addQuickElement("guid", item['unique_id'], guid_attrs) | |
|
323 | if item['ttl'] is not None: | |
|
324 | handler.addQuickElement("ttl", item['ttl']) | |
|
325 | ||
|
326 | # Enclosure. | |
|
327 | if item['enclosures']: | |
|
328 | enclosures = list(item['enclosures']) | |
|
329 | if len(enclosures) > 1: | |
|
330 | raise ValueError( | |
|
331 | "RSS feed items may only have one enclosure, see " | |
|
332 | "http://www.rssboard.org/rss-profile#element-channel-item-enclosure" | |
|
333 | ) | |
|
334 | enclosure = enclosures[0] | |
|
335 | handler.addQuickElement('enclosure', '', { | |
|
336 | 'url': enclosure.url, | |
|
337 | 'length': enclosure.length, | |
|
338 | 'type': enclosure.mime_type, | |
|
339 | }) | |
|
340 | ||
|
341 | # Categories. | |
|
342 | for cat in item['categories']: | |
|
343 | handler.addQuickElement("category", cat) | |
|
344 | ||
|
345 | ||
|
346 | class Atom1Feed(SyndicationFeed): | |
|
347 | # Spec: https://tools.ietf.org/html/rfc4287 | |
|
348 | content_type = 'application/atom+xml; charset=utf-8' | |
|
349 | ns = "http://www.w3.org/2005/Atom" | |
|
350 | ||
|
351 | def write(self, outfile, encoding): | |
|
352 | handler = SimplerXMLGenerator(outfile, encoding) | |
|
353 | handler.startDocument() | |
|
354 | handler.startElement('feed', self.root_attributes()) | |
|
355 | self.add_root_elements(handler) | |
|
356 | self.write_items(handler) | |
|
357 | handler.endElement("feed") | |
|
358 | ||
|
359 | def root_attributes(self): | |
|
360 | if self.feed['language'] is not None: | |
|
361 | return {"xmlns": self.ns, "xml:lang": self.feed['language']} | |
|
362 | else: | |
|
363 | return {"xmlns": self.ns} | |
|
364 | ||
|
365 | def add_root_elements(self, handler): | |
|
366 | handler.addQuickElement("title", self.feed['title']) | |
|
367 | handler.addQuickElement("link", "", {"rel": "alternate", "href": self.feed['link']}) | |
|
368 | if self.feed['feed_url'] is not None: | |
|
369 | handler.addQuickElement("link", "", {"rel": "self", "href": self.feed['feed_url']}) | |
|
370 | handler.addQuickElement("id", self.feed['id']) | |
|
371 | handler.addQuickElement("updated", rfc3339_date(self.latest_post_date())) | |
|
372 | if self.feed['author_name'] is not None: | |
|
373 | handler.startElement("author", {}) | |
|
374 | handler.addQuickElement("name", self.feed['author_name']) | |
|
375 | if self.feed['author_email'] is not None: | |
|
376 | handler.addQuickElement("email", self.feed['author_email']) | |
|
377 | if self.feed['author_link'] is not None: | |
|
378 | handler.addQuickElement("uri", self.feed['author_link']) | |
|
379 | handler.endElement("author") | |
|
380 | if self.feed['subtitle'] is not None: | |
|
381 | handler.addQuickElement("subtitle", self.feed['subtitle']) | |
|
382 | for cat in self.feed['categories']: | |
|
383 | handler.addQuickElement("category", "", {"term": cat}) | |
|
384 | if self.feed['feed_copyright'] is not None: | |
|
385 | handler.addQuickElement("rights", self.feed['feed_copyright']) | |
|
386 | ||
|
387 | def write_items(self, handler): | |
|
388 | for item in self.items: | |
|
389 | handler.startElement("entry", self.item_attributes(item)) | |
|
390 | self.add_item_elements(handler, item) | |
|
391 | handler.endElement("entry") | |
|
392 | ||
|
393 | def add_item_elements(self, handler, item): | |
|
394 | handler.addQuickElement("title", item['title']) | |
|
395 | handler.addQuickElement("link", "", {"href": item['link'], "rel": "alternate"}) | |
|
396 | ||
|
397 | if item['pubdate'] is not None: | |
|
398 | handler.addQuickElement('published', rfc3339_date(item['pubdate'])) | |
|
399 | ||
|
400 | if item['updateddate'] is not None: | |
|
401 | handler.addQuickElement('updated', rfc3339_date(item['updateddate'])) | |
|
402 | ||
|
403 | # Author information. | |
|
404 | if item['author_name'] is not None: | |
|
405 | handler.startElement("author", {}) | |
|
406 | handler.addQuickElement("name", item['author_name']) | |
|
407 | if item['author_email'] is not None: | |
|
408 | handler.addQuickElement("email", item['author_email']) | |
|
409 | if item['author_link'] is not None: | |
|
410 | handler.addQuickElement("uri", item['author_link']) | |
|
411 | handler.endElement("author") | |
|
412 | ||
|
413 | # Unique ID. | |
|
414 | if item['unique_id'] is not None: | |
|
415 | unique_id = item['unique_id'] | |
|
416 | else: | |
|
417 | unique_id = get_tag_uri(item['link'], item['pubdate']) | |
|
418 | handler.addQuickElement("id", unique_id) | |
|
419 | ||
|
420 | # Summary. | |
|
421 | if item['description'] is not None: | |
|
422 | handler.addQuickElement("summary", item['description'], {"type": "html"}) | |
|
423 | ||
|
424 | # Enclosures. | |
|
425 | for enclosure in item['enclosures']: | |
|
426 | handler.addQuickElement('link', '', { | |
|
427 | 'rel': 'enclosure', | |
|
428 | 'href': enclosure.url, | |
|
429 | 'length': enclosure.length, | |
|
430 | 'type': enclosure.mime_type, | |
|
431 | }) | |
|
432 | ||
|
433 | # Categories. | |
|
434 | for cat in item['categories']: | |
|
435 | handler.addQuickElement("category", "", {"term": cat}) | |
|
436 | ||
|
437 | # Rights. | |
|
438 | if item['item_copyright'] is not None: | |
|
439 | handler.addQuickElement("rights", item['item_copyright']) | |
|
440 | ||
|
441 | ||
|
442 | # This isolates the decision of what the system default is, so calling code can | |
|
443 | # do "feedgenerator.DefaultFeed" instead of "feedgenerator.Rss201rev2Feed". | |
|
444 | DefaultFeed = Rss201rev2Feed No newline at end of file |
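A condensed sketch of how the journal and repository feed views later in this changeset drive these classes (titles, links and dates here are placeholder values):

    import datetime
    from rhodecode.lib.feedgenerator.feedgenerator import Atom1Feed

    feed = Atom1Feed(
        title='example feed', link='http://example.com/',
        description='example description', language='en-us', ttl='5')
    feed.add_item(
        unique_id='example:1',
        title='first entry',
        link='http://example.com/entry/1',
        description='entry body',
        author_name='example author', author_email='author@example.com',
        pubdate=datetime.datetime(2019, 1, 1))

    body = feed.writeString('utf-8')   # the serialized <feed> document
    mime = feed.content_type           # 'application/atom+xml; charset=utf-8'

Rss201rev2Feed is driven the same way; DefaultFeed simply aliases it so callers that do not care about the concrete format can stay generic.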
@@ -0,0 +1,57 b'' | |||
|
1 | """ | |
|
2 | Utilities for XML generation/parsing. | |
|
3 | """ | |
|
4 | ||
|
5 | import six | |
|
6 | ||
|
7 | from xml.sax.saxutils import XMLGenerator, quoteattr | |
|
8 | from urllib import quote | |
|
9 | from rhodecode.lib.utils import safe_str, safe_unicode | |
|
10 | ||
|
11 | ||
|
12 | class SimplerXMLGenerator(XMLGenerator): | |
|
13 | def addQuickElement(self, name, contents=None, attrs=None): | |
|
14 | "Convenience method for adding an element with no children" | |
|
15 | if attrs is None: | |
|
16 | attrs = {} | |
|
17 | self.startElement(name, attrs) | |
|
18 | if contents is not None: | |
|
19 | self.characters(contents) | |
|
20 | self.endElement(name) | |
|
21 | ||
|
22 | def startElement(self, name, attrs): | |
|
23 | self._write('<' + name) | |
|
24 | # sort attributes for consistent output | |
|
25 | for (name, value) in sorted(attrs.items()): | |
|
26 | self._write(' %s=%s' % (name, quoteattr(value))) | |
|
27 | self._write(six.u('>')) | |
|
28 | ||
|
29 | ||
|
30 | def iri_to_uri(iri): | |
|
31 | """ | |
|
32 | Convert an Internationalized Resource Identifier (IRI) portion to a URI | |
|
33 | portion that is suitable for inclusion in a URL. | |
|
34 | This is the algorithm from section 3.1 of RFC 3987. However, since we are | |
|
35 | assuming input is either UTF-8 or unicode already, we can simplify things a | |
|
36 | little from the full method. | |
|
37 | Returns an ASCII string containing the encoded result. | |
|
38 | """ | |
|
39 | # The list of safe characters here is constructed from the "reserved" and | |
|
40 | # "unreserved" characters specified in sections 2.2 and 2.3 of RFC 3986: | |
|
41 | # reserved = gen-delims / sub-delims | |
|
42 | # gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@" | |
|
43 | # sub-delims = "!" / "$" / "&" / "'" / "(" / ")" | |
|
44 | # / "*" / "+" / "," / ";" / "=" | |
|
45 | # unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" | |
|
46 | # Of the unreserved characters, urllib.quote already considers all but | |
|
47 | # the ~ safe. | |
|
48 | # The % character is also added to the list of safe characters here, as the | |
|
49 | # end of section 3.1 of RFC 3987 specifically mentions that % must not be | |
|
50 | # converted. | |
|
51 | if iri is None: | |
|
52 | return iri | |
|
53 | return quote(safe_str(iri), safe=b"/#%[]=:;$&()+,!?*@'~") | |
|
54 | ||
|
55 | ||
|
56 | def force_text(text, strings_only=False): | |
|
57 | return safe_unicode(text) |
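For reference, a small sketch of what iri_to_uri and force_text do in practice; the exact byte handling depends on safe_str/safe_unicode from rhodecode.lib.utils, so treat the expected values as assumptions:

    # -*- coding: utf-8 -*-
    from rhodecode.lib.feedgenerator.utils import iri_to_uri, force_text

    # reserved URI characters and '%' pass through; non-ASCII is UTF-8 percent-encoded
    print(iri_to_uri(u'/red%09ros\xe9#red'))   # -> /red%09ros%C3%A9#red
    print(iri_to_uri(None))                    # -> None (returned unchanged)

    # force_text() normalizes whatever it is given to unicode via safe_unicode()
    print(force_text('caf\xc3\xa9'))           # -> café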
@@ -1,388 +1,389 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import logging |
|
23 | 23 | import itertools |
|
24 | 24 | |
|
25 | from webhelpers.feedgenerator import Atom1Feed, Rss201rev2Feed | |
|
25 | ||
|
26 | 26 | |
|
27 | 27 | from pyramid.view import view_config |
|
28 | 28 | from pyramid.httpexceptions import HTTPBadRequest |
|
29 | 29 | from pyramid.response import Response |
|
30 | 30 | from pyramid.renderers import render |
|
31 | 31 | |
|
32 | 32 | from rhodecode.apps._base import BaseAppView |
|
33 | 33 | from rhodecode.model.db import ( |
|
34 | 34 | or_, joinedload, Repository, UserLog, UserFollowing, User, UserApiKeys) |
|
35 | 35 | from rhodecode.model.meta import Session |
|
36 | 36 | import rhodecode.lib.helpers as h |
|
37 | 37 | from rhodecode.lib.helpers import SqlPage |
|
38 | 38 | from rhodecode.lib.user_log_filter import user_log_filter |
|
39 | 39 | from rhodecode.lib.auth import LoginRequired, NotAnonymous, CSRFRequired, HasRepoPermissionAny |
|
40 | 40 | from rhodecode.lib.utils2 import safe_int, AttributeDict, md5_safe |
|
41 | from rhodecode.lib.feedgenerator.feedgenerator import Atom1Feed, Rss201rev2Feed | |
|
41 | 42 | from rhodecode.model.scm import ScmModel |
|
42 | 43 | |
|
43 | 44 | log = logging.getLogger(__name__) |
|
44 | 45 | |
|
45 | 46 | |
|
46 | 47 | class JournalView(BaseAppView): |
|
47 | 48 | |
|
48 | 49 | def load_default_context(self): |
|
49 | 50 | c = self._get_local_tmpl_context(include_app_defaults=True) |
|
50 | 51 | |
|
51 | 52 | self._load_defaults(c.rhodecode_name) |
|
52 | 53 | |
|
53 | 54 | # TODO(marcink): what is this, why we need a global register ? |
|
54 | 55 | c.search_term = self.request.GET.get('filter') or '' |
|
55 | 56 | return c |
|
56 | 57 | |
|
57 | 58 | def _get_config(self, rhodecode_name): |
|
58 | 59 | import rhodecode |
|
59 | 60 | config = rhodecode.CONFIG |
|
60 | 61 | |
|
61 | 62 | return { |
|
62 | 63 | 'language': 'en-us', |
|
63 | 64 | 'feed_ttl': '5', # TTL of feed, |
|
64 | 65 | 'feed_items_per_page': |
|
65 | 66 | safe_int(config.get('rss_items_per_page', 20)), |
|
66 | 67 | 'rhodecode_name': rhodecode_name |
|
67 | 68 | } |
|
68 | 69 | |
|
69 | 70 | def _load_defaults(self, rhodecode_name): |
|
70 | 71 | config = self._get_config(rhodecode_name) |
|
71 | 72 | # common values for feeds |
|
72 | 73 | self.language = config["language"] |
|
73 | 74 | self.ttl = config["feed_ttl"] |
|
74 | 75 | self.feed_items_per_page = config['feed_items_per_page'] |
|
75 | 76 | self.rhodecode_name = config['rhodecode_name'] |
|
76 | 77 | |
|
77 | 78 | def _get_daily_aggregate(self, journal): |
|
78 | 79 | groups = [] |
|
79 | 80 | for k, g in itertools.groupby(journal, lambda x: x.action_as_day): |
|
80 | 81 | user_group = [] |
|
81 | 82 | # groupby username if it's a present value, else |
|
82 | 83 | # fallback to journal username |
|
83 | 84 | for _, g2 in itertools.groupby( |
|
84 | 85 | list(g), lambda x: x.user.username if x.user else x.username): |
|
85 | 86 | l = list(g2) |
|
86 | 87 | user_group.append((l[0].user, l)) |
|
87 | 88 | |
|
88 | 89 | groups.append((k, user_group,)) |
|
89 | 90 | |
|
90 | 91 | return groups |
|
91 | 92 | |
|
92 | 93 | def _get_journal_data(self, following_repos, search_term): |
|
93 | 94 | repo_ids = [x.follows_repository.repo_id for x in following_repos |
|
94 | 95 | if x.follows_repository is not None] |
|
95 | 96 | user_ids = [x.follows_user.user_id for x in following_repos |
|
96 | 97 | if x.follows_user is not None] |
|
97 | 98 | |
|
98 | 99 | filtering_criterion = None |
|
99 | 100 | |
|
100 | 101 | if repo_ids and user_ids: |
|
101 | 102 | filtering_criterion = or_(UserLog.repository_id.in_(repo_ids), |
|
102 | 103 | UserLog.user_id.in_(user_ids)) |
|
103 | 104 | if repo_ids and not user_ids: |
|
104 | 105 | filtering_criterion = UserLog.repository_id.in_(repo_ids) |
|
105 | 106 | if not repo_ids and user_ids: |
|
106 | 107 | filtering_criterion = UserLog.user_id.in_(user_ids) |
|
107 | 108 | if filtering_criterion is not None: |
|
108 | 109 | journal = Session().query(UserLog)\ |
|
109 | 110 | .options(joinedload(UserLog.user))\ |
|
110 | 111 | .options(joinedload(UserLog.repository)) |
|
111 | 112 | # filter |
|
112 | 113 | try: |
|
113 | 114 | journal = user_log_filter(journal, search_term) |
|
114 | 115 | except Exception: |
|
115 | 116 | # we want this to crash for now |
|
116 | 117 | raise |
|
117 | 118 | journal = journal.filter(filtering_criterion)\ |
|
118 | 119 | .order_by(UserLog.action_date.desc()) |
|
119 | 120 | else: |
|
120 | 121 | journal = [] |
|
121 | 122 | |
|
122 | 123 | return journal |
|
123 | 124 | |
|
124 | 125 | def feed_uid(self, entry_id): |
|
125 | 126 | return '{}:{}'.format('journal', md5_safe(entry_id)) |
|
126 | 127 | |
|
127 | 128 | def _atom_feed(self, repos, search_term, public=True): |
|
128 | 129 | _ = self.request.translate |
|
129 | 130 | journal = self._get_journal_data(repos, search_term) |
|
130 | 131 | if public: |
|
131 | 132 | _link = h.route_url('journal_public_atom') |
|
132 | 133 | _desc = '%s %s %s' % (self.rhodecode_name, _('public journal'), |
|
133 | 134 | 'atom feed') |
|
134 | 135 | else: |
|
135 | 136 | _link = h.route_url('journal_atom') |
|
136 | 137 | _desc = '%s %s %s' % (self.rhodecode_name, _('journal'), 'atom feed') |
|
137 | 138 | |
|
138 | 139 | feed = Atom1Feed( |
|
139 | 140 | title=_desc, link=_link, description=_desc, |
|
140 | 141 | language=self.language, ttl=self.ttl) |
|
141 | 142 | |
|
142 | 143 | for entry in journal[:self.feed_items_per_page]: |
|
143 | 144 | user = entry.user |
|
144 | 145 | if user is None: |
|
145 | 146 | # fix deleted users |
|
146 | 147 | user = AttributeDict({'short_contact': entry.username, |
|
147 | 148 | 'email': '', |
|
148 | 149 | 'full_contact': ''}) |
|
149 | 150 | action, action_extra, ico = h.action_parser( |
|
150 | 151 | self.request, entry, feed=True) |
|
151 | 152 | title = "%s - %s %s" % (user.short_contact, action(), |
|
152 | 153 | entry.repository.repo_name) |
|
153 | 154 | desc = action_extra() |
|
154 | 155 | _url = h.route_url('home') |
|
155 | 156 | if entry.repository is not None: |
|
156 | 157 | _url = h.route_url('repo_commits', |
|
157 | 158 | repo_name=entry.repository.repo_name) |
|
158 | 159 | |
|
159 | 160 | feed.add_item( |
|
160 | 161 | unique_id=self.feed_uid(entry.user_log_id), |
|
161 | 162 | title=title, |
|
162 | 163 | pubdate=entry.action_date, |
|
163 | 164 | link=_url, |
|
164 | 165 | author_email=user.email, |
|
165 | 166 | author_name=user.full_contact, |
|
166 | 167 | description=desc) |
|
167 | 168 | |
|
168 | 169 | response = Response(feed.writeString('utf-8')) |
|
169 | response.content_type = feed.mime_type | |

170 | response.content_type = feed.content_type | |
|
170 | 171 | return response |
|
171 | 172 | |
|
172 | 173 | def _rss_feed(self, repos, search_term, public=True): |
|
173 | 174 | _ = self.request.translate |
|
174 | 175 | journal = self._get_journal_data(repos, search_term) |
|
175 | 176 | if public: |
|
176 | 177 | _link = h.route_url('journal_public_atom') |
|
177 | 178 | _desc = '%s %s %s' % ( |
|
178 | 179 | self.rhodecode_name, _('public journal'), 'rss feed') |
|
179 | 180 | else: |
|
180 | 181 | _link = h.route_url('journal_atom') |
|
181 | 182 | _desc = '%s %s %s' % ( |
|
182 | 183 | self.rhodecode_name, _('journal'), 'rss feed') |
|
183 | 184 | |
|
184 | 185 | feed = Rss201rev2Feed( |
|
185 | 186 | title=_desc, link=_link, description=_desc, |
|
186 | 187 | language=self.language, ttl=self.ttl) |
|
187 | 188 | |
|
188 | 189 | for entry in journal[:self.feed_items_per_page]: |
|
189 | 190 | user = entry.user |
|
190 | 191 | if user is None: |
|
191 | 192 | # fix deleted users |
|
192 | 193 | user = AttributeDict({'short_contact': entry.username, |
|
193 | 194 | 'email': '', |
|
194 | 195 | 'full_contact': ''}) |
|
195 | 196 | action, action_extra, ico = h.action_parser( |
|
196 | 197 | self.request, entry, feed=True) |
|
197 | 198 | title = "%s - %s %s" % (user.short_contact, action(), |
|
198 | 199 | entry.repository.repo_name) |
|
199 | 200 | desc = action_extra() |
|
200 | 201 | _url = h.route_url('home') |
|
201 | 202 | if entry.repository is not None: |
|
202 | 203 | _url = h.route_url('repo_commits', |
|
203 | 204 | repo_name=entry.repository.repo_name) |
|
204 | 205 | |
|
205 | 206 | feed.add_item( |
|
206 | 207 | unique_id=self.feed_uid(entry.user_log_id), |
|
207 | 208 | title=title, |
|
208 | 209 | pubdate=entry.action_date, |
|
209 | 210 | link=_url, |
|
210 | 211 | author_email=user.email, |
|
211 | 212 | author_name=user.full_contact, |
|
212 | 213 | description=desc) |
|
213 | 214 | |
|
214 | 215 | response = Response(feed.writeString('utf-8')) |
|
215 | response.content_type = feed.mime_type | |

216 | response.content_type = feed.content_type | |
|
216 | 217 | return response |
|
217 | 218 | |
|
218 | 219 | @LoginRequired() |
|
219 | 220 | @NotAnonymous() |
|
220 | 221 | @view_config( |
|
221 | 222 | route_name='journal', request_method='GET', |
|
222 | 223 | renderer=None) |
|
223 | 224 | def journal(self): |
|
224 | 225 | c = self.load_default_context() |
|
225 | 226 | |
|
226 | 227 | p = safe_int(self.request.GET.get('page', 1), 1) |
|
227 | 228 | c.user = User.get(self._rhodecode_user.user_id) |
|
228 | 229 | following = Session().query(UserFollowing)\ |
|
229 | 230 | .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\ |
|
230 | 231 | .options(joinedload(UserFollowing.follows_repository))\ |
|
231 | 232 | .all() |
|
232 | 233 | |
|
233 | 234 | journal = self._get_journal_data(following, c.search_term) |
|
234 | 235 | |
|
235 | 236 | def url_generator(page_num): |
|
236 | 237 | query_params = { |
|
237 | 238 | 'page': page_num, |
|
238 | 239 | 'filter': c.search_term |
|
239 | 240 | } |
|
240 | 241 | return self.request.current_route_path(_query=query_params) |
|
241 | 242 | |
|
242 | 243 | c.journal_pager = SqlPage( |
|
243 | 244 | journal, page=p, items_per_page=20, url_maker=url_generator) |
|
244 | 245 | c.journal_day_aggreagate = self._get_daily_aggregate(c.journal_pager) |
|
245 | 246 | |
|
246 | 247 | c.journal_data = render( |
|
247 | 248 | 'rhodecode:templates/journal/journal_data.mako', |
|
248 | 249 | self._get_template_context(c), self.request) |
|
249 | 250 | |
|
250 | 251 | if self.request.is_xhr: |
|
251 | 252 | return Response(c.journal_data) |
|
252 | 253 | |
|
253 | 254 | html = render( |
|
254 | 255 | 'rhodecode:templates/journal/journal.mako', |
|
255 | 256 | self._get_template_context(c), self.request) |
|
256 | 257 | return Response(html) |
|
257 | 258 | |
|
258 | 259 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) |
|
259 | 260 | @NotAnonymous() |
|
260 | 261 | @view_config( |
|
261 | 262 | route_name='journal_atom', request_method='GET', |
|
262 | 263 | renderer=None) |
|
263 | 264 | def journal_atom(self): |
|
264 | 265 | """ |
|
265 | 266 | Produce an atom-1.0 feed via feedgenerator module |
|
266 | 267 | """ |
|
267 | 268 | c = self.load_default_context() |
|
268 | 269 | following_repos = Session().query(UserFollowing)\ |
|
269 | 270 | .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\ |
|
270 | 271 | .options(joinedload(UserFollowing.follows_repository))\ |
|
271 | 272 | .all() |
|
272 | 273 | return self._atom_feed(following_repos, c.search_term, public=False) |
|
273 | 274 | |
|
274 | 275 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) |
|
275 | 276 | @NotAnonymous() |
|
276 | 277 | @view_config( |
|
277 | 278 | route_name='journal_rss', request_method='GET', |
|
278 | 279 | renderer=None) |
|
279 | 280 | def journal_rss(self): |
|
280 | 281 | """ |
|
281 | 282 | Produce an rss feed via feedgenerator module |
|
282 | 283 | """ |
|
283 | 284 | c = self.load_default_context() |
|
284 | 285 | following_repos = Session().query(UserFollowing)\ |
|
285 | 286 | .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\ |
|
286 | 287 | .options(joinedload(UserFollowing.follows_repository))\ |
|
287 | 288 | .all() |
|
288 | 289 | return self._rss_feed(following_repos, c.search_term, public=False) |
|
289 | 290 | |
|
290 | 291 | @LoginRequired() |
|
291 | 292 | @NotAnonymous() |
|
292 | 293 | @CSRFRequired() |
|
293 | 294 | @view_config( |
|
294 | 295 | route_name='toggle_following', request_method='POST', |
|
295 | 296 | renderer='json_ext') |
|
296 | 297 | def toggle_following(self): |
|
297 | 298 | user_id = self.request.POST.get('follows_user_id') |
|
298 | 299 | if user_id: |
|
299 | 300 | try: |
|
300 | 301 | ScmModel().toggle_following_user(user_id, self._rhodecode_user.user_id) |
|
301 | 302 | Session().commit() |
|
302 | 303 | return 'ok' |
|
303 | 304 | except Exception: |
|
304 | 305 | raise HTTPBadRequest() |
|
305 | 306 | |
|
306 | 307 | repo_id = self.request.POST.get('follows_repo_id') |
|
307 | 308 | repo = Repository.get_or_404(repo_id) |
|
308 | 309 | perm_set = ['repository.read', 'repository.write', 'repository.admin'] |
|
309 | 310 | has_perm = HasRepoPermissionAny(*perm_set)(repo.repo_name, 'RepoWatch check') |
|
310 | 311 | if repo and has_perm: |
|
311 | 312 | try: |
|
312 | 313 | ScmModel().toggle_following_repo(repo_id, self._rhodecode_user.user_id) |
|
313 | 314 | Session().commit() |
|
314 | 315 | return 'ok' |
|
315 | 316 | except Exception: |
|
316 | 317 | raise HTTPBadRequest() |
|
317 | 318 | |
|
318 | 319 | raise HTTPBadRequest() |
|
319 | 320 | |
|
320 | 321 | @LoginRequired() |
|
321 | 322 | @view_config( |
|
322 | 323 | route_name='journal_public', request_method='GET', |
|
323 | 324 | renderer=None) |
|
324 | 325 | def journal_public(self): |
|
325 | 326 | c = self.load_default_context() |
|
326 | 327 | # Return a rendered template |
|
327 | 328 | p = safe_int(self.request.GET.get('page', 1), 1) |
|
328 | 329 | |
|
329 | 330 | c.following = Session().query(UserFollowing)\ |
|
330 | 331 | .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\ |
|
331 | 332 | .options(joinedload(UserFollowing.follows_repository))\ |
|
332 | 333 | .all() |
|
333 | 334 | |
|
334 | 335 | journal = self._get_journal_data(c.following, c.search_term) |
|
335 | 336 | |
|
336 | 337 | def url_generator(page_num): |
|
337 | 338 | query_params = { |
|
338 | 339 | 'page': page_num |
|
339 | 340 | } |
|
340 | 341 | return self.request.current_route_path(_query=query_params) |
|
341 | 342 | |
|
342 | 343 | c.journal_pager = SqlPage( |
|
343 | 344 | journal, page=p, items_per_page=20, url_maker=url_generator) |
|
344 | 345 | c.journal_day_aggreagate = self._get_daily_aggregate(c.journal_pager) |
|
345 | 346 | |
|
346 | 347 | c.journal_data = render( |
|
347 | 348 | 'rhodecode:templates/journal/journal_data.mako', |
|
348 | 349 | self._get_template_context(c), self.request) |
|
349 | 350 | |
|
350 | 351 | if self.request.is_xhr: |
|
351 | 352 | return Response(c.journal_data) |
|
352 | 353 | |
|
353 | 354 | html = render( |
|
354 | 355 | 'rhodecode:templates/journal/public_journal.mako', |
|
355 | 356 | self._get_template_context(c), self.request) |
|
356 | 357 | return Response(html) |
|
357 | 358 | |
|
358 | 359 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) |
|
359 | 360 | @view_config( |
|
360 | 361 | route_name='journal_public_atom', request_method='GET', |
|
361 | 362 | renderer=None) |
|
362 | 363 | def journal_public_atom(self): |
|
363 | 364 | """ |
|
364 | 365 | Produce an atom-1.0 feed via feedgenerator module |
|
365 | 366 | """ |
|
366 | 367 | c = self.load_default_context() |
|
367 | 368 | following_repos = Session().query(UserFollowing)\ |
|
368 | 369 | .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\ |
|
369 | 370 | .options(joinedload(UserFollowing.follows_repository))\ |
|
370 | 371 | .all() |
|
371 | 372 | |
|
372 | 373 | return self._atom_feed(following_repos, c.search_term) |
|
373 | 374 | |
|
374 | 375 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) |
|
375 | 376 | @view_config( |
|
376 | 377 | route_name='journal_public_rss', request_method='GET', |
|
377 | 378 | renderer=None) |
|
378 | 379 | def journal_public_rss(self): |
|
379 | 380 | """ |
|
380 | 381 | Produce an rss2 feed via feedgenerator module |
|
381 | 382 | """ |
|
382 | 383 | c = self.load_default_context() |
|
383 | 384 | following_repos = Session().query(UserFollowing)\ |
|
384 | 385 | .filter(UserFollowing.user_id == self._rhodecode_user.user_id)\ |
|
385 | 386 | .options(joinedload(UserFollowing.follows_repository))\ |
|
386 | 387 | .all() |
|
387 | 388 | |
|
388 | 389 | return self._rss_feed(following_repos, c.search_term) |
@@ -1,137 +1,137 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import pytest |
|
22 | 22 | from rhodecode.model.auth_token import AuthTokenModel |
|
23 | 23 | from rhodecode.tests import TestController |
|
24 | 24 | |
|
25 | 25 | |
|
26 | 26 | def route_path(name, params=None, **kwargs): |
|
27 | 27 | import urllib |
|
28 | 28 | |
|
29 | 29 | base_url = { |
|
30 | 30 | 'rss_feed_home': '/{repo_name}/feed-rss', |
|
31 | 31 | 'atom_feed_home': '/{repo_name}/feed-atom', |
|
32 | 32 | 'rss_feed_home_old': '/{repo_name}/feed/rss', |
|
33 | 33 | 'atom_feed_home_old': '/{repo_name}/feed/atom', |
|
34 | 34 | }[name].format(**kwargs) |
|
35 | 35 | |
|
36 | 36 | if params: |
|
37 | 37 | base_url = '{}?{}'.format(base_url, urllib.urlencode(params)) |
|
38 | 38 | return base_url |
|
39 | 39 | |
|
40 | 40 | |
|
41 | 41 | class TestFeedView(TestController): |
|
42 | 42 | |
|
43 | 43 | @pytest.mark.parametrize("feed_type,response_types,content_type",[ |
|
44 | ('rss', ['<rss version="2.0" | |

44 | ('rss', ['<rss version="2.0"'], | |
|
45 | 45 | "application/rss+xml"), |
|
46 | 46 | ('atom', ['xmlns="http://www.w3.org/2005/Atom"', 'xml:lang="en-us"'], |
|
47 | 47 | "application/atom+xml"), |
|
48 | 48 | ]) |
|
49 | 49 | def test_feed(self, backend, feed_type, response_types, content_type): |
|
50 | 50 | self.log_user() |
|
51 | 51 | response = self.app.get( |
|
52 | 52 | route_path('{}_feed_home'.format(feed_type), |
|
53 | 53 | repo_name=backend.repo_name)) |
|
54 | 54 | |
|
55 | 55 | for content in response_types: |
|
56 | 56 | response.mustcontain(content) |
|
57 | 57 | |
|
58 | 58 | assert response.content_type == content_type |
|
59 | 59 | |
|
60 | 60 | @pytest.mark.parametrize("feed_type, content_type", [ |
|
61 | 61 | ('rss', "application/rss+xml"), |
|
62 | 62 | ('atom', "application/atom+xml") |
|
63 | 63 | ]) |
|
64 | 64 | def test_feed_with_auth_token( |
|
65 | 65 | self, backend, user_admin, feed_type, content_type): |
|
66 | 66 | auth_token = user_admin.feed_token |
|
67 | 67 | assert auth_token != '' |
|
68 | 68 | |
|
69 | 69 | response = self.app.get( |
|
70 | 70 | route_path( |
|
71 | 71 | '{}_feed_home'.format(feed_type), |
|
72 | 72 | repo_name=backend.repo_name, |
|
73 | 73 | params=dict(auth_token=auth_token)), |
|
74 | 74 | status=200) |
|
75 | 75 | |
|
76 | 76 | assert response.content_type == content_type |
|
77 | 77 | |
|
78 | 78 | @pytest.mark.parametrize("feed_type, content_type", [ |
|
79 | 79 | ('rss', "application/rss+xml"), |
|
80 | 80 | ('atom', "application/atom+xml") |
|
81 | 81 | ]) |
|
82 | 82 | def test_feed_with_auth_token_by_uid( |
|
83 | 83 | self, backend, user_admin, feed_type, content_type): |
|
84 | 84 | auth_token = user_admin.feed_token |
|
85 | 85 | assert auth_token != '' |
|
86 | 86 | |
|
87 | 87 | response = self.app.get( |
|
88 | 88 | route_path( |
|
89 | 89 | '{}_feed_home'.format(feed_type), |
|
90 | 90 | repo_name='_{}'.format(backend.repo.repo_id), |
|
91 | 91 | params=dict(auth_token=auth_token)), |
|
92 | 92 | status=200) |
|
93 | 93 | |
|
94 | 94 | assert response.content_type == content_type |
|
95 | 95 | |
|
96 | 96 | @pytest.mark.parametrize("feed_type, content_type", [ |
|
97 | 97 | ('rss', "application/rss+xml"), |
|
98 | 98 | ('atom', "application/atom+xml") |
|
99 | 99 | ]) |
|
100 | 100 | def test_feed_old_urls_with_auth_token( |
|
101 | 101 | self, backend, user_admin, feed_type, content_type): |
|
102 | 102 | auth_token = user_admin.feed_token |
|
103 | 103 | assert auth_token != '' |
|
104 | 104 | |
|
105 | 105 | response = self.app.get( |
|
106 | 106 | route_path( |
|
107 | 107 | '{}_feed_home_old'.format(feed_type), |
|
108 | 108 | repo_name=backend.repo_name, |
|
109 | 109 | params=dict(auth_token=auth_token)), |
|
110 | 110 | status=200) |
|
111 | 111 | |
|
112 | 112 | assert response.content_type == content_type |
|
113 | 113 | |
|
114 | 114 | @pytest.mark.parametrize("feed_type", ['rss', 'atom']) |
|
115 | 115 | def test_feed_with_auth_token_of_wrong_type( |
|
116 | 116 | self, backend, user_util, feed_type): |
|
117 | 117 | user = user_util.create_user() |
|
118 | 118 | auth_token = AuthTokenModel().create( |
|
119 | 119 | user.user_id, u'test-token', -1, AuthTokenModel.cls.ROLE_API) |
|
120 | 120 | auth_token = auth_token.api_key |
|
121 | 121 | |
|
122 | 122 | self.app.get( |
|
123 | 123 | route_path( |
|
124 | 124 | '{}_feed_home'.format(feed_type), |
|
125 | 125 | repo_name=backend.repo_name, |
|
126 | 126 | params=dict(auth_token=auth_token)), |
|
127 | 127 | status=302) |
|
128 | 128 | |
|
129 | 129 | auth_token = AuthTokenModel().create( |
|
130 | 130 | user.user_id, u'test-token', -1, AuthTokenModel.cls.ROLE_FEED) |
|
131 | 131 | auth_token = auth_token.api_key |
|
132 | 132 | self.app.get( |
|
133 | 133 | route_path( |
|
134 | 134 | '{}_feed_home'.format(feed_type), |
|
135 | 135 | repo_name=backend.repo_name, |
|
136 | 136 | params=dict(auth_token=auth_token)), |
|
137 | 137 | status=200) |
@@ -1,205 +1,212 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2017-2019 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | import pytz |
|
21 | 21 | import logging |
|
22 | 22 | |
|
23 | 23 | from pyramid.view import view_config |
|
24 | 24 | from pyramid.response import Response |
|
25 | from webhelpers.feedgenerator import Rss201rev2Feed, Atom1Feed | |
|
25 | ||
|
26 | 26 | |
|
27 | 27 | from rhodecode.apps._base import RepoAppView |
|
28 | from rhodecode.lib.feedgenerator import Rss201rev2Feed, Atom1Feed | |
|
28 | 29 | from rhodecode.lib import audit_logger |
|
29 | 30 | from rhodecode.lib import rc_cache |
|
30 | 31 | from rhodecode.lib import helpers as h |
|
31 | 32 | from rhodecode.lib.auth import ( |
|
32 | 33 | LoginRequired, HasRepoPermissionAnyDecorator) |
|
33 | 34 | from rhodecode.lib.diffs import DiffProcessor, LimitedDiffContainer |
|
34 | 35 | from rhodecode.lib.utils2 import str2bool, safe_int, md5_safe |
|
35 | 36 | from rhodecode.model.db import UserApiKeys, CacheKey |
|
36 | 37 | |
|
37 | 38 | log = logging.getLogger(__name__) |
|
38 | 39 | |
|
39 | 40 | |
|
40 | 41 | class RepoFeedView(RepoAppView): |
|
41 | 42 | def load_default_context(self): |
|
42 | 43 | c = self._get_local_tmpl_context() |
|
43 | 44 | self._load_defaults() |
|
44 | 45 | return c |
|
45 | 46 | |
|
46 | 47 | def _get_config(self): |
|
47 | 48 | import rhodecode |
|
48 | 49 | config = rhodecode.CONFIG |
|
49 | 50 | |
|
50 | 51 | return { |
|
51 | 52 | 'language': 'en-us', |
|
52 | 53 | 'feed_ttl': '5', # TTL of feed, |
|
53 | 54 | 'feed_include_diff': |
|
54 | 55 | str2bool(config.get('rss_include_diff', False)), |
|
55 | 56 | 'feed_items_per_page': |
|
56 | 57 | safe_int(config.get('rss_items_per_page', 20)), |
|
57 | 58 | 'feed_diff_limit': |
|
58 | 59 | # we need to protect from parsing huge diffs here other way |
|
59 | 60 | # we can kill the server |
|
60 | 61 | safe_int(config.get('rss_cut_off_limit', 32 * 1024)), |
|
61 | 62 | } |
|
62 | 63 | |
|
63 | 64 | def _load_defaults(self): |
|
64 | 65 | _ = self.request.translate |
|
65 | 66 | config = self._get_config() |
|
66 | 67 | # common values for feeds |
|
67 | 68 | self.description = _('Changes on %s repository') |
|
68 |
|
|
|
69 | self.title = _('%s %s feed') % (self.db_repo_name, '%s') | |
|
69 | 70 | self.language = config["language"] |
|
70 | 71 | self.ttl = config["feed_ttl"] |
|
71 | 72 | self.feed_include_diff = config['feed_include_diff'] |
|
72 | 73 | self.feed_diff_limit = config['feed_diff_limit'] |
|
73 | 74 | self.feed_items_per_page = config['feed_items_per_page'] |
|
74 | 75 | |
|
75 | 76 | def _changes(self, commit): |
|
76 | 77 | diff_processor = DiffProcessor( |
|
77 | 78 | commit.diff(), diff_limit=self.feed_diff_limit) |
|
78 | 79 | _parsed = diff_processor.prepare(inline_diff=False) |
|
79 | 80 | limited_diff = isinstance(_parsed, LimitedDiffContainer) |
|
80 | 81 | |
|
81 | 82 | return diff_processor, _parsed, limited_diff |
|
82 | 83 | |
|
83 | 84 | def _get_title(self, commit): |
|
84 | return h. | |

85 | return h.chop_at_smart(commit.message, '\n', suffix_if_chopped='...') | |
|
85 | 86 | |
|
86 | 87 | def _get_description(self, commit): |
|
87 | 88 | _renderer = self.request.get_partial_renderer( |
|
88 | 89 | 'rhodecode:templates/feed/atom_feed_entry.mako') |
|
89 | 90 | diff_processor, parsed_diff, limited_diff = self._changes(commit) |
|
90 | 91 | filtered_parsed_diff, has_hidden_changes = self.path_filter.filter_patchset(parsed_diff) |
|
91 | 92 | return _renderer( |
|
92 | 93 | 'body', |
|
93 | 94 | commit=commit, |
|
94 | 95 | parsed_diff=filtered_parsed_diff, |
|
95 | 96 | limited_diff=limited_diff, |
|
96 | 97 | feed_include_diff=self.feed_include_diff, |
|
97 | 98 | diff_processor=diff_processor, |
|
98 | 99 | has_hidden_changes=has_hidden_changes |
|
99 | 100 | ) |
|
100 | 101 | |
|
101 | 102 | def _set_timezone(self, date, tzinfo=pytz.utc): |
|
102 | 103 | if not getattr(date, "tzinfo", None): |
|
103 | 104 | date.replace(tzinfo=tzinfo) |
|
104 | 105 | return date |
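
One thing worth noting about `_set_timezone` above: `datetime.replace()` returns a new object rather than mutating in place, so the un-assigned call leaves a naive date unchanged. A small sketch of the presumably intended behaviour:

    import pytz
    from datetime import datetime

    # Sketch: attach UTC to naive datetimes; replace() must be assigned back.
    def set_timezone(date, tzinfo=pytz.utc):
        if getattr(date, 'tzinfo', None) is None:
            date = date.replace(tzinfo=tzinfo)
        return date

    assert set_timezone(datetime(2019, 1, 1)).tzinfo is pytz.utc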
|
105 | 106 | |
|
106 | 107 | def _get_commits(self): |
|
107 | return list(self.rhodecode_vcs_repo[-self.feed_items_per_page:]) | |
|
108 | pre_load = ['author', 'branch', 'date', 'message', 'parents'] | |
|
109 | collection = self.rhodecode_vcs_repo.get_commits( | |
|
110 | branch_name=None, show_hidden=False, pre_load=pre_load, | |
|
111 | translate_tags=False) | |
|
112 | ||
|
113 | return list(collection[-self.feed_items_per_page:]) | |
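
The rewritten `_get_commits` asks the backend for a commit collection with the listed attributes pre-loaded and slices off only the newest `feed_items_per_page` entries, keeping the feed bounded regardless of repository size. An illustration of that slice, with a plain list standing in for the lazy commit collection:

    # Plain-list stand-in for the collection returned by get_commits().
    commits = ['c%d' % i for i in range(1, 101)]    # oldest .. newest
    items_per_page = 20

    latest = list(commits[-items_per_page:])        # newest 20, still oldest-first
    assert latest[0] == 'c81' and latest[-1] == 'c100'

    # the views emit entries newest-first:
    for commit in reversed(latest):
        pass  # feed.add_item(...) in the real code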
|
108 | 114 | |
|
109 | 115 | def uid(self, repo_id, commit_id): |
|
110 | 116 | return '{}:{}'.format(md5_safe(repo_id), md5_safe(commit_id)) |
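
`uid` builds a stable per-entry identifier by hashing the repository id and the commit id separately and joining them with a colon. A sketch with `hashlib.md5` standing in for RhodeCode's `md5_safe` (which, judging from the call with an integer repo id, also tolerates non-string input):

    import hashlib

    # md5_hex is an illustrative stand-in for rhodecode.lib.utils2.md5_safe.
    def md5_hex(value):
        return hashlib.md5(str(value).encode('utf-8')).hexdigest()

    def feed_uid(repo_id, commit_id):
        return '{}:{}'.format(md5_hex(repo_id), md5_hex(commit_id))

    print(feed_uid(42, 'deadbeefcafe'))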
|
111 | 117 | |
|
112 | 118 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) |
|
113 | 119 | @HasRepoPermissionAnyDecorator( |
|
114 | 120 | 'repository.read', 'repository.write', 'repository.admin') |
|
115 | 121 | @view_config(route_name='atom_feed_home', request_method='GET', renderer=None) |
|
116 | 122 | @view_config(route_name='atom_feed_home_old', request_method='GET', renderer=None) |
|
117 | 123 | def atom(self): |
|
118 | 124 | """ |
|
119 | 125 | Produce an atom-1.0 feed via feedgenerator module |
|
120 | 126 | """ |
|
121 | 127 | self.load_default_context() |
|
128 | force_recache = self.get_recache_flag() | |
|
122 | 129 | |
|
123 | 130 | cache_namespace_uid = 'cache_repo_feed.{}'.format(self.db_repo.repo_id) |
|
124 | condition = not self.path_filter.is_enabled | |
|
131 | condition = not (self.path_filter.is_enabled or force_recache) | |
|
125 | 132 | region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) |
|
126 | 133 | |
|
127 | 134 | @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, |
|
128 | 135 | condition=condition) |
|
129 | def generate_atom_feed(repo_id, _repo_name, commit_id, _feed_type): | |
|
136 | def generate_atom_feed(repo_id, _repo_name, _commit_id, _feed_type): | |
|
130 | 137 | feed = Atom1Feed( |
|
131 | title=self.title % |

138 | title=self.title % 'atom', | |
|
139 | link=h.route_url('repo_summary', repo_name=_repo_name), | |
|
140 | description=self.description % _repo_name, | |
|
141 | language=self.language, | |
|
142 | ttl=self.ttl | |
|
143 | ) | |
|
144 | for commit in reversed(self._get_commits()): | |
|
145 | date = self._set_timezone(commit.date) | |
|
146 | feed.add_item( | |
|
147 | unique_id=self.uid(repo_id, commit.raw_id), | |
|
148 | title=self._get_title(commit), | |
|
149 | author_name=commit.author, | |
|
150 | description=self._get_description(commit), | |
|
151 | link=h.route_url( | |
|
152 | 'repo_commit', repo_name=_repo_name, | |
|
153 | commit_id=commit.raw_id), | |
|
154 | pubdate=date,) | |
|
155 | ||
|
156 | return feed.content_type, feed.writeString('utf-8') | |
|
157 | ||
|
158 | commit_id = self.db_repo.changeset_cache.get('raw_id') | |
|
159 | content_type, feed = generate_atom_feed( | |
|
160 | self.db_repo.repo_id, self.db_repo.repo_name, commit_id, 'atom') | |
|
161 | ||
|
162 | response = Response(feed) | |
|
163 | response.content_type = content_type | |
|
164 | return response | |
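
The cached `generate_atom_feed` closure builds the XML with the feedgenerator-style API: construct the feed, call `add_item` per commit, then serialize with `writeString`. A minimal standalone sketch of that API, assuming the vendored rhodecode.lib.feedgenerator mirrors the public feedgenerator package (the URLs and commit data below are made up):

    from datetime import datetime
    import pytz
    # assuming the vendored module mirrors the public `feedgenerator` package
    from feedgenerator import Atom1Feed

    feed = Atom1Feed(
        title='example-repo atom feed',
        link='https://example.com/example-repo',
        description='Changes on example-repo repository',
        language='en-us',
    )
    feed.add_item(
        unique_id='abc123:def456',
        title='feed: fix caching',
        author_name='Jane Doe <jane@example.com>',
        description='<pre>diff body here</pre>',
        link='https://example.com/example-repo/changeset/deadbeef',
        pubdate=datetime(2019, 1, 1, tzinfo=pytz.utc),
    )
    xml = feed.writeString('utf-8')      # serialized Atom 1.0 document
    content_type = feed.content_type     # e.g. 'application/atom+xml; charset=utf-8'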
|
165 | ||
|
166 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) | |
|
167 | @HasRepoPermissionAnyDecorator( | |
|
168 | 'repository.read', 'repository.write', 'repository.admin') | |
|
169 | @view_config(route_name='rss_feed_home', request_method='GET', renderer=None) | |
|
170 | @view_config(route_name='rss_feed_home_old', request_method='GET', renderer=None) | |
|
171 | def rss(self): | |
|
172 | """ | |
|
173 | Produce an rss2 feed via feedgenerator module | |
|
174 | """ | |
|
175 | self.load_default_context() | |
|
176 | force_recache = self.get_recache_flag() | |
|
177 | ||
|
178 | cache_namespace_uid = 'cache_repo_feed.{}'.format(self.db_repo.repo_id) | |
|
179 | condition = not (self.path_filter.is_enabled or force_recache) | |
|
180 | region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) | |
|
181 | ||
|
182 | @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, | |
|
183 | condition=condition) | |
|
184 | def generate_rss_feed(repo_id, _repo_name, _commit_id, _feed_type): | |
|
185 | feed = Rss201rev2Feed( | |
|
186 | title=self.title % 'rss', | |
|
132 | 187 | link=h.route_url('repo_summary', repo_name=_repo_name), |
|
133 | 188 | description=self.description % _repo_name, |
|
134 | 189 | language=self.language, |
|
135 | 190 | ttl=self.ttl |
|
136 | 191 | ) |
|
137 | 192 | |
|
138 | 193 | for commit in reversed(self._get_commits()): |
|
139 | 194 | date = self._set_timezone(commit.date) |
|
140 | 195 | feed.add_item( |
|
141 | 196 | unique_id=self.uid(repo_id, commit.raw_id), |
|
142 | 197 | title=self._get_title(commit), |
|
143 | 198 | author_name=commit.author, |
|
144 | 199 | description=self._get_description(commit), |
|
145 | 200 | link=h.route_url( |
|
146 | 201 | 'repo_commit', repo_name=_repo_name, |
|
147 | 202 | commit_id=commit.raw_id), |
|
148 | 203 | pubdate=date,) |
|
149 | ||
|
150 | return feed.mime_type, feed.writeString('utf-8') | |
|
204 | return feed.content_type, feed.writeString('utf-8') | |
|
151 | 205 | |
|
152 | 206 | commit_id = self.db_repo.changeset_cache.get('raw_id') |
|
153 | mime_type, feed = generate_atom_feed( | |

154 | self.db_repo.repo_id, self.db_repo.repo_name, commit_id, 'atom') | |
|
155 | ||
|
156 | response = Response(feed) | |
|
157 | response.content_type = mime_type | |
|
158 | return response | |
|
159 | ||
|
160 | @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED]) | |
|
161 | @HasRepoPermissionAnyDecorator( | |
|
162 | 'repository.read', 'repository.write', 'repository.admin') | |
|
163 | @view_config(route_name='rss_feed_home', request_method='GET', renderer=None) | |
|
164 | @view_config(route_name='rss_feed_home_old', request_method='GET', renderer=None) | |
|
165 | def rss(self): | |
|
166 | """ | |
|
167 | Produce an rss2 feed via feedgenerator module | |
|
168 | """ | |
|
169 | self.load_default_context() | |
|
170 | ||
|
171 | cache_namespace_uid = 'cache_repo_feed.{}'.format(self.db_repo.repo_id) | |
|
172 | condition = not self.path_filter.is_enabled | |
|
173 | region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) | |
|
174 | ||
|
175 | @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, | |
|
176 | condition=condition) | |
|
177 | def generate_rss_feed(repo_id, _repo_name, commit_id, _feed_type): | |
|
178 | feed = Rss201rev2Feed( | |
|
179 | title=self.title % _repo_name, | |
|
180 | link=h.route_url('repo_summary', repo_name=_repo_name), | |
|
181 | description=self.description % _repo_name, | |
|
182 | language=self.language, | |
|
183 | ttl=self.ttl | |
|
184 | ) | |
|
185 | ||
|
186 | for commit in reversed(self._get_commits()): | |
|
187 | date = self._set_timezone(commit.date) | |
|
188 | feed.add_item( | |
|
189 | unique_id=self.uid(repo_id, commit.raw_id), | |
|
190 | title=self._get_title(commit), | |
|
191 | author_name=commit.author, | |
|
192 | description=self._get_description(commit), | |
|
193 | link=h.route_url( | |
|
194 | 'repo_commit', repo_name=_repo_name, | |
|
195 | commit_id=commit.raw_id), | |
|
196 | pubdate=date,) | |
|
197 | return feed.mime_type, feed.writeString('utf-8') | |
|
198 | ||
|
199 | commit_id = self.db_repo.changeset_cache.get('raw_id') | |
|
200 | mime_type, feed = generate_rss_feed( | |
|
207 | content_type, feed = generate_rss_feed( | |
|
201 | 208 | self.db_repo.repo_id, self.db_repo.repo_name, commit_id, 'rss') |
|
202 | 209 | |
|
203 | 210 | response = Response(feed) |
|
204 | response.content_type = mime_type | |

211 | response.content_type = content_type | |
|
205 | 212 | return response |
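
Both views memoize the serialized feed per (repo_id, repo_name, commit_id, feed_type) and only use the cache when neither path filtering nor a forced recache is in effect. A rough sketch of that conditional-caching pattern built directly on dogpile.cache, which RhodeCode's rc_cache wraps; the region setup, key format and use_cache flag here are illustrative only:

    from dogpile.cache import make_region

    # In-memory region for illustration; RhodeCode configures its own backends.
    region = make_region().configure('dogpile.cache.memory')

    def cached_feed(repo_id, repo_name, commit_id, feed_type, use_cache=True):
        def _generate():
            # ... build and serialize the feed here ...
            return 'application/atom+xml; charset=utf-8', '<feed/>'

        if not use_cache:
            # mirrors condition=False above: regenerate instead of caching
            return _generate()

        key = 'repo_feed:{}:{}:{}'.format(repo_id, commit_id, feed_type)
        return region.get_or_create(key, _generate)

    content_type, body = cached_feed(1, 'example-repo', 'deadbeef', 'atom')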